#ifndef libavwrapper_H
#define libavwrapper_H

/*
 * libavwrapper.h
 * May 2012 Christopher Bruns
 * The libavwrapper class is a C++ wrapper around the poorly documented
 * libavcodec movie API used by ffmpeg. I made extensive use of Nathan
 * Clack's implementation in the whisk project.
 *
 * The libavwrapper.h and libavwrapper.cpp files depend only on the libavcodec
 * and allied sets of libraries. To compartmentalize and reduce dependencies,
 * I placed the Vaa3d-specific use of this class into a separate set of
 * source files: loadV3dFFMpeg.h/cpp
 */

////////////////////////
// Now that we have guards, instead of crashing instantly when the 2nd thread
// tries to encode a frame, we get an error:
//   *** Error in `./rotord': corrupted double-linked list: 0x00007f3c31b1b630 ***
// or
//   *** Error in `./rotord': double free or corruption (out): 0x00007f3bf8210080 ***
///////////////////////

// http://blog.tomaka17.com/2012/03/libavcodeclibavformat-tutorial/  -- great, uses C++11 features
// http://dranger.com/ffmpeg/                                        -- the most up-to-date tutorial?
// https://github.com/lbrandy/ffmpeg-fas/tree/master                 -- try this!

#ifndef UINT64_C
#define UINT64_C(c) (c ## ULL)
#endif

#include "Poco/Mutex.h"

extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libswscale/swscale.h>
//#include
//#include
#include <ffms.h> //?
}

/*
#include
#include
#include
#include
#include
*/

#include <string>
#include <iostream>
#include <vector>
#include <stdexcept>
#include <stdint.h>

namespace libav {

// Some libavcodec calls are not reentrant
static bool b_is_one_time_inited = false;
void maybeInitFFMpegLib();

static int sws_flags = SWS_BICUBIC;

// Translated to C++ by Christopher Bruns May 2012
// from ffmpeg_adapt.c in the whisk package by Nathan Clack, Mark Bolstadt, Michael Meeuwisse
class decoder
{
public:
    enum Channel {
        RED   = 0,
        GRAY  = 0,
        GREEN = 1,
        BLUE  = 2,
        ALPHA = 3
    };

    decoder(PixelFormat pixelFormat = PIX_FMT_RGB24);
    //decoder(QUrl url, PixelFormat pixelFormat=PIX_FMT_RGB24);
    void cleanup();
    virtual ~decoder();

    //bool open(QUrl url, enum PixelFormat formatParam = PIX_FMT_RGB24);
    //bool open(QIODevice& fileStream, QString& fileName, enum PixelFormat formatParam = PIX_FMT_RGB24);
    bool reinit_buffers_and_scaler();
    bool init_buffers_and_scaler();
    uint8_t getPixelIntensity(int x, int y, Channel c = GRAY) const;
    bool fetchFrame(int targetFrameIndex = 0);
    bool fetchFrame(int w, int h, int targetFrameIndex = 0);
    int getNumberOfFrames() const;
    int getWidth() const;
    int getHeight() const;
    float getFrameRate() const { return framerate; }
    int getNumberOfChannels() const;
    bool readNextFrame(int targetFrameIndex = 0);
    bool readNextFrameWithPacket(int targetFrameIndex, AVPacket& packet, AVFrame* pYuv);
    int seekToFrame(int targetFrameIndex = 0);
    int seekToFrameNew(int targetFrameIndex = 0);

    // make certain members public, for use by Fast3DTexture class
    AVFrame *pFrameRGB;
    AVFrame *pRaw;
    AVFormatContext *container;
    AVCodecContext *pCtx;
    int videoStream;
    int previousFrameIndex;
    bool isOpen;

    bool open(std::string& fileName, enum PixelFormat formatParam = PIX_FMT_RGB24);
    bool open(char* fileName, enum PixelFormat formatParam = PIX_FMT_RGB24);

protected:
    void initialize();
    bool openUsingInitializedContainer(enum PixelFormat formatParam = PIX_FMT_RGB24);
    static bool avtry(int result, const std::string& msg);

    AVCodec *pCodec;
    uint8_t *buffer, *blank;
    //struct SwsContext *Sctx;
    int width, height;
    PixelFormat format;
    size_t numBytes;
    int numFrames;
    int sc; // number of color channels
    float framerate; // NB: representing the frame rate as a float implies that
                     // above ~93 hours at 25 fps the calculations will be inaccurate
};
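
// Illustrative only -- not part of the wrapper itself: a minimal sketch of how a caller
// might drive the decoder class above, assuming open() prepares the format/codec contexts
// and fetchFrame() decodes and converts the requested frame. The file name, loop bounds,
// and the pixel probed are placeholders, and error handling beyond the boolean returns
// is omitted.
//
//   libav::decoder movie;                       // converts to PIX_FMT_RGB24 by default
//   std::string path("example.mp4");            // hypothetical input file
//   if (movie.open(path))
//   {
//       for (int f = 0; f < movie.getNumberOfFrames(); ++f)
//       {
//           if (!movie.fetchFrame(f))
//               break;                          // decode or seek failure
//           uint8_t g = movie.getPixelIntensity(0, 0, libav::decoder::GRAY);
//           // ... consume the frame; pFrameRGB holds the converted image ...
//       }
//   }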
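
// A minimal sketch (not the actual libavwrapper.cpp implementation) of how the
// non-reentrant, one-time libavcodec setup mentioned above could be serialized with the
// Poco mutex header included at the top of this file. The mutex object and the exact set
// of registration calls are assumptions; only the guard pattern is the point.
//
//   static Poco::FastMutex initMutex;                // assumed name, file-scope in the .cpp
//
//   void maybeInitFFMpegLib()
//   {
//       Poco::FastMutex::ScopedLock lock(initMutex); // serialize concurrent callers
//       if (b_is_one_time_inited)
//           return;                                  // registration already done
//       av_register_all();                           // register demuxers/muxers and codecs
//       b_is_one_time_inited = true;
//   }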
class ffms2_decoder
{
public:
    ffms2_decoder()
    {
        maybeInitFFMpegLib();
        pixfmts[0] = FFMS_GetPixFmt("rgb24");
        pixfmts[1] = -1;
        h = 0;
        w = 0;
        videosource = NULL;
        loaded = false;
        errinfo.Buffer = errmsg;
        errinfo.BufferSize = sizeof(errmsg);
        errinfo.ErrorType = FFMS_ERROR_SUCCESS;
        errinfo.SubType = FFMS_ERROR_SUCCESS;
    }

    ~ffms2_decoder()
    {
        cleanup();
    }

    void cleanup();
    bool open(const std::string& filename);

    float getFrameRate()
    {
        if (loaded)
            return ((float)videoprops->FPSNumerator) / ((float)videoprops->FPSDenominator);
        else
            return -1.0f;
    }

    int getNumberOfFrames()
    {
        if (loaded)
            return videoprops->NumFrames;
        else
            return -1;
    }

    int getNumberOfChannels()
    {
        return 3; // this is what we convert to
    }

    int getWidth() { return w; }
    int getHeight() { return h; }

    bool fetchFrame(int width, int height, int wanted)
    {
        if (FFMS_SetOutputFormatV2(videosource, pixfmts, width, height, FFMS_RESIZER_BICUBIC, &errinfo))
        {
            std::cerr << "ffmpegsource: " << errinfo.Buffer << std::endl;
            return false;
        }
        // wrap the requested index into the valid range before fetching
        frame = FFMS_GetFrame(videosource, wanted % videoprops->NumFrames, &errinfo);
        if (frame == NULL)
        {
            std::cerr << "ffmpegsource: " << errinfo.Buffer << std::endl;
            return false;
        }
        return true;
    }

    // last frame fetched by fetchFrame(), already converted to rgb24;
    // public so callers can read frame->Data / frame->Linesize directly
    const FFMS_Frame* frame;

protected:
    // members required by the inline methods above
    int pixfmts[2];
    int w, h;
    bool loaded;
    FFMS_VideoSource* videosource;
    const FFMS_VideoProperties* videoprops;
    FFMS_ErrorInfo errinfo;
    char errmsg[1024]; // any reasonable size; only sizeof(errmsg) is used above
};

// FIXME: the declaration that originally followed here is incomplete; only its trailing,
// audio-related members and closing brace survive:
//
//     ...>      buffer;        // declared type lost
//     AVFrame*  frame;
//     AVStream* audioStream;
//     AVPacket  packet;
//     int       sample_end;
//     int       sample_start;
//     bool      isPlanar;
// };

} // namespace libav

#endif // libavwrapper_H
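
// Illustrative only -- not part of the header: a minimal sketch of how a caller might use
// ffms2_decoder, assuming open() indexes the file and that the fetched FFMS_Frame stays
// exposed through the public `frame` member declared above. The file name and output size
// are placeholders.
//
//   libav::ffms2_decoder clip;
//   if (clip.open("example.mp4"))                      // hypothetical input file
//   {
//       for (int i = 0; i < clip.getNumberOfFrames(); ++i)
//       {
//           if (!clip.fetchFrame(640, 480, i))         // rgb24, resized to 640x480
//               break;
//           const uint8_t* rgb = clip.frame->Data[0];  // row stride: clip.frame->Linesize[0]
//           // ... consume the pixels ...
//       }
//   }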