From 440ba17594d17736a56cac0d89be97fb63577989 Mon Sep 17 00:00:00 2001
From: Tim Redfern
Date: Mon, 16 Sep 2013 14:31:04 +0100
Subject: ffmpegsource audio loading

---
 rotord/src/graph.cpp | 148 +++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 148 insertions(+)

(limited to 'rotord/src/graph.cpp')

diff --git a/rotord/src/graph.cpp b/rotord/src/graph.cpp
index 5487717..29b46be 100644
--- a/rotord/src/graph.cpp
+++ b/rotord/src/graph.cpp
@@ -80,6 +80,154 @@ bool Graph::preview(xmlIO &XML,string &node,string &_format,int frame,int w,int
 }
 
 bool Graph::video_render(const string &output_filename,const float framerate) {
+    //vector loaders=find_nodes("video_loader");
+    //for (auto i:loaders){
+    //    if (!dynamic_cast(i)->isLoaded) {
+    //        cerr<<"Rotor: all loaders must be populated before rendering"<(find_node("video_output"));
+    for (auto f: find_nodes("video_feedback")){
+        (dynamic_cast(f))->set_feedback(&(video_output->image));
+    }
+    //
+    //setup defaults
+    int bitRate=5000000;
+    AVCodecID codecId=AV_CODEC_ID_H264; //MPEG4;
+    std::string container ="mp4";
+
+    //at the moment it crashes if you render before audio is loaded and also on 2nd render
+    libav::exporter exporter;
+
+    float spct=100.0f/duration;
+
+    if (exporter.setup(outW,outH,bitRate,framerate,container)) { //codecId,
+        if (exporter.record(output_filename)) {
+
+            libav::audio_decoder audioloader;
+
+            bool usingaudio=audioloader.open(audio_filename);
+            float *avframe=nullptr;
+
+            Logger& logger = Logger::get("Rotor");
+            logger.information("Video_output rendering "+output_filename+": "+toString(duration)+" seconds at "+toString(framerate)+" fps, audio frame size: "+toString(exporter.get_audio_framesize()));
+            //25fps video and 43.06640625fps audio? hmm
+            //how to get the timecodes correct for the interleaved files
+
+            struct timeval start, end;
+
+            gettimeofday(&start, NULL);
+
+            uint16_t *audioframe=nullptr;
+            uint16_t *audio=nullptr;
+            int samples_in_frame;
+
+            if (usingaudio){