/*  requirement driven design

    do we store graphs as files or in a db with UUID as key?
    we traverse the graph as recursive function calls until we satisfy all dependencies

    NO NODE HAS MORE THAN ONE OUTPUT
    WE DON'T LINK TO AN OUTPUT OBJECT, WE LINK TO THE NODE - GET_OUTPUT IS THE RENDER FUNCTION
    MORE THAN ONE NODE CAN LINK TO THE SAME OUTPUT
    NODES CACHE AT LEAST ONE FRAME

    ??the only node with more than 1 output is audio?
    ??let's rethink this
    ??audio analysis nodes can be separate - they can all load from the same audio file - we're gonna have to process each pass
    ??output splitter? channel splitter? these can be done as 1 object per channel?
    ??I think so

    settings - how do we deal with settings being controllable?
    signal inputs can have a gui representation as well
    other gui items don't have an input
    scaling to come

    time is always in floating-point seconds - time has to be requested when rendering - either a preview or a render
    what about testing a float for equality? maybe we should look at time in ints (frames)?
    - what does this imply? is it easier to have a function like:
        bool Same_frame(float time1, float time2);
    (see the same_frame() sketch after Frame_spec below)

    nb where a signal enters a channel comp input it is duplicated - so nodes should cache a value (more for time effects)

    sql stuff
    NB best way to use is:
        interface uploads audio and makes thumbnail;
        graph determines what kind of audio analysis is used by referring to plugins

    jesus where is it hanging in the frigging debugger
    GOOD GOOD GOOD

    next - build signal_output and make a working chain with dummy data
    main definitions of libavcodec.h are in utils.c
*/

#include <string>
#include <vector>
#include <map>
#include <unordered_map>
#include <cstdint>
#include <cstring>

#include "Poco/Net/HTTPServer.h"
#include "Poco/Net/HTTPResponse.h"
#include "Poco/UUID.h"
#include "Poco/UUIDGenerator.h"
#include "Poco/Notification.h"
#include "Poco/NotificationCenter.h"
#include "Poco/Observer.h"
#include "Poco/ThreadPool.h"
#include "Poco/Thread.h"
#include "Poco/Task.h"
#include "Poco/Runnable.h"
#include "Poco/Mutex.h"
#include "Poco/Random.h"
#include "Poco/AutoPtr.h"
#include "Poco/File.h"

#include <deque>

using Poco::UUID;
using Poco::UUIDGenerator;
using Poco::Net::HTTPResponse;

/*
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
//#include stops the compiler error but causes a linker error. does libavcodec need to be statically linked?
}
*/

#include "ofxMovieExporter.h"

#define AUDIO_INBUF_SIZE 20480
#define AUDIO_REFILL_THRESH 4096

#include "vampHost.h"
#include "xmlIO.h"
//#include "avCodec.h"

namespace Rotor {

//render context states
#define IDLE             0
#define ANALYSING_AUDIO  1
#define AUDIO_READY      2
#define CREATING_PREVIEW 3
#define PREVIEW_READY    4
#define RENDERING        5
#define RENDER_READY     6

//work queue commands
#define ANALYSE_AUDIO 1
#define PREVIEW       2
#define RENDER        3

//forward declarations
class Node;
class Signal_node;
class Image_node;

//http://blog.tomaka17.com/2012/03/libavcodeclibavformat-tutorial/
struct Packet
{
    explicit Packet(AVFormatContext* ctxt = nullptr)
    {
        av_init_packet(&packet);
        packet.data = nullptr;
        packet.size = 0;
        if (ctxt) reset(ctxt);
    }

    Packet(Packet&& other) : packet(std::move(other.packet))
    {
        other.packet.data = nullptr;
    }

    ~Packet()
    {
        if (packet.data) av_free_packet(&packet);
    }

    void reset(AVFormatContext* ctxt)
    {
        if (packet.data) av_free_packet(&packet);
        if (av_read_frame(ctxt, &packet) < 0) packet.data = nullptr;
    }

    AVPacket packet;
};

class Time_spec {
public:
    Time_spec(float _seconds, float _framerate) { seconds=_seconds; framerate=_framerate; };
    float seconds;
    float framerate;
    //the time one frame earlier, at this framerate
    Time_spec lastframe() { return Time_spec(seconds-(1.0f/framerate),framerate); }
};

class Frame_spec {
public:
    Frame_spec(float _time, float _framerate, int _w, int _h) { time=_time; framerate=_framerate; w=_w; h=_h; };
    float time;
    float framerate;
    int h,w;
    //the frame one step earlier, same framerate and size
    Frame_spec lastframe() { return Frame_spec(time-(1.0f/framerate),framerate,w,h); }
};
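//Hypothetical sketch for the float-equality question in the notes above: quantise both
//times to integer frame indices at the requested framerate and compare those. The name
//same_frame (the notes call it Same_frame) and the extra framerate argument are
//assumptions, not settled design.
inline bool same_frame(float time1, float time2, float framerate)
{
    //plain truncation is enough here because render times are never negative
    return ((int)(time1*framerate))==((int)(time2*framerate));
}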
class Render_status {
public:
    int id;
    float progress;
};

class Render_requirements {
public:
    int num_performances;
    int num_clips;
};

class Command_response {
public:
    Command_response() { status=Poco::Net::HTTPResponse::HTTP_OK; }
    std::string description;
    Poco::Net::HTTPResponse::HTTPStatus status;
};

class Input {
public:
    Input(const string &_desc): connection(nullptr),description(_desc) {};
    Node* connection;
    string description;
};

class Image_input: public Input {
public:
    bool connect(Image_node *source);
    Image_input(const string &_desc): Input(_desc) {};
};

class Signal_input: public Input {
public:
    bool connect(Signal_node *source);
    Signal_input(const string &_desc): Input(_desc) {};
};

class Node {
public:
    virtual Node* clone(map<string,string> &_settings)=0;

    UUID uid;   //every usable node has a UUID
    int id;

    vector<Signal_input*> inputs;   //simple nodes can have signal inputs; output depends on node type
    void create_signal_input(const string &description) { inputs.push_back(new Signal_input(description)); };

    string description;
    string type;
    string output_type;
    string ID;

    //look up a key in the settings map, falling back to a default
    string check(map<string,string> &settings, string key, string def="")
    {
        if (settings.find(key)!=settings.end()) return settings[key];
        else return def;
    };

    //settings common to every node type
    void base_settings(map<string,string> &settings)
    {
        description=check(settings,"description");
        type=check(settings,"type");
        output_type=check(settings,"output");
        ID=check(settings,"ID");
    }
};

class Image {
public:
    Image() { zero(); };
    Image(int _w, int _h) { zero(); setup(_w,_h); };
    ~Image() { free(); };

    void free()
    {
        if (RGBdata) delete[] RGBdata;
        if (Adata) delete[] Adata;
        if (Zdata) delete[] Zdata;
        zero();
    }

    void zero()
    {
        RGBdata=nullptr;
        Adata=nullptr;
        Zdata=nullptr;
        w=0;
        h=0;
    }

    //(re)allocate buffers; returns true if the size actually changed
    bool setup(int _w, int _h)
    {
        if (w!=_w||h!=_h) {
            free();
            w=_w;
            h=_h;
            RGBdata=new uint8_t[w*h*3];
            Adata=new uint8_t[w*h];
            Zdata=new uint16_t[w*h];
            return true;
        }
        else return false;
    }

    uint8_t *RGBdata;
    uint8_t *Adata;
    uint16_t *Zdata;

private:
    int h,w;
};

class Signal_node: public Node {
public:
    virtual float get_output(const Time_spec &time) { return 0.0f; };
};
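//Hypothetical sketch of the "nodes cache at least one frame" requirement from the notes:
//when one output feeds several inputs (e.g. both inputs of a channel comp), the upstream
//chain should only be evaluated once per frame. Signal_cached is illustrative only and is
//not one of the node types the graph loader knows about.
class Signal_cached: public Signal_node {
public:
    Signal_cached() { cached_time=-1.0f; cached_value=0.0f; create_signal_input("in"); };
    Signal_cached* clone(map<string,string> &_settings) { return new Signal_cached(); };
    float get_output(const Time_spec &time)
    {
        if (!inputs[0]->connection) return 0.0f;
        if (!same_frame(time.seconds,cached_time,time.framerate)) {
            //new frame: pull the upstream chain once and remember the result
            cached_value=((Signal_node*)(inputs[0]->connection))->get_output(time);
            cached_time=time.seconds;
        }
        return cached_value;
    }
private:
    float cached_time,cached_value;
};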
class Image_node: public Node {
public:
    vector<Image_input*> image_inputs;  //image nodes also have image inputs and outputs
    void create_image_input(const string &description) { image_inputs.push_back(new Image_input(description)); };

    virtual Image *get_output(const Frame_spec &frame)=0;
    /*{
        //sample implementation
        //do something with the inputs and then
        return ((Image_node*)(image_inputs[0]->connection))->get_output(frame);
    }*/

    Image *get_preview(const Frame_spec &frame);

    Image *image;   //this can be privately allocated or just passed on, as the node sees fit

private:
    float image_time;
};

class Base_audio_processor: public Signal_node {
public:
    virtual int process_frame(uint8_t *data, int samples)=0;
    virtual bool init(int _channels, int _bits, int _samples, int _rate)=0;
    virtual void cleanup()=0;
    int channels,bits,samples,rate;
};

//actual nodes-------------------------------------------------

class Audio_analysis: public Base_audio_processor {
public:
    Audio_analysis() {};
    Audio_analysis(map<string,string> &settings)
    {
        base_settings(settings);
        soname=check(settings,"soname");
        id=check(settings,"id");
        outputNo=ofToInt(check(settings,"output","0"));
    };
    Audio_analysis* clone(map<string,string> &_settings) { return new Audio_analysis(_settings); };

    bool init(int _channels, int _bits, int _samples, int _rate);
    void cleanup();
    int process_frame(uint8_t *data, int samples_in_frame);

    //interpolate between the two analysis features either side of the requested time
    float get_output(const Time_spec &time)
    {
        if (analyser.features.size()) {
            auto i=analyser.features.lower_bound(time.seconds);
            if (i!=analyser.features.end()) {
                float lk=i->first;
                int ln=i->second;
                if (++i!=analyser.features.end()) {
                    float uk=i->first;
                    return (((time.seconds-lk)/(uk-lk))+ln);
                }
                else return (float)ln;
            }
        }
        return 0.0f;
    }

    void print_features();

private:
    string soname,id;   //note: this id shadows Node::id
    int outputNo;
    vampHost::Analyser analyser;
};

class Signal_divide: public Signal_node {
public:
    Signal_divide() {};
    Signal_divide(map<string,string> &settings)
    {
        base_settings(settings);
        divide_amount=ofToFloat(check(settings,"amount"));
    };
    Signal_divide* clone(map<string,string> &_settings) { return new Signal_divide(_settings); };

    float get_output(const Time_spec &time)
    {
        if (inputs[0]->connection) {
            return (((Signal_node*)inputs[0]->connection)->get_output(time))/divide_amount;
        }
        return 0.0f;
    }

    float divide_amount;
};

class Is_new_integer: public Signal_node {
public:
    Is_new_integer() {};
    Is_new_integer(map<string,string> &settings) { base_settings(settings); };
    Is_new_integer* clone(map<string,string> &_settings) { return new Is_new_integer(_settings); };

    //outputs 1.0 on the frame where the input signal reaches a new integer value
    float get_output(const Time_spec &time)
    {
        if (inputs[0]->connection) {
            float s1=(((Signal_node*)(inputs[0]->connection))->get_output(time));
            float s2=(((Signal_node*)(inputs[0]->connection))->get_output(time.lastframe()));
            if (((int)s1)>((int)s2)) {
                return 1.0f;
            }
        }
        return 0.0f;
    }
};

class Signal_output: public Signal_node {
public:
    Signal_output() {};
    Signal_output(map<string,string> &settings) { base_settings(settings); };
    Signal_output* clone(map<string,string> &_settings) { return new Signal_output(_settings); };

    bool render(const float duration, const float framerate, string &xml_out);

    float get_output(const Time_spec &time)
    {
        if (inputs[0]->connection) {
            return ((Signal_node*)(inputs[0]->connection))->get_output(time);
        }
        else return 0.0f;
    }
};
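//Hypothetical sketch for the "working chain with dummy data" item in the notes at the top:
//a stand-in signal source that needs no audio analysis. Signal_dummy, its "rate" setting
//and the ramp it produces are test-only assumptions, not one of the real node types.
class Signal_dummy: public Signal_node {
public:
    Signal_dummy() { rate=1.0f; };
    Signal_dummy(map<string,string> &settings)
    {
        base_settings(settings);
        rate=ofToFloat(check(settings,"rate","1.0"));   //ramp slope, in output units per second
    };
    Signal_dummy* clone(map<string,string> &_settings) { return new Signal_dummy(_settings); };
    float get_output(const Time_spec &time) { return time.seconds*rate; }   //simple rising ramp
private:
    float rate;
};
//e.g. Signal_dummy -> Is_new_integer -> Signal_output gives a chain that fires once per
//second without touching the vamp analyser.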
class Testcard: public Image_node {
public:
    Testcard() { image=nullptr; };
    Testcard(map<string,string> &settings)
    {
        base_settings(settings);
        image=new Image();
    };
    ~Testcard() { delete image; };
    Testcard* clone(map<string,string> &_settings) { return new Testcard(_settings); };

    Image *get_output(const Frame_spec &frame)
    {
        image->setup(frame.w,frame.h);  //always redraw the testcard, whether or not setup() reallocated
        float ws=(255.0f/frame.w);
        float hs=(255.0f/frame.h);
        for (int i=0;i<frame.h;i++) {
            for (int j=0;j<frame.w;j++) {
                image->RGBdata[(i*frame.w+j)*3]=(uint8_t)((int)((i+(frame.time*25.0f)*hs))%255);
                image->RGBdata[((i*frame.w+j)*3)+1]=(uint8_t)((int)((j+(frame.time*100.0f)*hs))%255);
                image->RGBdata[((i*frame.w+j)*3)+2]=(uint8_t)(0);
                image->Adata[i*frame.w+j]=(uint8_t)255;
                image->Zdata[i*frame.w+j]=(uint16_t)512;    //1.0 in fixed point 8.8 bits
            }
        }
        return image;
    }

private:
    Image *image;   //note: shadows Image_node::image
};

class Video_output: public Image_node {
public:
    Video_output() {};
    Video_output(map<string,string> &settings)
    {
        base_settings(settings);
        exporter=new ofxMovieExporter();
    };
    Video_output* clone(map<string,string> &_settings) { return new Video_output(_settings); };

    //pass the connected image chain straight through
    Image *get_output(const Frame_spec &frame)
    {
        if (image_inputs[0]->connection) {
            return ((Image_node*)(image_inputs[0]->connection))->get_output(frame);
        }
        else return nullptr;
    };

    bool render(const float duration, const float framerate, const string &output_filename, const string &audio_filename);

private:
    ofxMovieExporter *exporter;
};

//-------------------------------------------------------------------

class Node_factory {
public:
    Node_factory();

    //register a prototype node to clone for a given type string
    void add_type(string type, Node* proto) { type_map[type]=proto; };

    //clone the prototype named by settings["type"], or return nullptr if it is unknown
    Node *create(map<string,string> &settings)
    {
        if (settings.find("type")!=settings.end()) {
            if (type_map.find(settings["type"])!=type_map.end()) {
                return type_map[settings["type"]]->clone(settings);
            }
        }
        return nullptr;
    };

private:
    unordered_map<string,Node*> type_map;
};

class Graph {
public:
    Graph() { duration=10.0f; loaded=false; };
    Graph(const string& _uid, const string& _desc) { init(_uid,_desc); };
    void init(const string& _uid, const string& _desc) { uid=_uid; description=_desc; duration=10.0f; };

    string uid;         //every version of a graph has a UUID, no particular need to actually read its data(?)
                        //?? is it faster than using strings??
    string description;

    std::unordered_map<string,Node*> nodes;

    vector<Node*> find_nodes(const string &type)
    {
        vector<Node*> found;
        for (std::unordered_map<string,Node*>::iterator it=nodes.begin();it!=nodes.end();++it) {
            if (it->second->type==type) found.push_back(it->second);
        }
        return found;
    };

    Node* find_node(const string &type)
    {
        for (std::unordered_map<string,Node*>::iterator it=nodes.begin();it!=nodes.end();++it) {
            if (it->second->type==type) return it->second;
        }
        return nullptr; //can be tested against
    };

    bool signal_render(string &signal_xml, const float framerate)
    {
        if (find_node("signal_output")) {
            Signal_output *signal_output=dynamic_cast<Signal_output*>(find_node("signal_output"));
            return signal_output->render(duration,framerate,signal_xml);
        }
        else return false;
    }

    bool video_render(const string &output_filename, const string &audio_filename, const float framerate)
    {
        if (find_node("video_output")) {
            Video_output *video_output=dynamic_cast<Video_output*>(find_node("video_output"));
            return video_output->render(duration,framerate,output_filename,audio_filename);
        }
        else return false;
    }

    int load(Poco::UUID uid);
    bool load(string &graph_filename);
    UUID save();    //save to DB, returns UUID of saved graph

    bool loaded;
    float duration;

    const string toString();

private:
    Node_factory factory;
    xmlIO xml;
};

class Audio_thumbnailer: public Base_audio_processor {
public:
    Audio_thumbnailer()
    {
        height=32;
        width=64;   //fit
        data=new uint8_t[height*width];
        memset(data,0,height*width);
    };
    ~Audio_thumbnailer() { delete[] data; };
    Audio_thumbnailer* clone(map<string,string> &_settings) { return new Audio_thumbnailer(); };

    bool init(int _channels, int _bits, int _samples, int _rate);
    void cleanup() {};
    int process_frame(uint8_t *data, int samples_in_frame);
    string print();

    uint8_t *data;
    int height,width,samples_per_column;
    int column,out_sample,sample,samples;
    int offset;
    double scale,accum;
};
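//Hypothetical sketch of how prototypes might be registered with the factory and a node
//built from a settings map. Only "signal_output" and "video_output" appear as type strings
//elsewhere in this file; the other strings, and both helper functions, are assumptions for
//illustration - the real registrations presumably belong in Node_factory's constructor.
inline void register_example_nodes(Node_factory &factory)
{
    factory.add_type("audio_analysis",new Audio_analysis());
    factory.add_type("signal_divide",new Signal_divide());
    factory.add_type("is_new_integer",new Is_new_integer());
    factory.add_type("signal_output",new Signal_output());
    factory.add_type("testcard",new Testcard());
    factory.add_type("video_output",new Video_output());
}

inline Node *create_example_node(Node_factory &factory)
{
    //create() looks up the prototype named by settings["type"] and clones it with these settings
    map<string,string> settings;
    settings["type"]="signal_divide";
    settings["description"]="divide beats down to bars";
    settings["amount"]="4";
    return factory.create(settings);
}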
class Render_context: public Poco::Task {
    //Poco task object
    //manages a 'patchbay'
    //high level interfaces for the wizard
    //and low level interface onto the graph
public:
    Render_context(const std::string& name): Task(name)
    {
        audio_thumb=new Audio_thumbnailer();
        state=IDLE;
        output_framerate=25.0f;
    };

    void runTask();
    void add_queue(int item);
    Command_response session_command(const std::vector<std::string>& command);
    Render_status get_status();
    void cancel();  //interrupt a locking process

    int make_preview(int nodeID, float time);   //starts a frame preview - returns status code - how to retrieve?
    bool load_audio(const string &filename, vector<Base_audio_processor*> processors);
    Render_requirements get_requirements();
    int load_video(int num, string &filename);  //can be a performance or a clip

private:
    int state;
    double progress;    //for a locking process: audio analysis or rendering
                        //the thread only does one thing at once
    std::deque<int> work_queue;
    Poco::Mutex mutex;  //lock for access from the parent thread

    std::string audio_filename;
    std::string output_filename;

    Audio_thumbnailer *audio_thumb;
    vampHost::QMAnalyser audio_analyser;

    Graph graph;
    Node_factory factory;

    float output_framerate;
};

}   //namespace Rotor

/*  coding style
    Types begin with capitals: 'New_type'
    variables/instances use lower case with underscore as a separator
*/
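//Hypothetical usage sketch (not part of this file's API): how the calling layer might run
//a Render_context. The example_run_session() name, the queued command order and the use of
//Poco::ThreadPool are assumptions; Poco::Task is a Runnable, so the pool can run it, though
//a Poco::TaskManager owning the task would be the more usual arrangement.
inline void example_run_session()
{
    Rotor::Render_context *context=new Rotor::Render_context("example session");
    context->add_queue(ANALYSE_AUDIO);  //queue wizard commands for the task thread to service
    context->add_queue(RENDER);
    Poco::ThreadPool::defaultPool().start(*context);    //runs Task::run(), which calls runTask()
    Poco::ThreadPool::defaultPool().joinAll();
    context->release();     //Task is reference counted - release the initial reference rather than delete
}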