#ifndef ROTOR_H #define ROTOR_H /*------------------------ Definitions of base classes and types for rotor rendering graph -------------------------*/ /*------------------------ Aims -realtime, what does this entail? patchbay must be working incremental graph editing - examine new graph and make alterations window manager network stream? rtp? realtime architecture - maybe a tiny amount of buffering framerate limiting -undefined number of message types - a mssage is a pointer to a struct -documentation embedded in nodes -------------------------*/ /*------------------------ Requirements -stretch a video to fit a segment either use a signal as a playhead {seconds|stretched or deliver segment information with a signal -------------------------*/ #include #include #include #include #include #include #include #include "Poco/Net/HTTPResponse.h" #include "Poco/Logger.h" #include "Poco/File.h" #include "Poco/Path.h" #include "Poco/Base64Encoder.h" #include "Poco/FileStream.h" #include "Poco/CountingStream.h" #include "Poco/StreamCopier.h" #include "xmlIO.h" #include "utils.h" #include "cvimage.h" #include "libavwrapper.h" //using namespace cv; namespace Rotor { //forward declarations class Node; class Signal_node; class Image_node; class Parameter; class Audio_frame{ public: Audio_frame(uint16_t *_samples,int _channels,int _numsamples){ samples=_samples; channels=_channels; numsamples=_numsamples; } uint16_t *samples; int channels,numsamples; }; class Time_spec{ public: Time_spec(){}; Time_spec(float _time,float _framerate,float _duration,Audio_frame *_audio=nullptr){ time=_time; framerate=_framerate; duration=_duration; audio=_audio;}; float time; //num/denom ? 
float framerate; float duration; Audio_frame *audio; Time_spec lastframe() const{ return Time_spec(time-(1.0f/framerate),framerate,duration); } int frame(){ return (int)((time*framerate)+0.5); //rounded to the nearest frame } }; class Frame_spec: public Time_spec{ public: Frame_spec(float _time,float _framerate,float _duration,int _w,int _h,Audio_frame *_audio=nullptr) { time=_time; framerate=_framerate; duration=_duration; w=_w; h=_h;audio=_audio;}; Frame_spec(int _frame,float _framerate,float _duration,int _w,int _h,Audio_frame *_audio=nullptr) { time=((float)_frame)/_framerate; framerate=_framerate; duration=_duration; w=_w; h=_h;audio=_audio;}; int h,w; Frame_spec lastframe(){ return Frame_spec(time-(1.0f/framerate),framerate,duration,w,h); } }; class Colour{ public: Colour(){ r=g=b=0; } Colour(int c){ r=c&0xFF; g=(c&0xFF00)>>8; b=(c&0xFF0000)>>16; } Colour(std::string s){ r=(uint8_t)hexToChar(s.substr(0,2)); g=(uint8_t)hexToChar(s.substr(2,2)); b=(uint8_t)hexToChar(s.substr(4,2)); } float Rfloat(){ return ((float)r)/255.0f; } float Gfloat(){ return ((float)g)/255.0f; } float Bfloat(){ return ((float)b)/255.0f; } uint8_t r,g,b; }; class Command_response{ public: Command_response() { status=Poco::Net::HTTPResponse::HTTP_OK; } std::string description; Poco::Net::HTTPResponse::HTTPStatus status; }; class Input{ public: Input(const string &_desc,const string &_title): connection(nullptr),description(_desc),title(_title){}; Node* connection; string description; string title; }; class Image_input: public Input{ public: virtual ~Image_input(){}; bool connect(Node *source); Image_input(const string &_desc,const string &_title,Node* _connect): Input(_desc,_title){ connect(_connect); }; Image* get(const Frame_spec& time); }; class Signal_input: public Input{ public: virtual ~Signal_input(){}; bool connect(Node *source); Signal_input(const string &_desc,const string &_title,Node* _connect): Input(_desc,_title){ connect(_connect); }; float get(const Time_spec& time); }; 
/*------------------------------------------------------------------
 NOTE(review): everything from here to the end of the file is preserved
 byte-for-byte.  This region was damaged by HTML-style stripping that
 DELETED text: every template argument list is gone (`std::vector _vals`,
 `map &settings`, `unordered_map parameters`, ...), `cerr<<...` output
 chains lost arbitrary spans (e.g. `cerr<<"setting attribute '"<intVal`),
 comparison operators swallowed following code (`while (iaudio[i-1])`),
 `&image;` became the entity `ℑ`, and the whole Video_cycler class head
 and default constructor vanished between LUT::apply and
 `Video_cycler(map &settings)`.  Reconstruction would be guesswork, so the
 damaged text is left untouched and only annotated; recover the original
 from version control before attempting repairs.
 Parameter: a Signal_input with a static value/min/max used when no node
 is connected.  Attribute: a static (possibly enumerated) string setting;
 init() maps the string onto a 1-based index into `vals` (0 = not found).
 BUG(review): `it=it = find(...)` in Attribute::init is a duplicated-token
 typo even before the mangling.
 Node: base graph node -- owns its inputs/parameters/attributes (deleted
 in ~Node), with create_* helpers and find_setting/base_settings for
 loading from a string->string settings map.
 BUG(review): the `create_attribute(string *alias,...)` and
 `(int *alias,...)` overloads assign the LOCAL pointer parameter
 (`alias=&(...)`), which never updates the caller's pointer; they would
 need a pointer-to-pointer (or reference-to-pointer) to work as intended.
------------------------------------------------------------------*/
class Parameter: public Signal_input{ public: virtual ~Parameter(){}; void init(const float &_val){ value=_val; } Parameter(const string &_type,const string &_desc,const string &_title,float _value,float _min,float _max,Node* _connect): Signal_input(_desc,_title,_connect),value(_value),min(_min),max(_max),type(_type){}; float value,min,max; float get(const Time_spec& time); string type; }; class Attribute{ //description of a static attribute which can be an enumerated string array public: virtual ~Attribute(){}; Attribute(const string &_desc,const string &_title,const string &_value,std::vector _vals={}): description(_desc),title(_title),value(_value),intVal(0){ vals=_vals; init(_value); }; void init(const string &_key){ //inits int value from set::string vals index value=_key; std::vector::iterator it=it = find(vals.begin(),vals.end(),value); if (it!=vals.end()){ intVal = std::distance(vals.begin(),it)+1; //using 1-index for enums } else intVal=0; } string value,description,title; std::vector vals; int intVal; }; class Node{ public: Node(){duplicate_inputs=false;}; virtual Node* clone(map &_settings)=0; //pure virtual virtual ~Node(){ for (auto a: attributes) delete a.second; for (auto p: parameters) delete p.second; for (auto s: inputs) delete s; }; vector inputs; //simple node can have signal inputs, output depends on node type unordered_map parameters; //linked parameters can convert from settings to inputs unordered_map attributes; void create_signal_input(const string &_desc,const string &_title,Node* _connect=nullptr ) { inputs.push_back(new Signal_input(_desc,_title,_connect)); }; void create_parameter(const string &_name,const string &_type,const string &_desc,const string &_title,float _value=1.0f,float _min=0.0f,float _max=0.0f,Node* _connect=nullptr) { parameters[_name]=new Parameter(_type,_desc,_title,_value,_min,_max,_connect); }; void create_attribute(const string &_attr,const string &_desc,const string &_title,const string &_value,std::vector
/* NOTE(review): declaration continues across this break; `base_settings`
   below lost chunks of its cerr logging to the mangling.  Signal_node and
   Image_node define the get_output/update/output pull protocol; Image_node
   additionally owns image_inputs (deleted in its destructor). */
_vals={}) { attributes[_attr]=new Attribute(_desc,_title,_value,_vals); }; void create_attribute(string *alias,const string &_attr,const string &_desc,const string &_title,const string &_value,std::vector _vals={}) { attributes[_attr]=new Attribute(_desc,_title,_value,_vals); alias=&(attributes[_attr]->value); }; void create_attribute(int *alias,const string &_attr,const string &_desc,const string &_title,const string &_value,std::vector _vals={}) { attributes[_attr]=new Attribute(_desc,_title,_value,_vals); alias=&(attributes[_attr]->intVal); }; string description; string type; string ID; string UID; string title; bool duplicate_inputs; string find_setting(map &settings,string key,string def=""){ if (settings.find(key)!=settings.end()) return settings[key]; else return def;}; float find_setting(map &settings,string key,float def){ if (settings.find(key)!=settings.end()) return toFloat(settings[key]); else return def;}; int find_setting(map &settings,string key,int def){ if (settings.find(key)!=settings.end()) return toInt(settings[key]); else return def;}; void base_settings(map &settings) { description=find_setting(settings,"description"); type=find_setting(settings,"type"); ID=find_setting(settings,"ID"); title=find_setting(settings,"title"); for (auto a: attributes){ if (find_setting(settings,a.first,"")!="") { attributes[a.first]->init(find_setting(settings,a.first,"")); cerr<<"setting attribute '"<intVal<<")"<init(find_setting(settings,p.first,0.0f)); cerr<<"setting parameter '"<get(time); } } void set_parameter(const std::string &key,const std::string &value){ if (parameters.find(key)!=parameters.end()) parameters[key]->value=toFloat(value); }; }; class Signal_node: public Node{ public: virtual ~Signal_node(){}; const float get_output(const Time_spec &time) { update(time); return output(time); }; virtual const float output(const Time_spec &time) { return 0.0f; }; }; class Image_node: public Node{ public: virtual ~Image_node(){ for (auto i: image_inputs)
/* NOTE(review): LUT is an 8-bit lookup-table (levels + gamma) helper; as
   written every member including the constructor is private, so it is
   unusable unless befriended elsewhere -- presumably an access specifier
   was lost.  The Video_cycler class head, #defines for its CYCLER_* modes
   and its default constructor were deleted by the mangling; only the
   settings-constructor onward survives.  Its output() mixes or cuts
   between image inputs driven by inputs[0]; `return ℑ` spans below
   were originally `return &image;`. */
delete i; }; vector image_inputs; //image node also has image inputs and outputs void create_image_input(const string &_title,const string &_desc,Node* _connect=nullptr) { image_inputs.push_back(new Image_input(_desc,_title,_connect)); }; Image *get_image_output(const Frame_spec &frame) { image.setup(frame.w,frame.h); update((Time_spec)frame); return output(frame); } virtual const Image *output(const Frame_spec &frame)=0; Image image; private: float image_time; //? could be used to detect image reuse? }; class LUT { LUT(){ lut=nullptr; }; ~LUT(){if (lut) { delete[] lut;} }; void generate(float black_in,float white_in,float black_out,float white_out,float gamma){ //can check here if anything has changed if (lut) delete[] lut; lut=new unsigned char[256]; float fltmax=(255.0f/256.0f); for (int i=0;i<256;i++){ lut[i]=(unsigned char)(((pow(min(fltmax,max(0.0f,(((((float)i)/256.0f)-black_in)/(white_in-black_in)))),(1.0/gamma))*(white_out-black_out))+black_out)*255.0f); } } void apply(const cv::Mat& in,cv::Mat &out){ //facility to apply to other images for inherited classes out.create(in.rows,in.cols,in.type()); for (int i=0;i &settings):Video_cycler() { base_settings(settings); }; ~Video_cycler(){}; bool load(const string &filename); Image *output(const Frame_spec &frame){ Frame_spec inframe=frame; switch (attributes["time_mode"]->intVal){ case CYCLER_rel: break; case CYCLER_stretch: break; } if (attributes["mode"]->intVal==CYCLER_mix&&image_inputs.size()>1){ int im1=((int)inputs[0]->get((Time_spec)frame))%image_inputs.size(); int im2=(im1+1)%image_inputs.size(); float f=fmod(inputs[0]->get((Time_spec)frame),1.0f); Image *in1=image_inputs[im1]->get(frame); if (in1){ Image *in2=image_inputs[im2]->get(frame); if (in2){ image=(*in1); image*=(1.0f-f); Image i2=(*in2); i2*=f; image+=i2; return ℑ } return in1; } return nullptr; } //cut mode for (int i=0;iget((Time_spec)frame)+i)%image_inputs.size(); Image *in=image_inputs[whichinput]->get(frame); if (in) return in; } return
/* NOTE(review): Video_loader decodes a video file via libav::video_decoder,
   with playhead signal override, speed/framerate parameters, and
   frame/blend + play/stretch modes (the VIDEO* #defines below are fused
   onto one line by the mangling and need re-splitting). */
nullptr; } Video_cycler* clone(map &_settings) { return new Video_cycler(_settings);}; }; #define VIDEOFRAMES_frame 1 #define VIDEOFRAMES_blend 2 #define VIDEOTIME_play 1 #define VIDEOTIME_stretch 2 //relative timelines used to stretch video //1. make a video position input for video node - seconds and stretch modes //2. video mode that maps to duration- timeline remapping from cycler and others class Video_loader: public Image_node { public: Video_loader(){ create_signal_input("playhead","Playhead"); //floating point control of playback time //if signal is connected it overrides normal playback //time_mode dictates whether control is seconds, or duration create_parameter("speed","number","video playback speed","Speed",1.0f,0.0f,0.0f); create_parameter("framerate","number","framerate override","Frame rate",0.0f,0.0f,0.0f); create_attribute("filename","name of video file to load","File name",""); create_attribute("frame_mode","frame mode","Frame mode","frame",{"frame","blend"}); create_attribute("time_mode","time mode","Time mode","play",{"play","stretch"}); title="Video loader"; description="Loads a video file"; UID="5b64b8ca-2d0a-11e3-92ed-4b7420b40040"; }; Video_loader(map &settings): Video_loader() { base_settings(settings); isLoaded=false; if (attributes["filename"]->value!="") { load(find_setting(settings,"media_path","")+attributes["filename"]->value); } }; ~Video_loader(){}; bool load(const string &filename){ Poco::Logger& logger = Poco::Logger::get("Rotor"); if (isLoaded) { player.cleanup(); ///should be in decoder class?
/* NOTE(review): Video_loader::output computes the wanted source frame from
   either the playhead input or frame.time, then either blends two adjacent
   decoded frames (VIDEOFRAMES_blend) or returns a single frame.  It
   switches on attributes["frame_mode"] but compares against VIDEOTIME_*
   constants -- presumably "time_mode" was intended; verify before use. */
isLoaded=false; } isLoaded=player.open(filename); if (isLoaded){ logger.information("Video_loader loaded "+filename+": "\ +toString(player.get_number_frames())+" frames, "\ +toString(player.get_framerate())+" fps, "\ +toString(player.get_width())+"x"+toString(player.get_height())\ +", channels:"+toString(player.get_number_channels())); lastframe=-2; return true; } logger.error("Video_loader failed to load "+filename); return false; } Image *output(const Frame_spec &frame){ if (isLoaded){ float clipframerate=(parameters["framerate"]->value==0.0f?player.get_framerate():parameters["framerate"]->value); float clipspeed=(clipframerate/frame.framerate)*parameters["speed"]->value; float wanted; if (inputs[0]->connection) { //using playhead //should speed affect it? //if you want absolute control then you just want absolute control? switch (attributes["frame_mode"]->intVal){ case VIDEOTIME_play: wanted=fmod(inputs[0]->get((Time_spec)frame)*frame.framerate*clipspeed,(float)player.get_number_frames()); break; case VIDEOTIME_stretch: wanted=fmod(fmod(inputs[0]->get((Time_spec)frame),1.0f)*((float)player.get_number_frames())*clipspeed,(float)player.get_number_frames()); break; } } else { switch (attributes["frame_mode"]->intVal){ case VIDEOTIME_play: wanted=fmod(frame.time*frame.framerate*clipspeed,(float)player.get_number_frames()); break; case VIDEOTIME_stretch: wanted=fmod((frame.time/frame.duration)*((float)player.get_number_frames())*clipspeed,(float)player.get_number_frames()); break; } } if (attributes["frame_mode"]->intVal==VIDEOFRAMES_blend){ if (((int)wanted)!=lastframe){ //get a new pair of frames possibly by switching the next one //darn peculiar, as if copying wasn't actually copying if ((lastframe==(((int)wanted)-1))&&(in2.w>0)) { in1=in2; } else { player.fetch_frame(frame.w,frame.h,(int)wanted); //use a temp image because setup_fromRGB just copies pointer temp.setup_fromRGB(frame.w,frame.h,player.frame->Data[0],player.frame->Linesize[0]-(frame.w*3)); in1=temp; }
/* NOTE(review): Video_output applies begin/end fade or blank handling based
   on an audio-envelope analysis (create_envelope scans for first/last
   non-silent sample and adjacent peaks).  BUG(review): create_envelope's
   backward scan starts at `i=audio.size()` and immediately reads audio[i]
   -- out of bounds even before the mangling ate its comparison operators. */
player.fetch_frame(frame.w,frame.h,((int)wanted+1)%max(1,player.get_number_frames())); temp.setup_fromRGB(frame.w,frame.h,player.frame->Data[0],player.frame->Linesize[0]-(frame.w*3)); in2=temp; lastframe=wanted; } float amt=1.0f-(wanted-((int)wanted)); //cout<<"video loader time:"<value); if (image.w>0) return ℑ //just return the previous frame if possible else return nullptr; } image.setup_fromRGB(frame.w,frame.h,player.frame->Data[0],player.frame->Linesize[0]-(frame.w*3)); } } return ℑ } return nullptr; }; Video_loader* clone(map &_settings) { return new Video_loader(_settings);}; bool isLoaded; private: //ffmpegsource::decoder player; libav::video_decoder player; int lastframe; Image in1,in2,in2t,temp; //for blend frames; }; class Video_output: public Image_node { //Video_output 'presents' the output movie. Aspect ratio, bars, fadein/fadeout would happen here public: Video_output(){ create_image_input("image to output","Image input"); create_attribute("begin_mode","mode to begin movie","Begin mode","cut",{"cut","blank silence","fade peak"}); create_attribute("end_mode","mode to end movie","End mode","cut",{"cut","blank silence","fade peak"}); title="Video output"; description="Output to video"; start_silence=start_peak=end_silence=end_peak=-1.0f; silence_threshold=0.01f; UID="693d2220-2d0a-11e3-9312-232908c3cc33"; }; Video_output(map &settings):Video_output() { base_settings(settings); }; ~Video_output(){ }; void create_envelope(const vector &audio){ if (audio.size()){ int i=0; while (iaudio[i-1]) i++; start_peak=((float)i-1)/audio.size(); i=audio.size(); while (i>-1&&audio[i]audio[i+1]) i--; end_peak=((float)i+1)/audio.size(); cerr<<"Video_output sound envelope: silence - "<get(frame); if (in){ //make copy of the image, for feedback //optimise?
/* NOTE(review): Video_feedback replays the previous frame's image.
   BUG(review): its output() dereferences `feedback` (initialised to
   nullptr in the constructor) without a null check -- crashes unless
   set_feedback() was called first.  Node_factory below maps type-name
   strings to prototype nodes and clones them from settings. */
float amount=1.0f; float track_time=frame.time/frame.duration; if (attributes["begin_mode"]->value=="fade peak"||attributes["begin_mode"]->value=="blank silence"){ if (track_timevalue=="fade peak"&&start_peak>start_silence){ amount = (track_time-start_silence)/(start_peak-start_silence); } } if (attributes["end_mode"]->value=="fade peak"||attributes["end_mode"]->value=="blank silence"){ if (track_time>end_silence){ amount=0.0f; } else if (track_time>end_peak&&attributes["end_mode"]->value=="fade peak"&&end_silence>end_peak){ amount = 1.0f-((track_time-end_peak)/(end_silence-end_peak)); } } if (amount<(1.0f/254.0f)){ image.clear(); } image=(*in); if (amount<1.0f){ image*=amount; } //seems to be outputting correctly but not saving frames return ℑ } return nullptr; }; Video_output* clone(map &_settings) { return new Video_output(_settings);}; private: float silence_threshold; float start_silence; float start_peak; float end_silence; float end_peak; }; class Video_feedback: public Image_node { public: Video_feedback(){ title="Video feedback"; description="Repeats output of the last frame"; feedback=nullptr; UID="78edfa28-2d0a-11e3-86c7-9f266fabb10c"; }; Video_feedback(map &settings):Video_feedback() { base_settings(settings); }; ~Video_feedback(){ }; void set_feedback(Image *iptr){ feedback=iptr; } Image *output(const Frame_spec &frame){ if (feedback->RGBdata){ return feedback; } image.setup(frame.w,frame.h); image.clear(); return ℑ }; Video_feedback* clone(map &_settings) { return new Video_feedback(_settings);}; private: Image *feedback; }; //------------------------------------------------------------------- class Node_factory{ public: Node_factory(); ~Node_factory(){ for (auto t:type_map) delete t.second; } void add_type(string type,Node* proto){ type_map[type]=proto; type_map[type]->type=type; }; void add_type(string type,Node* proto,vector &category){ add_type(type,proto); category.push_back(proto); }; Node *create(map &settings){ if
/* NOTE(review): Node_factory::create returns NULL (prefer nullptr) when the
   "type" key is absent or unregistered; list_* declarations are defined
   out of line.  End of namespace Rotor and include guard. */
(settings.find("type")!=settings.end()) { if (type_map.find(settings["type"])!=type_map.end()) { return type_map[settings["type"]]->clone(settings); } } return NULL; }; bool list_node(const string &t,xmlIO XML); void list_node(Rotor::Node* type,xmlIO XML,int i=0); void list_nodes(xmlIO XML); void list_nodes(Json::Value &JSON); void list_categories(xmlIO XML); void list_categories(Json::Value &JSON); private: unordered_map type_map; unordered_map > categories; }; } #endif