summaryrefslogtreecommitdiff
path: root/rotord
diff options
context:
space:
mode:
Diffstat (limited to 'rotord')
-rw-r--r--	rotord/src/graph.cpp	| 30
-rw-r--r--	rotord/src/rotor.h	| 20
2 files changed, 36 insertions, 14 deletions
diff --git a/rotord/src/graph.cpp b/rotord/src/graph.cpp
index 566d7b0..6994614 100644
--- a/rotord/src/graph.cpp
+++ b/rotord/src/graph.cpp
@@ -146,8 +146,8 @@ bool Graph::video_render(const string &output_filename,const double framerate,in
libav::audio_decoder audioloader;
bool usingaudio=audioloader.open(audio_filename);
-
-
+
+
logger.information("Video_output rendering "+namestub+suffix+": "+toString(duration)+" seconds at "+toString(framerate)+" fps, audio frame size: "+toString(exporter.get_audio_framesize()));
//25fps video and 43.06640625fps audio? hmm
//how to get the timecodes correct for the interleaved files
@@ -250,7 +250,7 @@ bool Graph::video_render(const string &output_filename,const double framerate,in
double mtime = ((_end.tv_sec-_start.tv_sec) + (_end.tv_usec-_start.tv_usec)/1000000.0);
logger.information("Video_output: rendered "+namestub+suffix+": in "+toString(mtime)+" seconds");
-
+
for (auto n:nodes) {
if (dynamic_cast<Image_node*>(n.second)){
logger.information(n.second->type+" node '"+n.first+"' took "+toString(n.second->get_time_used())+" seconds");
@@ -297,7 +297,7 @@ bool Graph::video_render2(const string &output_filename,const double framerate,i
}
Video_output *video_output=dynamic_cast<Video_output*>(find_node("video_output"));
-
+
//
//should text handling and verification happen outside of here?
//
@@ -323,7 +323,7 @@ bool Graph::video_render2(const string &output_filename,const double framerate,i
suffix=".mp4";
}
-
+
bool use_dash=false;
if (suffix==".mpd") {
use_dash=true;
@@ -355,8 +355,8 @@ bool Graph::video_render2(const string &output_filename,const double framerate,i
//cerr<<"path= "<<path<<" ,stub= "<<namestub<<" ,suffix= "<<suffix<<endl;
logger.information("Video_output rendering "+namestub+suffix+": "+toString(duration)+" seconds at "+toString(framerate)+" fps, audio frame size: "+toString(exporter.get_audio_framesize()));
-
-
+
+
Image *i;
libav::audio_decoder audioloader;
@@ -401,7 +401,7 @@ bool Graph::video_render2(const string &output_filename,const double framerate,i
}
else exporter.record(path+namestub+suffix);
- while (vf<min(duration,stop*vstep)&&!cancelled){
+ while (vf<min(duration,stop*vstep)&&!cancelled){
vf+=vstep;
progress=vf/duration;
@@ -474,9 +474,13 @@ bool Graph::parseJson(string &data,string &media_path){
//we know the json validates so clear the existing graph
clear();
Node_factory factory;
- analysis_seed=root["seed"].asInt();
- check_audio(root["audio"].asString(),media_path);
- init(root["ID"].asString(),root["description"].asString());
+
+	//this always falls over on DD's 3404.json file — WHY?
+	//is there a way to make a JSON value safe?
+
+ //analysis_seed=root.get("seed","0").asInt();
+ check_audio(root.get("audio","").asString(),media_path);
+ init(root.get("ID","").asString(),root.get("description","").asString());
Json::Value jnodes = root["nodeDefinitions"];
for ( uint32_t i = 0; i < jnodes.size(); ++i ) {
string nodeID=jnodes[i]["id"].asString();
@@ -500,7 +504,7 @@ bool Graph::parseJson(string &data,string &media_path){
Attribute *attr=node->attributes.find(attribute)->second;
if (attr->type=="enum"){
val=jnodes[i]["attributes"][m]["value"].asString();
- attr->init(val);
+ attr->init(val);
}
if (attr->type=="string") {
val=jnodes[i]["attributes"][m]["value"].asString();
@@ -527,7 +531,7 @@ bool Graph::parseJson(string &data,string &media_path){
node->init_attribute(attribute);
//cerr << "Rotor: setting attribute '"<<attribute<<"' of "<<nodeID<<" type "<<attr->type<<" to "<<val<<endl;
cerr << "Rotor: setting attribute '"<<attribute<<"' of "<<nodeID<<" type "<<attr->type<<" to "<<val<<endl;
-
+
}
//settings[attribute]=val;
}
diff --git a/rotord/src/rotor.h b/rotord/src/rotor.h
index 009348a..f54f80d 100644
--- a/rotord/src/rotor.h
+++ b/rotord/src/rotor.h
@@ -533,6 +533,24 @@ namespace Rotor {
unsigned char *lut;
};
//actual nodes-------------------------------------------------
+ class Signal_retimer: public Signal_node {
+ Signal_retimer(){
+ create_signal_input("Signal input","Signal input");
+ create_parameter("transition_length","number","transition length","Transition length",1.0,0.0,0.0);
+ title="Signal retimer";
+			description="Time stretch and offset a signal node";
+ NODEID="f2e0f178-98b7-11e3-aa34-b7b96e4d9fa6";
+ }
+ };
+ class Video_retimer: public Image_node {
+ Video_retimer(){
+ create_image_input("Image input","Image input");
+ create_parameter("transition_length","number","transition length","Transition length",1.0,0.0,0.0);
+ title="Video retimer";
+ description="Time stretch and offset an image node";
+ NODEID="ca071ff2-98b7-11e3-9fbf-5705e7dfbef4";
+ }
+ };
#define CYCLER_cut 1
#define CYCLER_mix 2
#define CYCLER_seconds 1
@@ -559,7 +577,7 @@ namespace Rotor {
//
//how else?
//q: how to create a relative timeline from a segment?
- //a: you need the begiining and end points to calculate the duration
+ //a: you need the beginning and end points to calculate the duration
//
//q: where do we have this info/ where do we get it?
//a: only in the node that generated it?