Diffstat (limited to 'rotord/src/nodes_audio_analysis.cpp')
-rw-r--r--  rotord/src/nodes_audio_analysis.cpp  |  29
1 file changed, 27 insertions(+), 2 deletions(-)
diff --git a/rotord/src/nodes_audio_analysis.cpp b/rotord/src/nodes_audio_analysis.cpp
index 6ec1188..9ad33f6 100644
--- a/rotord/src/nodes_audio_analysis.cpp
+++ b/rotord/src/nodes_audio_analysis.cpp
@@ -69,6 +69,7 @@ namespace Rotor{
        bits=_bits;
        samples=_samples;
+       features.clear();
        return analyser.init(soname,id,_channels,_bits,_samples,_rate,outputNo,params);
@@ -167,7 +168,6 @@
            if (intensity>max_intensity) max_intensity=intensity;
            if (intensity<min_intensity) min_intensity=intensity;
            intensities.push_back(intensity);
-           cerr<<"segment "<<i<<" average intensity: "<<intensity<<endl;
        }
        //make relative scale 0.0-1.0 and save weighted totals
        vector< pair<int,float>> totals;
@@ -178,9 +178,34 @@
        }
        //sort and convert to features
        std::sort(totals.begin(),totals.end(),sortsegments);
+       for (i=0;i<totals.size();i++) {
+           cerr<<"segment "<<totals[i].first<<" average intensity: "<<totals[i].second<<endl;
+       }
+       vector<float> bucketoffsets;
+       for (auto t:totals) bucketoffsets.push_back(0.0f);
+       if (parameters["levels"]->value>0.0f&&parameters["levels"]->value<totals.size()){
+           //use bucketoffsets to redistribute into smaller number of buckets
+           int numbertoredistribute=totals.size()-((int)parameters["levels"]->value);
+           float numberperbin=((float)numbertoredistribute/totals.size());
+           float toadd=0.5f;
+           int added=0;
+           for (int j=0;j<totals.size();j++){
+               int numbertoadd=min(numbertoredistribute-added,(int)toadd);
+               toadd=(toadd+numberperbin)-numbertoadd;
+               added+=numbertoadd;
+               bucketoffsets[j]=added;
+           }
+           if (numbertoredistribute>0) {
+               cerr<<"reducing number of levels by "<<numbertoredistribute<<", offsets:"<<endl;
+               for (auto o:bucketoffsets) {
+                   cerr<<o<<":";
+               }
+               cerr<<endl;
+           }
+       }
        for (i=0;i<totals.size();i++){
            vampHost::feature f;
-           f.values.push_back((float)i);
+           f.values.push_back((float)i-bucketoffsets[i]);
            features[times[totals[i].first]]=f;
        }
    }
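
For reference, a minimal standalone sketch of the level-reduction step introduced in the last hunk: it accumulates fractional offsets so that the ranked segments collapse onto the requested number of output levels. The rank and level counts below (6 and 3) are illustrative stand-ins for totals.size() and parameters["levels"]->value, not values taken from the node.

// Sketch only (not part of the commit): reproduces the bucketoffsets
// redistribution with illustrative counts.
#include <algorithm>
#include <iostream>
#include <vector>

int main(){
    const int ranks=6;   // stands in for totals.size()
    const int levels=3;  // stands in for (int)parameters["levels"]->value
    std::vector<float> bucketoffsets(ranks,0.0f);
    if (levels>0&&levels<ranks){
        int numbertoredistribute=ranks-levels;
        float numberperbin=(float)numbertoredistribute/ranks;
        float toadd=0.5f;
        int added=0;
        for (int j=0;j<ranks;j++){
            // once enough fraction has accumulated, shift this bucket and all
            // later ones down by one more whole offset
            int numbertoadd=std::min(numbertoredistribute-added,(int)toadd);
            toadd=(toadd+numberperbin)-numbertoadd;
            added+=numbertoadd;
            bucketoffsets[j]=(float)added;
        }
    }
    // rank i is emitted as level i-bucketoffsets[i], matching the final loop above
    for (int i=0;i<ranks;i++)
        std::cout<<"rank "<<i<<" -> level "<<(float)i-bucketoffsets[i]<<std::endl;
    return 0;
}

With these counts the offsets come out as 0,1,1,2,2,3, so the six ranks map onto levels 0,0,1,1,2,2: two segments per level.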