summary refs log tree commit diff
path: root/rotord/src
diff options
context:
space:
mode:
Diffstat (limited to 'rotord/src')
-rw-r--r--rotord/src/nodes_audio_analysis.cpp137
1 file changed, 96 insertions, 41 deletions
diff --git a/rotord/src/nodes_audio_analysis.cpp b/rotord/src/nodes_audio_analysis.cpp
index 1bc82e6..96244d0 100644
--- a/rotord/src/nodes_audio_analysis.cpp
+++ b/rotord/src/nodes_audio_analysis.cpp
@@ -113,6 +113,9 @@ namespace Rotor{
bool sortseggrps(std::pair<double,vector<pair<double,int> > > i,std::pair<double,vector<pair<double,int> > > j){
return (i.first<j.first);
}
+ bool sortgroupmembers(pair<double,int> i,pair<double,int> j){
+ return (i.first<j.first);
+ }
void Intensity_segmenter::cleanup(){
//algorithm idea:
//get average tempo and intensity for each segment and store them
@@ -157,7 +160,14 @@ namespace Rotor{
}
i++;
}
- for (auto s:similarities) cerr<<"group "<<s.first<<" count: "<<s.second.size()<<endl;
+ for (auto s:similarities) {
+ string list="";
+ for (int j=0;j<s.second.size();j++){
+ if (j>0) list+=",";
+ list +=toString(s.second[j]);
+ }
+ cerr<<"group "<<s.first<<" ["<<list<<"]"<<endl;
+ }
cerr<<analysers["segmenter"].features.size()<<" segments"<<endl;
cerr<<analysers["tempo"].features.size()<<" tempo features"<<endl;
@@ -172,7 +182,6 @@ namespace Rotor{
vector<double> times;
auto g=++analysers["segmenter"].features.begin();
for (auto f=analysers["segmenter"].features.begin();g!=analysers["segmenter"].features.end();f++,g++,i++){
- cerr<<"segment "<<i<<": "<<f->first<<" to "<<g->first<<endl;
times.push_back(f->first);
//integrate tempo and intensity algorithmically
double tempo=0;
@@ -190,8 +199,7 @@ namespace Rotor{
if (tempo>max_tempo) max_tempo=tempo;
if (tempo<min_tempo) min_tempo=tempo;
tempos.push_back(tempo);
- cerr<<"segment "<<i<<" average tempo: "<<tempo<<endl;
-
+
double intensity=0;
if (analysers["intensity"].features.size()) {
double pt=f->first;
@@ -207,7 +215,21 @@ namespace Rotor{
if (intensity>max_intensity) max_intensity=intensity;
if (intensity<min_intensity) min_intensity=intensity;
intensities.push_back(intensity);
+
+ cerr<<"segment "<<i<<": "<<f->first<<" to "<<g->first<<" average tempo: "<<tempo<<" ,intensity: "<<intensity<<" ,weighted: "<<(tempo*parameters["tempo_weight"]->value)+(intensity*parameters["intensity_weight"]->value)<<endl;
}
+ //
+ //
+ //need to calculate the last segment
+ //
+ //
+ //either by adding a bit of code here or by adding a dummy feature at the track duration, previously
+ //
+ //
+ //
+ //
+
+
//make relative scale 0.0-1.0 and save weighted totals
vector< pair<int,double>> totals;
vector<double> totalsmap;
@@ -298,56 +320,89 @@ need to retrieve total intensity by segment
std::sort(seggrps.begin(),seggrps.end(),sortseggrps);
//possible mergers will be with groups with adjacent intensity
if (((int)parameters["levels"]->value)>0) {
- while (seggrps.size()>(int)parameters["levels"]->value){
- //reduce similarity groups
- //decide the best 2 to merge
- vector<double> diffs;
- for (int j=0;j<seggrps.size()-1;j++) diffs.push_back(seggrps[j+1].first-seggrps[j].first);
- int smallest=0;
- for (int j=1;j<diffs.size();j++) if (diffs[j]<diffs[smallest]) smallest=j;
- for (int j=0;j<seggrps[smallest].second.size();j++) {
- seggrps[smallest+1].second.push_back(seggrps[smallest].second[j]);
- }
- //recalculate intensity average
- double avg=0.0f;
- for (auto p:seggrps[smallest+1].second) avg+=p.first;
- avg/=seggrps[smallest+1].second.size();
- seggrps[smallest+1].first=avg;
+ if (seggrps.size()>(int)parameters["levels"]->value){
+ while (seggrps.size()>(int)parameters["levels"]->value){
+ //reduce similarity groups
+ //decide the best 2 to merge
+ vector<double> diffs;
+ for (int j=0;j<seggrps.size()-1;j++) diffs.push_back(seggrps[j+1].first-seggrps[j].first);
+ int smallest=0;
+ for (int j=1;j<diffs.size();j++) if (diffs[j]<diffs[smallest]) smallest=j;
+ for (int j=0;j<seggrps[smallest].second.size();j++) {
+ seggrps[smallest+1].second.push_back(seggrps[smallest].second[j]);
+ }
+ //recalculate intensity average
+ double avg=0.0f;
+ for (auto p:seggrps[smallest+1].second) avg+=p.first;
+ avg/=seggrps[smallest+1].second.size();
+ seggrps[smallest+1].first=avg;
- seggrps.erase(seggrps.begin()+smallest);
+ seggrps.erase(seggrps.begin()+smallest);
+ }
+ cerr<<"intensities merged, "<<seggrps.size()<<" levels remain"<<endl;
}
- cerr<<"intensities merged, "<<seggrps.size()<<" levels remain"<<endl;
- while (false) {
- //while (seggrps.size()<min((int)parameters["levels"]->value,(int)totalsmap.size())) {
- //split groups
- //calculate standard deviation of intensity variation
- vector<double> devs;
- for (int j=0;j<seggrps.size()-1;j++) {
- double avg=0.0;
- double dev=0.0;
- for (int k=0;k<seggrps[j].second.size();k++){
- avg+=k<seggrps[j].second[k].first;
+ if (seggrps.size()<min((int)parameters["levels"]->value,(int)totalsmap.size())){
+ while (seggrps.size()<min((int)parameters["levels"]->value,(int)totalsmap.size())) {
+ //split groups
+ //calculate standard deviation of intensity variation
+ vector<double> devs;
+ for (int j=0;j<seggrps.size()-1;j++) {
+ double avg=0.0;
+ double dev=0.0;
+ for (int k=0;k<seggrps[j].second.size();k++){
+ avg+=k<seggrps[j].second[k].first;
+ }
+ avg/=seggrps[j].second.size();
+ for (int k=0;k<seggrps[j].second.size();k++){
+ dev+=pow(avg-k<seggrps[j].second[k].first,2.0);
+ }
+ dev/=seggrps[j].second.size();
+ devs.push_back(pow(dev,0.5));
}
- avg/=seggrps[j].second.size();
- for (int k=0;k<seggrps[j].second.size();k++){
- dev+=pow(avg-k<seggrps[j].second[k].first,2.0);
+ //find group with largest standard deviation
+ int largest=0;
+ for (int j=1;j<devs.size();j++) if (devs[j]>devs[largest]) largest=j;
+ //sanity check: if there are any groups that can be split they will have larger SD than singleton groups
+ //sort members of the group
+ std::sort(seggrps[largest].second.begin(),seggrps[largest].second.end(),sortgroupmembers);
+ //create a new group
+ std::pair<double,vector<pair<double,int> > > newgroup;
+ for (int j=seggrps[largest].second.size();j>seggrps[largest].second.size()/2;j--) {
+ newgroup.second.push_back(seggrps[largest].second[j]);
+ seggrps[largest].second.erase(seggrps[largest].second.begin()+j);
}
- dev/=seggrps[j].second.size();
- devs.push_back(pow(dev,0.5));
+
+ //refresh averages for the 2 groups
+ double avg=0.0f;
+ for (auto p:seggrps[largest].second) avg+=p.first;
+ avg/=seggrps[largest].second.size();
+ seggrps[largest].first=avg;
+
+ avg=0.0f;
+ for (auto p:newgroup.second) avg+=p.first;
+ avg/=newgroup.second.size();
+ newgroup.first=avg;
+
+ //add the new group
+ seggrps.push_back(newgroup);
}
- //find group with largest standard deviation
- int largest=0;
- for (int j=1;j<devs.size();j++) if (devs[j]>devs[largest]) largest=j;
- //sanity check: if there are any groups that can be split they will have larger SD than singleton groups
- //TODO actually split group
+
+
}
+ cerr<<"similaritity groups split, "<<seggrps.size()<<" levels total"<<endl;
+ //seggrps are now out of order
+ std::sort(seggrps.begin(),seggrps.end(),sortseggrps);
}
map<int,int> outputvalues;
for (int j=0;j<seggrps.size();j++){
+ string list="";
for (int k=0;k<seggrps[j].second.size();k++){
outputvalues[seggrps[j].second[k].second]=j;
+ if (k>0) list+=",";
+ list +=toString(seggrps[j].second[k].second);
}
+ cerr<<"output value: "<<j<<" assigned to ["<<list<<"]"<<endl;
}