#include "testApp.h"

/*
 maybe draw pixel lines to a range of buffers and then play them in various 3D directions
 pixel lines can have nice, missing corners and be nicely smoothed
 is it insane not to do this in vvvv
 can there be an editing interface
 have a play with vvvv / max and try to replicate this idea QUICKLY?
*/

//--------------------------------------------------------------
void testApp::setup(){

    midiIn.listPorts();
    midiIn.openPort(1);     //this was 1 on linux, 0 on OSX
    midiIn.addListener(this);

    midiOut.listPorts();
    midiOut.openPort(1);    //this was 1 on linux, 0 on OSX

    // 0 output channels,
    // 2 input channels
    // 44100 samples per second
    // BUFFER_SIZE samples per buffer
    // 4 num buffers (latency)

    soundStream.listDevices();

    left = new float[BUFFER_SIZE];
    right = new float[BUFFER_SIZE];

    //soundStream.setDeviceID(7);   //ignore to use default?
    soundStream.setup(this, 0, 2, 44100, BUFFER_SIZE, 4);
    //soundStream.setup(this, 0, 4, 44100, BUFFER_SIZE, 4);

    ofSetHexColor(0x666666);

    lFFTanalyzer.setup(44100, BUFFER_SIZE/2, 32);
    lFFTanalyzer.peakHoldTime = 15;         // hold longer
    lFFTanalyzer.peakDecayRate = 0.95f;     // decay slower
    lFFTanalyzer.linearEQIntercept = 0.9f;  // reduced gain at lowest frequency
    lFFTanalyzer.linearEQSlope = 0.01f;     // increasing gain at higher frequencies

    rFFTanalyzer.setup(44100, BUFFER_SIZE/2, 32);
    rFFTanalyzer.peakHoldTime = 15;         // hold longer
    rFFTanalyzer.peakDecayRate = 0.95f;     // decay slower
    rFFTanalyzer.linearEQIntercept = 0.9f;  // reduced gain at lowest frequency
    rFFTanalyzer.linearEQSlope = 0.01f;     // increasing gain at higher frequencies

    /*
    if (4chan) {
        lFFTanalyzer2.setup(44100, BUFFER_SIZE/2, 32);
        lFFTanalyzer2.peakHoldTime = 15;        // hold longer
        lFFTanalyzer2.peakDecayRate = 0.95f;    // decay slower
        lFFTanalyzer2.linearEQIntercept = 0.9f; // reduced gain at lowest frequency
        lFFTanalyzer2.linearEQSlope = 0.01f;    // increasing gain at higher frequencies

        rFFTanalyzer2.setup(44100, BUFFER_SIZE/2, 32);
        rFFTanalyzer2.peakHoldTime = 15;        // hold longer
        rFFTanalyzer2.peakDecayRate = 0.95f;    // decay slower
        rFFTanalyzer2.linearEQIntercept = 0.9f; // reduced gain at lowest frequency
        rFFTanalyzer2.linearEQSlope = 0.01f;    // increasing gain at higher frequencies
    }
    */

    ofSetFrameRate(60);
    ofBackground(0,0,0);

    F_movieSpeed=0.0;
    //blendImage.loadImage("blend01.png");
    blendImage.loadMovie("blend.mp4");
    blendImage.play();
    blendImage.setLoopState(OF_LOOP_NORMAL);
    blendImage.setSpeed(F_movieSpeed);

    I_movieSource=0;
    I_moviePlaying=0;

    //renderImage.allocate(ofGetWidth(),ofGetHeight(),GL_RGB);
    renderImage.allocate(4080,768,GL_RGB);

    maskShader.load("composite");
    //testImage.loadImage("mask.png");

    maskShader.begin();
    maskShader.setUniformTexture("Tex0", blendImage.getTextureReference(), 0);
    maskShader.setUniformTexture("Tex1", renderImage.getTextureReference(), 1);
    maskShader.end();

    showFPS=false;

    //defaults
    F_scale=0;
    F_drawFrames=10;
    F_drawStep=0;
    F_drawDecay=0.95;
    F_lineWidth=1.0;        //1.0;
    F_drawAxis=90;          //Y
    F_xRotate=0;
    F_yRotate=0;
    F_zRotate=0;
    F_xRotation=180;
    F_yRotation=180;        //was 180
    F_zRotation=0;
    F_particleAmount=2000;  //100
    F_particleLife=0.75;    //0.5
    F_particleSize=16.0;    //1.0;
    I_fade1=0;
    I_fade2=0;

    thisFFTbuffer=0;
    lastFrameTime=ofGetElapsedTimef();
    draworder=true;         //forwards
    visMode=WAVEFORM;
    B_vSync=true;
    B_fill=true;
    inputMode=PICTURE;

    gammamap=new unsigned char[256];
    whitePt=0.9;
    blackPt=0.3;
    gamma=1.0;

    //calc gamma map
    for (int i=0;i<256;i++){
        float ewp=max(whitePt,blackPt+0.1f);    //limit range to 0.1
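        // Each table entry remaps an 8-bit level: normalise i to 0..1, subtract the black
        // point, divide by the effective range (the white point clamped to at least
        // blackPt+0.1), clamp the result to 0..1, apply the gamma curve, then rescale to
        // 0..255. E.g. with blackPt=0.3, whitePt=0.9, gamma=1.0: i=153 -> (0.6-0.3)/0.6 = 0.5 -> 127.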
        gammamap[i]=(unsigned char)(pow(min(1.0f,max(0.0f,(((float(i)/255.0f)-blackPt)/(ewp-blackPt)))),gamma)*255.0);
    }

    B_glitch=false;

    //blanker.allocate(64,64,OF_IMAGE_COLOR);
    blanker.loadImage("black.png");

    fullScreen=false;

    setMidiState();

    receiver.setup(OSCPORT);

    target.setPosition(0,0,0);
    camera.setPosition(0,0,-700);
    camera.lookAt(target,ofVec3f(0,1,0));

    narrator.init("TRSS_nesbitt_recordings.xml");

    if( !XML.loadFile("videos.xml") ){
        printf("unable to load videos, check data/ folder\n");
    }else{
        if(XML.pushTag("TRSS")) {
            int num=XML.getNumTags("video");
            if(num) {
                int i;
                for (i=0;i<num;i++) {
                    // ...
                }
            }
        }
    }
}

//--------------------------------------------------------------
void testApp::update(){

    // ...

    while(receiver.hasWaitingMessages()){
        ofxOscMessage m;
        receiver.getNextMessage(&m);

        vector<string> path;
        explode(m.getAddress(),'/', back_inserter(path));
        cerr<<"OSC: "<<m.getAddress()<<endl;

        if (path.size()>1) {
            if (path[1]=="mrmr"){
                if (path.size()>3){
                    if (path[2]=="pushbutton"){
                        int channel=ofToInt(path[3]);
                        if (channel>12) channel--;  //12 is missing
                        if (m.getArgAsInt32(0)==1000) {
                            if (videoplaying>-1&&videoclips.size()>channel) {
                                videoclips[videoplaying].stop();
                                videoclips[videoplaying].setPosition(0.0f);
                            }
                            //videoclips[channel].setPosition(0.0f);
                            //videoclips[channel].setSpeed(1.0f);
                            videoplaying=channel;
                            videoclips[channel].play();
                        }
                    }
                }
            } else {
                string type=path[1];
                if (type=="narrator"||type=="video") {
                    int channel=ofToInt(path[2])-1;
                    int data=m.getArgAsInt32(0);
                    cerr<<"type: "<<type<<endl;
                    if (type=="video"){
                        if ((channel > -1)&&videoclips.size()>channel) {
                            if (data>0) {
                                cerr<<"playing video "<<channel<<endl;
                                if (videoplaying>-1&&videoclips.size()>channel) {
                                    videoclips[videoplaying].stop();
                                    videoclips[videoplaying].setPosition(0.0f);
                                }
                                //videoclips[channel].setPosition(0.0f);
                                //videoclips[channel].setSpeed(1.0f);
                                videoplaying=channel;
                                videoclips[channel].play();
                            }
                        }
                    }
                    if (type=="narrator"){
                        if ((channel > -1)&&narrator.getNumClips()>channel) {
                            if (data>0) {
                                //???doesn't seem to work
                                cerr<<"playing narrator "<<channel<<endl;
                                narrator.startPlayer(channel);
                            }
                        }
                    }
                }
            }
        }
    }

    //for (int i=0;i<videoclips.size();i++){
    if (videoplaying>-1&&videoclips.size()>videoplaying) videoclips[videoplaying].update();
        //if (videoclips[i].getSpeed()>0.01f) videoclips[i].update();
        //if (videoclips[i].getPosition()>0.99f) videoclips[i].setSpeed(0.0f);
        //if (videoclips[i].isPlaying()) videoclips[i].stop();
    //}

    narrator.update();

    xOffs*=.95;
    yOffs*=.95;
}

//--------------------------------------------------------------
void testApp::draw(){

    ofSetVerticalSync(B_vSync);

    static int index=0;
    float lavg_power = 0.0f;
    float ravg_power = 0.0f;

    if (visMode==FFT_RAW||visMode==FFT_AVG) {
        myfft.powerSpectrum(0,(int)BUFFER_SIZE/2, left,BUFFER_SIZE,&lmagnitude[0],&lphase[0],&lpower[0],&lavg_power);
        myfft.powerSpectrum(0,(int)BUFFER_SIZE/2, right,BUFFER_SIZE,&rmagnitude[0],&rphase[0],&rpower[0],&ravg_power);
    }

    if (visMode==FFT_AVG) {
        lFFTanalyzer.calculate(lmagnitude);
        rFFTanalyzer.calculate(rmagnitude);
    }

    thisFFTbuffer=(thisFFTbuffer-1);
    thisFFTbuffer=thisFFTbuffer<0?BUFFER_FRAMES-1:thisFFTbuffer;

    switch(visMode) {
        case FFT_AVG:
            for (int i=0;i<lFFTanalyzer.nAverages;i++) {
                // ...
            }
            break;
        // ...
    }

    // ...

    if (F_yseg>270) F_yseg-=360;
    if (F_yseg>90) {
        jStart=jEnd;
        jEnd=0;
        jStep=-1;
        if (draworder) {
            draworder=false;
            //printf("switched to reverse order\n");
        }
    } else if (!draworder) {
        draworder=true;
        //printf("switched to normal order\n");
    }
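    // Each pass of the j loop draws one slice of FFT history: fB maps the slice into the
    // circular FFTbuffer (thisFFTbuffer is decremented every frame, so larger j presumably
    // reaches further back in time), and each slice is pushed along z by j*zStep. Within a
    // slice the left channel's averages run left-to-right and the right channel's are mirrored
    // right-to-left, so the two spectra meet in the middle of the strip. When the view swings
    // past 90 degrees the loop direction flips (draworder), presumably so the slices keep
    // being drawn back-to-front.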
    for (int j=jStart;j!=jEnd;j+=jStep) {
        int fB=(thisFFTbuffer+j)%BUFFER_FRAMES;

        ofPushMatrix();
        //coordinate transform for FFT draw direction
        ofTranslate(0,0,(j*zStep)-hw);
        ofRotateX(F_drawAxis);

        ofFill();
        ofSetColor(0,0,0);
        if (B_fill) {
            glBegin(GL_QUAD_STRIP);
            for (int i = 0; i < lFFTanalyzer.nAverages-1; i++){
                glVertex3f((i*xStep)-hw,(FFTbuffer[0][i][fB] * F_scale),0);
                glVertex3f((i*xStep)-hw,0,0);
            }
            for (int i =lFFTanalyzer.nAverages, k=lFFTanalyzer.nAverages-1; i < lFFTanalyzer.nAverages+rFFTanalyzer.nAverages; i++, k--){
                glVertex3f((i*xStep)-hw,(FFTbuffer[1][(lFFTanalyzer.nAverages+rFFTanalyzer.nAverages)-(i+1)][fB] * F_scale),0);
                glVertex3f((i*xStep)-hw,0,0);
            }
            glEnd();
        }

        ofNoFill();
        ofSetLineWidth(F_lineWidth);
        ofSetColor(I_fade1,I_fade1,I_fade1);
        glBegin(GL_LINE_STRIP);
        for (int i = 0; i < lFFTanalyzer.nAverages; i++){
            glVertex3f((i*xStep)-hw,(FFTbuffer[0][i][fB] * F_scale),0);
        }
        for (int i =lFFTanalyzer.nAverages, k=lFFTanalyzer.nAverages-1; i < lFFTanalyzer.nAverages+rFFTanalyzer.nAverages; i++, k--){
            glVertex3f((i*xStep)-hw,(FFTbuffer[1][(lFFTanalyzer.nAverages+rFFTanalyzer.nAverages)-(i+1)][fB] * F_scale),0);
        }
        glEnd();

        ofPopMatrix();
    }

    camera.end();
    ofPopMatrix();

    if (videoplaying>-1&&videoclips.size()>videoplaying) {
        if (videoclips[videoplaying].getPosition()<0.1f) {
            glEnable(GL_BLEND);
            uint8_t b=uint8_t((videoclips[videoplaying].getPosition()/0.1f)*255);
            ofSetColor(255,255,255,b);
        } else if (videoclips[videoplaying].getPosition()>0.8f) {
            glEnable(GL_BLEND);
            uint8_t b=uint8_t((1.0f-((videoclips[videoplaying].getPosition()-0.8f)/0.2f))*255);
            ofSetColor(255,255,255,b);
        } else ofSetColor(255,255,255);
        videoclips[videoplaying].draw(0,0,renderImage.getWidth()/3,renderImage.getHeight());
        videoclips[videoplaying].draw(renderImage.getWidth()*0.666666667,0,renderImage.getWidth()/3,renderImage.getHeight());
        glDisable(GL_BLEND);
    }

    ofPushMatrix();
    //ofTranslate(hw,hh);
    camera.begin();
    ofRotateX(F_xRotation);
    ofRotateY(F_yRotation);
    ofRotateZ(F_zRotation);

    ofPushMatrix();
    //ofTranslate(0,-100,-1050);
    ofScale(sModel,sModel,sModel);
    //ofTranslate(xModel*sModel,yModel*sModel,zModel*sModel);
    ofTranslate(xModel,yModel,zModel);
    //narrator.drawCloud(2);
    narrator.drawPoints(F_particleSize,F_particleAmount,F_particleLife,F_particleX,F_particleY,F_particleZ);
    ofPopMatrix();

    ofSetColor(255,255,255);
    camera.end();
    ofPopMatrix();

    renderImage.end();
    //ofEndShape(false);

    //fbImage.grabScreen(0,0,ofGetWidth(),ofGetHeight());

    /*
    ofSetHexColor(0xaaaaaa);
    int width=2048/FFTanalyzer.nAverages;
    for (int i = 0; i < FFTanalyzer.nAverages; i++){
        ofRect(i*width,768,width,-FFTanalyzer.averages[i] * 20);
    }

    ofSetHexColor(0xffffff);
    for (int i = 0; i < (int)(BUFFER_SIZE/2 - 1); i++){
        ofRect(i,768-freq[i]*10.0f,1,1);
    }

    ofSetHexColor(0xff0000);
    for (int i = 0; i < FFTanalyzer.nAverages; i++){
        ofRect(i*width,768-FFTanalyzer.peaks[i] * 20,width,-1);
    }

    float avgStep = 1024/FFTanalyzer.nAverages;

    ofNoFill();
    ofSetLineWidth(1);

    ofSetColor(223, 218, 218);
    ofDrawBitmapString("FFT average", 4, 18);
    ofBeginShape();
    for (int i = 0; i < FFTanalyzer.nAverages; i++){
        ofVertex(i*avgStep, 284 -FFTanalyzer.averages[i] * 20);
    }
    ofEndShape(false);

    ofSetColor(97,181,243);
    ofDrawBitmapString("FFT magnitude", 4, 38);
    ofBeginShape();
    for (int i = 0; i < BUFFER_SIZE; i++){
        ofVertex(i*avgStep, 384 -magnitude[i] * 20);
    }
    ofEndShape(false);

    ofSetColor(97,243,174);
    ofDrawBitmapString("FFT phase", 4, 58);
    ofBeginShape();
    for (int i = 0; i < BUFFER_SIZE; i++){
        ofVertex(i*avgStep, 484 -phase[i] * 20);
    }
    ofEndShape(false);

    ofSetColor(243,174,94);
    ofDrawBitmapString("FFT power", 4, 78);
    ofBeginShape();
    for (int i = 0; i < BUFFER_SIZE; i++){
        ofVertex(i*avgStep, 584 -power[i] * 20);
    }
    ofEndShape(false);

    //float * averages;     // the actual averages
    //float * peaks;        // peaks of the averages, aka "maxAverages" in other implementations
    */
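    // Composite pass: the "composite" shader was given the blend movie as Tex0 and the FBO
    // render as Tex1 in setup(), and here both textures are bound and drawn through it as a
    // single screen-sized quad, presumably using the movie as a mask/blend over the rendered
    // scene. Texture coordinates are specified in pixels, per texture, because these appear
    // to be rectangle (ARB) textures, openFrameworks' default, which use non-normalised coords.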
    ofEnableAlphaBlending();

    maskShader.begin();

    glActiveTexture(GL_TEXTURE0_ARB);
    blendImage.getTextureReference().bind();
    glActiveTexture(GL_TEXTURE1_ARB);
    renderImage.getTextureReference().bind();

    glBegin(GL_QUADS);
        glMultiTexCoord2d(GL_TEXTURE0_ARB, 0, 0);
        glMultiTexCoord2d(GL_TEXTURE1_ARB, 0, 0);
        glVertex2f( 0, 0);

        glMultiTexCoord2d(GL_TEXTURE0_ARB, blendImage.getWidth(), 0);
        glMultiTexCoord2d(GL_TEXTURE1_ARB, renderImage.getWidth(), 0);
        glVertex2f( ofGetWidth(), 0);

        glMultiTexCoord2d(GL_TEXTURE0_ARB, blendImage.getWidth(), blendImage.getHeight());
        glMultiTexCoord2d(GL_TEXTURE1_ARB, renderImage.getWidth(), renderImage.getHeight());
        glVertex2f( ofGetWidth(), ofGetHeight());

        glMultiTexCoord2d(GL_TEXTURE0_ARB, 0, blendImage.getHeight());
        glMultiTexCoord2d(GL_TEXTURE1_ARB, 0, renderImage.getHeight());
        glVertex2f( 0, ofGetHeight());
    glEnd();

    glActiveTexture(GL_TEXTURE1_ARB);
    renderImage.getTextureReference().unbind();
    glActiveTexture(GL_TEXTURE0_ARB);
    blendImage.getTextureReference().unbind();

    maskShader.end();

    /*
    glPushMatrix();
    glTranslatef(ofGetWidth()*0.5,ofGetHeight()*0.5,0);
    glRotatef((float)ofGetFrameNum(),0,1.0,0);
    renderImage.draw(ofGetWidth()*-0.5,ofGetHeight()*-0.5);
    glPopMatrix();
    */

    if (showFPS) {
        string msg=ofToString(ofGetFrameRate(), 2);
        msg+="\n"+ofToString(F_xRotation, 4)+" "+ofToString(F_yRotation, 4)+" "+ofToString(F_zRotation, 4);
        msg+="\n"+ofToString(F_yseg, 4);
        msg+="\n"+ofToString(narrator.getNumParticles())+" size "+ofToString(F_lineWidth);
        msg+="\n"+ofToString(xModel)+","+ofToString(yModel)+","+ofToString(zModel)+" * "+ofToString(sModel);
        syncOniPlayer *player=narrator.getCurrentPlayer();
        if (player) {
            msg+="\n"+ofToString(player->getCurrentFrame())+","+ofToString(player->getPosition(),2);
            msg+="\n"+player->getCurrentFile();
        }
        ofDrawBitmapString(msg,20,20);
    }
}

//--------------------------------------------------------------
void testApp::keyPressed (int key){

    if(key == 'f'){
        showFPS=!showFPS;
    }
    if(key == ' '){
        fullScreen=!fullScreen;
        ofSetFullscreen(fullScreen);
    }
    if(key == '/'){
        narrator.startPlayer(23);
    }
    if(key == ','){
        narrator.previous();
    }
    if(key == '.'){
        narrator.next();
    }
    if(key>='1'&&key<='9'){
        int clip=key-'1';
        cerr<<"playing video clip "<<clip<<endl;
        if (videoclips.size()>clip) {
            videoclips[videoplaying].stop();
            videoclips[videoplaying].setPosition(0.0f);
            videoclips[clip].play();
            videoplaying=clip;
        }
    }
    if(key == 't'){
        for (int i=0;i