#include "testApp.h" //texture binding with normalised coords void bindTexture(ofBaseHasTexture &t) { ofTexture &tex = t.getTextureReference(); tex.bind(); glMatrixMode(GL_TEXTURE); glPushMatrix(); glLoadIdentity(); ofTextureData texData = tex.getTextureData(); if(texData.textureTarget == GL_TEXTURE_RECTANGLE_ARB) { glScalef(tex.getWidth(), tex.getHeight(), 1.0f); } else { glScalef(tex.getWidth() / texData.tex_w, tex.getHeight() / texData.tex_h, 1.0f); } glMatrixMode(GL_MODELVIEW); } void unbindTexture(ofBaseHasTexture &t) { t.getTextureReference().unbind(); glMatrixMode(GL_TEXTURE); glPopMatrix(); glMatrixMode(GL_MODELVIEW); } void bindTex(ofTexture &tex) { tex.bind(); glMatrixMode(GL_TEXTURE); glPushMatrix(); glLoadIdentity(); ofTextureData texData = tex.getTextureData(); if(texData.textureTarget == GL_TEXTURE_RECTANGLE_ARB) { glScalef(tex.getWidth(), tex.getHeight(), 1.0f); } else { glScalef(tex.getWidth() / texData.tex_w, tex.getHeight() / texData.tex_h, 1.0f); } glMatrixMode(GL_MODELVIEW); } void unbindTex(ofTexture &tex) { tex.unbind(); glMatrixMode(GL_TEXTURE); glPopMatrix(); glMatrixMode(GL_MODELVIEW); } //-------------------------------------------------------------- void testApp::setup(){ isLive = true; isRecording = false; #ifdef NEWAPI openNIDevice.setup(false);//FromXML("openni/config/ofxopenni_config.xml"); openNIDevice.setLogLevel(OF_LOG_VERBOSE); openNIDevice.addDepthGenerator(); openNIDevice.addImageGenerator(); // comment this out openNIDevice.addUserGenerator(); openNIDevice.setRegister(true); openNIDevice.setMirror(true); openNIDevice.start(); openNIDevice.setUseDepthRawPixels(true); //openNIDevice.setSafeThreading(true); ?slower and still crashes? openNIDevice.setMaxNumUsers(1); ofAddListener(openNIDevice.userEvent, this, &testApp::userEvent); ofxOpenNIUser user; user.setUseMaskTexture(true); user.setUsePointCloud(true); user.setPointCloudDrawSize(1); // this is the size of the glPoint that will be drawn for the point cloud user.setPointCloudResolution(1); // this is the step size between points for the cloud -> eg., this sets it to every second point openNIDevice.setBaseUserClass(user); #else /* recordContext.setup(); recordDepth.setup(&recordContext); recordImage.setup(&recordContext); recordUser.setup(&recordContext); recordUser.setUseCloudPoints(true); recordUser.setSmoothing(10.0f); recordContext.toggleRegisterViewport(); */ #endif setupRecording(); whichUser=&recordUser; whichImage=&recordImage; guiWin=new guiWindow(); ofxFenster* win=ofxFensterManager::get()->createFenster(0, 0, 200, 400, OF_WINDOW); win->setWindowTitle("config"); win->addListener(guiWin); guiWin->setup(); } void testApp::setupRecording(string _filename) { bool isCloud=true; #if defined (TARGET_OSX) //|| defined(TARGET_LINUX) // only working on Mac/Linux at the moment (but on Linux you need to run as sudo...) hardware.setup(); // libusb direct control of motor, LED and accelerometers hardware.setLedOption(LED_OFF); // turn off the led just for yacks (or for live installation/performances ;-) #endif recordContext.setup(); // all nodes created by code -> NOT using the xml config file at all //recordContext.setupUsingXMLFile(); recordDepth.setup(&recordContext); recordImage.setup(&recordContext); recordUser.setup(&recordContext); //recordUser.setSmoothing(filterFactor); // built in openni skeleton smoothing... 
    //recordUser.setUseMaskPixels(isMasking);
    recordUser.setUseCloudPoints(isCloud);
    recordUser.setMaxNumberOfUsers(2); // use this to set dynamic max number of users (NB: a hard upper limit is defined by MAX_NUMBER_USERS in ofxUserGenerator)

    recordContext.toggleRegisterViewport();
    recordContext.toggleMirror();

    oniRecorder.setup(&recordContext, ONI_STREAMING);
    //oniRecorder.setup(&recordContext, ONI_CYCLIC, 60);
    //read the warning in ofxOpenNIRecorder about memory usage with ONI_CYCLIC recording!!!
}

void testApp::setupPlayback(string _filename) {
    bool isCloud = true;

    playContext.shutdown();
    playContext.setupUsingRecording(ofToDataPath(_filename));
    playDepth.setup(&playContext);
    playImage.setup(&playContext);
    playUser.setup(&playContext);
    //playUser.setSmoothing(filterFactor); // built in openni skeleton smoothing...
    //playUser.setUseMaskPixels(isMasking);
    playUser.setUseCloudPoints(isCloud);

    playContext.toggleRegisterViewport();
    playContext.toggleMirror();
}

//--------------------------------------------------------------
void testApp::update(){
    if (isLive) {
#ifdef NEWAPI
        openNIDevice.update();
#else
        recordContext.update();
        recordDepth.update();
        recordImage.update();
        recordUser.update();
#endif
    } else {
        playContext.update();
        playDepth.update();
        playImage.update();
        playUser.update();
    }
    if (isRecording) oniRecorder.update();
}

//--------------------------------------------------------------
void testApp::draw(){
#ifdef NEWAPI
    ofSetColor(255, 255, 255);
    //openNIDevice.drawDebug(); // draws all generators
    //openNIDevice.drawDepth(0, 0);

    if (guiWin->drawPoints) {
        /*
        glEnable( GL_POINT_SMOOTH );
        glEnable( GL_BLEND );
        glBlendFunc( GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA );
        glPointSize( guiWin->pointSize );
        glBegin(GL_POINTS);
        glColor3f( 1.0f, 1.0f, 1.0f );
        //uint16_t *depth=openNIDevice.getDepthRawPixels().getPixels();
        ofPoint p;
        for (int i=0;i<640;i+=guiWin->drawStep){
            for (int j=0;j<480;j+=guiWin->drawStep){
                p=openNIDevice.cameraToWorld(ofVec2f(i,j));
                glVertex3f(p.x,p.y,p.z);
            }
        }
        */
        ofPushMatrix();
        ofEnableBlendMode(OF_BLENDMODE_ALPHA);
        int numUsers = openNIDevice.getNumTrackedUsers();
        for (int nID = 0; nID < numUsers; nID++){
            ofxOpenNIUser & user = openNIDevice.getTrackedUser(nID);
            user.drawMask();
            ofPushMatrix();
            ofTranslate((1024/2)-(640/2), (768/2)-(480/2), -500);
            user.drawPointCloud();
            ofPopMatrix();
        }
        ofDisableBlendMode();
        ofPopMatrix();
    } else {
        openNIDevice.drawImage(0, 0, ofGetWidth(), ofGetHeight());
    }
#else
    //cam.begin();
    //bind texture recordImage
    //get point data from recordDepth
    //draw textured polys and allow manipulation
    float cloudWidth = ofGetWidth();
    float cloudHeight = ofGetHeight();

    ofPushMatrix();
    ofTranslate((ofGetWidth()/2)-(640/2), (ofGetHeight()/2)-(480/2), 800);
    ofRotate(0.1, 1, 0.05, 30);
    //ofRotate(0,0,1,180);
    int step = 1;

    if (guiWin->drawPoints) {
        // http://stackoverflow.com/questions/1513811/getting-smooth-big-points-in-opengl
        glEnable( GL_POINT_SMOOTH );
        glEnable( GL_BLEND );
        glBlendFunc( GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA );
        glPointSize( guiWin->pointSize );
        glBegin(GL_POINTS);
        glColor3f( 1.0f, 1.0f, 1.0f );
        float spread = guiWin->distMax - guiWin->distMin;
        for(int y = step; y < 480; y += guiWin->drawStep) {
            for(int x = step; x < 640; x += guiWin->drawStep) {
                ofPoint pos4 = whichUser->getWorldCoordinateAt(x, y, 0); //userID);
                // only draw points whose depth lies inside the gui-set range,
                // shaded from black (near limit) to white (far limit)
                if (guiWin->distMin < pos4.z && pos4.z < guiWin->distMax) {
                    float l = (pos4.z - guiWin->distMin) / spread;
                    glColor3f( l, l, l );
                    glVertex3f(pos4.x, pos4.y, -pos4.z);
                }
            }
        }
        glEnd();
    } else {
        bindTex(whichImage->getTexture());
        for(int y = step; y < 480; y += guiWin->drawStep) {
            glBegin(GL_QUADS);
            for(int x = step; x < 640; x += guiWin->drawStep) {
                ofPoint pos1 = whichUser->getWorldCoordinateAt(x-step, y-step, 0); //userID);
                ofPoint pos2 = whichUser->getWorldCoordinateAt(x, y-step, 0);      //userID);
                ofPoint pos3 = whichUser->getWorldCoordinateAt(x-step, y, 0);      //userID);
                ofPoint pos4 = whichUser->getWorldCoordinateAt(x, y, 0);           //userID);
                // only build a quad when all four corners fall inside the gui-set depth range
                if ((guiWin->distMin < pos1.z && pos1.z < guiWin->distMax) &&
                    (guiWin->distMin < pos2.z && pos2.z < guiWin->distMax) &&
                    (guiWin->distMin < pos3.z && pos3.z < guiWin->distMax) &&
                    (guiWin->distMin < pos4.z && pos4.z < guiWin->distMax)) {
                    glTexCoord2f(((float)x-step)/640.0f, ((float)y-step)/480.0f);
                    glVertex3f(pos1.x, pos1.y, -pos1.z);
                    glTexCoord2f(((float)x)/640.0f, ((float)y-step)/480.0f);
                    glVertex3f(pos2.x, pos2.y, -pos2.z);
                    glTexCoord2f(((float)x)/640.0f, ((float)y)/480.0f);
                    glVertex3f(pos4.x, pos4.y, -pos4.z);
                    glTexCoord2f(((float)x-step)/640.0f, ((float)y)/480.0f);
                    glVertex3f(pos3.x, pos3.y, -pos3.z);
                }
            }
            glEnd();
        }
        unbindTex(whichImage->getTexture());
    }
    ofPopMatrix();
    //recordImage.draw(0, 0, ofGetWidth(),ofGetHeight());
    //cam.end();
#endif
}

#ifdef NEWAPI
void testApp::userEvent(ofxOpenNIUserEvent & event){
    ofLogNotice() << getUserStatusAsString(event.userStatus) << " for user " << event.id << " from device " << event.deviceID;
}
#endif

void testApp::exit(){
#ifdef NEWAPI
    openNIDevice.stop();
#endif
}

string testApp::generateFileName() {
    string _root = "kinectRecord";
    string _timestamp = ofToString(ofGetDay()) +
                        ofToString(ofGetMonth()) +
                        ofToString(ofGetYear()) +
                        ofToString(ofGetHours()) +
                        ofToString(ofGetMinutes()) +
                        ofToString(ofGetSeconds());
    string _filename = (_root + _timestamp + ".oni");
    return _filename;
}

//--------------------------------------------------------------
void testApp::keyPressed(int key){
    switch (key) {
        case 's':
        case 'S':
            if (isRecording) {
                oniRecorder.stopRecord();
                isRecording = false;
            } else {
                oniRecorder.startRecord(generateFileName());
                isRecording = true;
            }
            break;
        case 'p':
        case 'P':
            if (oniRecorder.getCurrentFileName() != "" && !isRecording && isLive) {
                setupPlayback(oniRecorder.getCurrentFileName());
                isLive = false;
                whichUser = &playUser;
                whichImage = &playImage;
            } else {
                isLive = true;
                whichUser = &recordUser;
                whichImage = &recordImage;
            }
            break;
    }
}

//--------------------------------------------------------------
void testApp::keyReleased(int key){
}

//--------------------------------------------------------------
void testApp::mouseMoved(int x, int y ){
}

//--------------------------------------------------------------
void testApp::mouseDragged(int x, int y, int button){
}

//--------------------------------------------------------------
void testApp::mousePressed(int x, int y, int button){
}

//--------------------------------------------------------------
void testApp::mouseReleased(int x, int y, int button){
}

//--------------------------------------------------------------
void testApp::windowResized(int w, int h){
}

//--------------------------------------------------------------
void testApp::gotMessage(ofMessage msg){
}

//--------------------------------------------------------------
void testApp::dragEvent(ofDragInfo dragInfo){
}