Diffstat (limited to 'cvtest/src/testApp.cpp')
-rw-r--r--   cvtest/src/testApp.cpp   280
1 file changed, 280 insertions, 0 deletions
diff --git a/cvtest/src/testApp.cpp b/cvtest/src/testApp.cpp
new file mode 100644
index 0000000..a05cc49
--- /dev/null
+++ b/cvtest/src/testApp.cpp
@@ -0,0 +1,280 @@
+#include "testApp.h"
+
+//--------------------------------------------------------------
+//units ~ 10cm
+//
+/*
+Can we use a floating-point image (or array) to accumulate frames and generate an averaged background?
+
+Is this too much work to do every frame? Should it run in a separate thread?
+
+*/
+
+void testApp::setup(){
+
+ printf("setup: %ix%i on screen %ix%i\n",ofGetWidth(),ofGetHeight(),ofGetScreenWidth(),ofGetScreenHeight());
+
+ int windowMode = ofGetWindowMode();
+ if(windowMode == OF_FULLSCREEN){
+ windowWidth = ofGetScreenWidth();
+ windowHeight = ofGetScreenHeight();
+ }
+ else if(windowMode == OF_WINDOW){
+ windowWidth = ofGetWidth();
+ windowHeight = ofGetHeight();
+ }
+
+
+    vidPlayer.loadMovie("camoutput3.mov");    // alternative clip: "cam-grass-01.mov"; the footage/camera must match the resolution of the OpenCV images and the output
+ vidPlayer.setLoopState(OF_LOOP_NORMAL);
+
+ vidPlayer.play();
+
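+    // allocate the working images at the capture resolution (CAM_WIDTH_FG x CAM_HEIGHT_FG)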
+ currentFrame.allocate(CAM_WIDTH_FG, CAM_HEIGHT_FG);
+ background.allocate(CAM_WIDTH_FG, CAM_HEIGHT_FG);
+
+ grayFrame.allocate(CAM_WIDTH_FG, CAM_HEIGHT_FG);
+ grayBg.allocate(CAM_WIDTH_FG, CAM_HEIGHT_FG);
+ grayDiff.allocate(CAM_WIDTH_FG, CAM_HEIGHT_FG);
+
+ mogoutput.allocate(CAM_WIDTH_FG, CAM_HEIGHT_FG);
+
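+    // processing parameters: learningRate drives the running-average background,
+    // threshold the (currently disabled) frame difference, mogf the MOG learning rate (adjustable with +/-)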
+ learningRate = 0.001f;
+ bFirstFrame=true;
+
+ threshold=10.0;
+
+ mogf=0.008;
+ /*
+ fg = new ofxOpenCvUtilsForeground();
+
+ //method(ACCUMULATE_WEIGHTED)
+ //,learningRate(0.01)
+ //,threshold(10)
+ //,blockSize(6)
+ //,blur(0)
+ //,backgroundSet(false));
+
+
+
+ fg->threshold = 10.0f; //threshold;
+ fg->learningRate = 0.01f; // learningRate;
+ fg->blockSize = 5.0f; //blockSize;
+ fg->blur = 1.0f; //blockSize;
+ fg->shadowThreshold = 10.0f; //shadowThreshold;
+
+
+
+ meanShift = new ofxOpenCvUtilsMeanShift();
+
+
+ resultFrame.allocate(CAM_WIDTH_FG, CAM_HEIGHT_FG);
+ blobFrame.allocate(CAM_WIDTH_FG, CAM_HEIGHT_FG);
+ trackFrame.allocate(CAM_WIDTH_FG, CAM_HEIGHT_FG);
+
+ selectEnabled = false;
+ */
+
+ ofSetFrameRate(30);
+
+}
+
+
+//--------------------------------------------------------------
+void testApp::update(){
+
+ bool bNewFrame = false;
+
+
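+    // advance the movie and only process when a new frame has been decoded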
+ vidPlayer.idleMovie();
+ bNewFrame = vidPlayer.isFrameNew();
+
+ if (bNewFrame) {
+
+ currentFrame.setFromPixels(vidPlayer.getPixels(), CAM_WIDTH_FG, CAM_HEIGHT_FG);
+ cv::Mat img = currentFrame.getCvImage();
+
+
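+        // convert the 8-bit frame to 32-bit float so it can be blended into the running average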
+ cv::Mat im3;
+ img.convertTo(im3, CV_32FC3);
+
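+        // seed the accumulator with the first frame so accumulateWeighted has a valid starting point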
+ if (bFirstFrame) {
+ img.convertTo(accumulator, CV_32FC3);
+ bFirstFrame=false;
+ }
+
+        // blend the new frame into the running average; the weight starts high so the
+        // background converges quickly, then settles down to the fixed learningRate
+        accumulateWeighted(im3, accumulator, max(1.0f/(ofGetElapsedTimef()*30.0f), learningRate));
+
+        // convert the float accumulator back to 8-bit and copy it into the ofxCv image for drawing
+        accumulator.convertTo(outmat, CV_8UC3);
+        IplImage bgHeader = outmat;    // header only, no pixel copy; avoids leaking a heap-allocated IplImage every frame
+        background = &bgHeader;        // ofxCvColorImage copies the pixels here
+
+ //grayBg = background;
+ //grayFrame = currentFrame;
+
+
+ // take the abs value of the difference between background and incoming and then threshold:
+ //grayDiff.absDiff(grayBg, grayFrame);
+ //grayDiff.threshold(threshold);
+ //grayDiff.adaptiveThreshold( threshold,1,false,true); //int blockSize, int offset=0,bool invert=false, bool gauss=false);
+ //grayDiff.erode_3x3();
+ //grayDiff.resize(windowWidth,windowHeight);
+ //contourFinder.findContours(grayDiff, 200, (640*480)/3, 20, false); // don't find holes
+
+        // MOG background subtraction: mog() writes a foreground mask into outmat,
+        // with mogf as the model's learning rate
+        mog(img, outmat, mogf);
+
+ // Complement the image
+ //cv::threshold(outmat, output, threshold, 255, cv::THRESH_BINARY_INV);
+        IplImage fgHeader = outmat;    // header only; avoids leaking a heap-allocated IplImage every frame
+        grayDiff = &fgHeader;          // copy the foreground mask into the ofxCv image for drawing
+
+
+
+ /*
+ if (!fg->backgroundSet) fg->setBackground(currentFrame.getCvImage());
+
+ if(isTracking) {
+ // define ROI
+ cv::Mat img = currentFrame.getCvImage();
+ cv::Mat img2;
+
+ IplImage imgf = fg->update(currentFrame.getCvImage());
+
+ cvNot(&imgf, &imgf);
+ resultFrame = &imgf;
+
+ blobFrame = resultFrame;
+ cv::Mat mask = blobFrame.getCvImage();
+ cv::threshold(mask,mask,128,255,cv::THRESH_BINARY_INV);
+ cv::add(mask,img,img2);
+
+ IplImage* tmp = new IplImage(img2);
+ trackFrame = tmp;
+
+ meanShift->update(tmp);
+
+ return;
+ }
+
+ if(bNewFrame && fg->backgroundSet) {
+
+
+ if((int)blur%2==0) {
+ blur++;
+ }
+ fg->blur = blur;
+
+ if(useMoG) {
+ fg->setMethod(MIXTURE_OF_GUASSIAN);
+ } else {
+ fg->setMethod(ACCUMULATE_WEIGHTED);
+ }
+
+ // Update the fg
+ IplImage img = fg->update(currentFrame.getCvImage());
+ cvNot(&img, &img);
+ resultFrame = &img;
+
+ blobFrame = resultFrame;
+ }
+ */
+
+ }
+
+
+
+
+}
+
+//--------------------------------------------------------------
+void testApp::draw(){
+
+ ofSetHexColor(0xffffff);
+
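+    // averaged background on the left, MOG foreground mask on the right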
+ background.draw(0, 0);
+ grayDiff.draw(640, 0);
+ //mogoutput.draw(640, 0);
+
+ /*
+
+ if(fg->backgroundSet) {
+
+ resultFrame.draw(0, 0);
+ //if(isTracking) {
+ // trackFrame.draw(320,240);
+ //} else {
+ blobFrame.draw(640,0);
+ //}
+ }
+ */
+ ofSetHexColor(0xffffff);
+ char reportStr[1024];
+ //sprintf(reportStr, "fps: %f\nthreshold: %f", ofGetFrameRate(),threshold);
+ sprintf(reportStr, "fps: %f\nmog: %f", ofGetFrameRate(),mogf);
+ ofDrawBitmapString(reportStr, 1100, 440);
+
+
+}
+
+//--------------------------------------------------------------
+void testApp::keyPressed(int key){
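+    // '+' / '-' raise or lower both the difference threshold and the MOG learning rate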
+ switch (key){
+        case '+':
+            threshold++;
+            mogf += .001;
+            if (threshold > 255) threshold = 255;
+            if (mogf > 1.0f) mogf = 1.0f;      // keep the learning rate in [0,1]
+            break;
+        case '-':
+            threshold--;
+            mogf -= .001;
+            if (threshold < 0) threshold = 0;
+            if (mogf < 0.0f) mogf = 0.0f;
+            break;
+ }
+}
+
+//--------------------------------------------------------------
+void testApp::keyReleased(int key){
+
+}
+
+//--------------------------------------------------------------
+void testApp::mouseMoved(int x, int y ){
+
+}
+
+//--------------------------------------------------------------
+void testApp::mouseDragged(int x, int y, int button){
+
+}
+
+//--------------------------------------------------------------
+void testApp::mousePressed(int x, int y, int button){
+
+}
+
+//--------------------------------------------------------------
+void testApp::mouseReleased(int x, int y, int button){
+
+}
+
+//--------------------------------------------------------------
+void testApp::windowResized(int w, int h){
+
+}
+
+//--------------------------------------------------------------
+void testApp::gotMessage(ofMessage msg){
+
+}
+
+//--------------------------------------------------------------
+void testApp::dragEvent(ofDragInfo dragInfo){
+
+}
+