Community Core Vision 1.4
Cross-platform, user-friendly computer vision.
C:/CCV/1.4/addons/ofxNCore/src/Filters/ProcessFilters.h
/*
 *  ProcessFilters.h
 *
 *
 *  Created on 2/2/09.
 *  Copyright 2009 NUI Group. All rights reserved.
 *
 */

#ifndef PROCESS_FILTERS_H_
#define PROCESS_FILTERS_H_

#include "Filters.h"

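/*
 *  ProcessFilters bundles the CPU and GPU preprocessing chains that turn a raw
 *  camera frame into the thresholded binary image handed to blob tracking.
 *  The flags and parameters it reads (bDynamicBG, bTrackDark, bSmooth,
 *  threshold, fLearnRate, ...) come from the Filters base class.
 */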
class ProcessFilters : public Filters {

  public:

    void allocate( int w, int h ) {

        camWidth = w;
        camHeight = h;
        //initialize learn rate
        fLearnRate = 0.0f;
        bMiniMode = false;

        exposureStartTime = ofGetElapsedTimeMillis();

        //CPU Setup
        grayImg.allocate(camWidth, camHeight);      //Gray Image
        grayBg.allocate(camWidth, camHeight);       //Background Image
        subtractBg.allocate(camWidth, camHeight);   //Background after subtraction
        grayDiff.allocate(camWidth, camHeight);     //Difference Image between Background and Source
        highpassImg.allocate(camWidth, camHeight);  //Highpass Image
        ampImg.allocate(camWidth, camHeight);       //Amplified Image
        floatBgImg.allocate(camWidth, camHeight);   //ofxShortImage used for simple dynamic background subtraction

        //GPU Setup
        gpuReadBackBuffer = new unsigned char[camWidth*camHeight*3];
        gpuReadBackImageGS.allocate(camWidth, camHeight);
        //allocateGPU();
        //^^ commented out for now, until the full GPU implementation is in place
    }

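    // allocateGPU() creates the source/background textures and the chain of
    // GPUImageFilter passes described by the shader XML files under filters/.
    // It releases and re-creates the read-back buffer reserved in allocate(),
    // which is why that buffer must be freed with delete [] below. The call to
    // allocateGPU() is currently commented out in allocate() until the full GPU
    // implementation is in place.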
    void allocateGPU(){

        glGenTextures(1, &gpuSourceTex);
        glGenTextures(1, &gpuBGTex);

        delete [] gpuReadBackBuffer; //allocated with new[] in allocate(), so release with delete []

        gpuReadBackBuffer = new unsigned char[camWidth*camHeight*3];
        gpuReadBackImageGS.allocate(camWidth, camHeight);

        glEnable(GL_TEXTURE_2D);
        glBindTexture(GL_TEXTURE_2D, gpuSourceTex);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, camWidth, camHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
        glBindTexture(GL_TEXTURE_2D, gpuBGTex);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, camWidth, camHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
        glBindTexture(GL_TEXTURE_2D, 0);
        glDisable(GL_TEXTURE_2D);

        subtractFilter = new GPUImageFilter("filters/absSubtract.xml", camWidth, camHeight);
        subtractFilter2 = new GPUImageFilter("filters/subtract.xml", camWidth, camHeight);
        contrastFilter = new GPUImageFilter("filters/contrast.xml", camWidth, camHeight);
        gaussVFilter = new GPUImageFilter("filters/gaussV.xml", camWidth, camHeight);
        gaussHFilter = new GPUImageFilter("filters/gauss.xml", camWidth, camHeight);
        gaussVFilter2 = new GPUImageFilter("filters/gaussV2.xml", camWidth, camHeight);
        gaussHFilter2 = new GPUImageFilter("filters/gauss2.xml", camWidth, camHeight);
        threshFilter = new GPUImageFilter("filters/threshold.xml", camWidth, camHeight);
        copyFilter = new GPUImageFilter("filters/copy.xml", camWidth, camHeight);
        grayScale = new GPUImageFilter("filters/grayScale.xml", camWidth, camHeight);
    }

/****************************************************************
 *      CPU Filters
 ****************************************************************/
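/*
 *  applyCPUFilters runs the per-frame CPU chain in place on the captured image:
 *  optional mirroring, background learning (either a one-shot capture or a
 *  running average weighted by fLearnRate), background subtraction (direction
 *  chosen by bTrackDark), then optional smoothing, highpass and amplification,
 *  and finally a fixed threshold that yields the binary image used for blob
 *  detection. The copies into grayImg/subtractBg/highpassImg/ampImg/grayDiff
 *  are only for on-screen preview and are skipped in mini mode.
 */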
    void applyCPUFilters(CPUImageFilter& img){

        //Set horizontal/vertical mirroring
        if(bVerticalMirror || bHorizontalMirror) img.mirror(bVerticalMirror, bHorizontalMirror);

        if(!bMiniMode) grayImg = img; //for drawing

        //Dynamic background: running average weighted by the learn rate
        if(bDynamicBG){
            floatBgImg.addWeighted( img, fLearnRate );
            //grayBg = floatBgImg;  // not yet implemented
            //scale the 16-bit accumulator back down to the 8-bit background image
            cvConvertScale( floatBgImg.getCvImage(), grayBg.getCvImage(), 255.0f/65535.0f, 0 );
            grayBg.flagImageChanged();
        }

        //Recapture the background until the image/camera is fully exposed
        if((ofGetElapsedTimeMillis() - exposureStartTime) < CAMERA_EXPOSURE_TIME) bLearnBakground = true;

        //Capture the full background
        if (bLearnBakground == true){
            floatBgImg = img;
            //grayBg = floatBgImg;  // not yet implemented
            cvConvertScale( floatBgImg.getCvImage(), grayBg.getCvImage(), 255.0f/65535.0f, 0 );
            grayBg.flagImageChanged();
            bLearnBakground = false;
        }

        //Background subtraction: subtract in whichever direction leaves the
        //tracked blobs bright (dark objects on a bright background or vice versa)
        //img.absDiff(grayBg, img);
        if(bTrackDark)
            cvSub(grayBg.getCvImage(), img.getCvImage(), img.getCvImage());
        else
            cvSub(img.getCvImage(), grayBg.getCvImage(), img.getCvImage());

        img.flagImageChanged();

        if(bSmooth){ //Smooth
            img.blur((smooth * 2) + 1); //kernel size needs to be an odd number
            if(!bMiniMode)
                subtractBg = img; //for drawing
        }

        if(bHighpass){ //Highpass
            img.highpass(highpassBlur, highpassNoise);
            if(!bMiniMode)
                highpassImg = img; //for drawing
        }

        if(bAmplify){ //Amplify
            img.amplify(img, highpassAmp);
            if(!bMiniMode)
                ampImg = img; //for drawing
        }

        img.threshold(threshold); //Threshold
        //img.adaptiveThreshold(threshold, -3);

        if(!bMiniMode)
            grayDiff = img; //for drawing
    }

/****************************************************************
 *      GPU Filters
 ****************************************************************/
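/*
 *  applyGPUFilters mirrors the CPU chain on the GPU: the background texture is
 *  (re)captured with the copy filter, the source is differenced against it with
 *  absSubtract, optionally blurred with a separable Gaussian pair, optionally
 *  highpassed by subtracting a second, wider blur from the smoothed result, and
 *  finally thresholded. The result is read back into gpuReadBackImageGS so the
 *  existing CPU blob detection can run on it.
 */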
    void applyGPUFilters(){

        //Recapture the background until the image/camera is fully exposed
        if((ofGetElapsedTimeMillis() - exposureStartTime) < CAMERA_EXPOSURE_TIME) bLearnBakground = true;

        if (bLearnBakground == true){

            gpuBGTex = copyFilter->apply(gpuSourceTex, gpuBGTex);
            bLearnBakground = false;
        }

        GLuint processedTex;

        //Background subtraction (absolute difference against the stored background)
        processedTex = subtractFilter->apply(gpuSourceTex, gpuBGTex);

        if(bSmooth){ //Smooth
            gaussHFilter->parameters["kernel_size"]->value = (float)smooth;
            gaussVFilter->parameters["kernel_size"]->value = (float)smooth;
            processedTex = gaussHFilter->apply(processedTex);
            processedTex = gaussVFilter->apply(processedTex);
        }

        if(bHighpass){ //Highpass: subtract a wider blur from the (smoothed) image
            gaussHFilter2->parameters["kernel_size"]->value = (float)highpassBlur;
            gaussVFilter2->parameters["kernel_size"]->value = (float)highpassBlur;
            processedTex = gaussHFilter2->apply(processedTex);
            processedTex = gaussVFilter2->apply(processedTex);

            if(bSmooth)
                processedTex = subtractFilter2->apply(gaussVFilter->output_texture, processedTex);
            else
                processedTex = subtractFilter2->apply(subtractFilter->output_texture, processedTex);
        }

        if(bAmplify){} //Amplify: not implemented on the GPU yet

        threshFilter->parameters["Threshold"]->value = (float)threshold / 255.0f; //normalize the 0-255 slider value for the shader
        processedTex = threshFilter->apply(processedTex);

        //convert to grayscale so readback is faster. maybe do this from the start?
        grayScale->apply(processedTex);

        //Until the rest of the pipeline is fixed, we'll just download the preprocessing
        //result from the GPU and use that for the blob detection.
        //TODO: make this part not super slow ;)
        glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, threshFilter->output_buffer);
        glReadPixels(0, 0, camWidth, camHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, gpuReadBackBuffer);
        gpuReadBackImageGS.setFromPixels(gpuReadBackBuffer, camWidth, camHeight);
        glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
    }

/****************************************************************
 *      Draw Filter Images
 ****************************************************************/
    void draw()
    {
        grayImg.draw(30, 15, 320, 240);
        grayDiff.draw(375, 15, 320, 240);
        floatBgImg.draw(30, 392, 128, 96);
        subtractBg.draw(165, 392, 128, 96);
        highpassImg.draw(300, 392, 128, 96);
        ampImg.draw(435, 392, 128, 96);
    }

    void drawGPU()
    {
        drawGLTexture(30, 15, 320, 240, gpuSourceTex);
        drawGLTexture(30, 392, 128, 96, gpuBGTex);
        gaussVFilter->drawOutputTexture(165, 392, 128, 96);
        subtractFilter2->drawOutputTexture(300, 392, 128, 96);
        threshFilter->drawOutputTexture(435, 392, 128, 96); //this should be the amplify filter, but we don't have one yet
        gpuReadBackImageGS.draw(375, 15, 320, 240);
    }
};
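
/*
 *  Minimal usage sketch (illustrative only; the real driver lives elsewhere in
 *  ofxNCore, and camera and contourFinder below are hypothetical stand-ins):
 *
 *      ProcessFilters filters;
 *      filters.allocate(320, 240);              // once, at the camera resolution
 *
 *      // each frame:
 *      CPUImageFilter frame;
 *      frame.allocate(320, 240);
 *      frame.setFromPixels(camera.getPixels(), 320, 240);
 *      filters.applyCPUFilters(frame);          // frame now holds the binary image
 *      contourFinder.findContours(frame, ...);  // hypothetical blob detection step
 *      filters.draw();                          // preview the intermediate stages
 */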
#endif