#include "opencv2/opencv.hpp" #include "opencv2/videoio.hpp" #include "opencv2/highgui.hpp" const int FPS = 5; bool isDiscardData = true; int countDiscard = 0; const int DISCARD_DURATION = 10; const int BUFFER_DURATION = 10; const int I = 1; template cv::Mat plotGraph(std::vector& vals, const double* YRange) { auto it = minmax_element(vals.begin(), vals.end()); float scale = 1./ceil(*it.second - *it.first); float bias = *it.first; int rows = YRange[1] - YRange[0] + 1; cv::Mat image = 255*cv::Mat::ones( rows, vals.size(), CV_8UC3 ); image.setTo(255); for (int i = 0; i < (int)vals.size()-1; i++) { cv::line(image, cv::Point(i, rows - 1 - (vals[i] -bias)*scale*YRange[1]), cv::Point(i+1, rows - 1 - (vals[i+1] - bias)*scale*YRange[1]), cv::Scalar(255, 0, 0), 1); } return image; } int main() { cv::VideoCapture cap; cap.open(0); cv::CascadeClassifier faceDetector; if( !faceDetector.load("./haarcascade_frontalface_alt.xml")) { std::cerr << "[ERROR] Unable to load face cascade" << std::endl; return -1; }; cv::Rect foreheadROI; if (!cap.isOpened()) { std::cerr << "[ERROR] Unable to open camera!" << std::endl; return -2; } while (true) { if(isDiscardData){ countDiscard++; if(countDiscard == DISCARD_DURATION*FPS) isDiscardData = false; } else{ cv::Mat frame; cap.read(frame); if (frame.empty()) { std::cerr << "[ERROR] blank frame grabbed" << std::endl; break; } std::vector faceRectangles; faceDetector.detectMultiScale(frame, faceRectangles, 1.1, 3, 0,cv::Size(20, 20)); if (faceRectangles.size() > 0) { foreheadROI = faceRectangles[0]; foreheadROI.height *= 0.3; cv::rectangle(frame, faceRectangles[0], cv::Scalar(0, 0, 255), 1, 1, 0); cv::rectangle(frame, foreheadROI, cv::Scalar(0, 255, 0), 1, 1, 0); cv::Mat frame_forehead = frame(foreheadROI); cv::Scalar avg_forehead = mean(frame_forehead); bool isBufferFull = false; int sampleIdBuffer = 0; cv::Mat greenSignal(1, FPS*BUFFER_DURATION, CV_64F); if (!isBufferFull) { greenSignal.at(0, sampleIdBuffer) = avg_forehead[1] ; sampleIdBuffer++; if (sampleIdBuffer == FPS*BUFFER_DURATION) { isBufferFull = true; } } else { std::vector greenSignalNormalized; cv::Scalar mean, stddev; cv::meanStdDev(greenSignal, mean, stddev); for (int l_sample=0; l_sample < FPS*BUFFER_DURATION; l_sample++) { greenSignalNormalized.push_back((greenSignal.at(0, l_sample) - mean[0])/stddev[0]); } cv::Mat greenFFT; std::vector greenFFTModule; cv::dft(greenSignalNormalized,greenFFT,cv::DFT_ROWS|cv::DFT_COMPLEX_OUTPUT); cv::Mat planes[] = {cv::Mat::zeros(greenSignalNormalized.size(),1, CV_64F), cv::Mat::zeros(greenSignalNormalized.size(),1, CV_64F)}; cv::split(greenFFT, planes); // planes[0] = Re(DFT(I), // planes[1] = Im(DFT(I)) greenFFTModule.clear(); for (int l=0; l < planes[1].cols; l++) { double moduleFFT = pow(planes[1].at(0,l),2) + pow(planes[0].at(0,l),2); greenFFTModule.push_back(sqrt(moduleFFT)); } // display green FFT const double range[2] = {0.0, 150.0}; cv::imshow("FFT module green", plotGraph(greenFFTModule, range)); putText(image, "Text in Images", text_position, FONT_HERSHEY_COMPLEX, font_size,font_Color, font_weight); } } cv::imshow("Color", frame); // int range[2] = {0, (int)(FPS*BUFFER_DURATION)}; // cv::imshow("green", plotGraph(greenSignalNormalized, range)); if (cv::waitKey(1000.0 / FPS) >= 0) break; } } return 0; }