OpenCV實現人臉檢測功能
更新時間:2020年02月14日 14:07:14 作者:老狼主
這篇文章主要為大家詳細介紹了OpenCV實現人臉檢測功能,文中示例代碼介紹的非常詳細,具有一定的參考價值,感興趣的小伙伴們可以參考一下
本文實例為大家分享了OpenCV實現人臉檢測功能的具體代碼,供大家參考,具體內容如下
1、HAAR級聯檢測
#include <opencv2/opencv.hpp>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

using namespace cv;
using namespace std;

// Forward declaration: the original called face_detect_haar() from main()
// before any declaration was visible, which does not compile in C++.
void face_detect_haar();

int main(int argc, char** argv) {
    face_detect_haar();
    waitKey(0);
    return 0;
}

// Run Haar-cascade face detection on a video file: draw a green rectangle
// around each detection, overlay FPS / per-frame latency, and print the
// total number of detections when the video ends.
void face_detect_haar() {
    CascadeClassifier faceDetector;
    std::string haar_data_file = "./models/haarcascades/haarcascade_frontalface_alt_tree.xml";
    // Bug fix: the original ignored the return value of load(); a missing
    // model file would silently yield zero detections.
    if (!faceDetector.load(haar_data_file)) {
        printf("Load cascade fail...\n");
        return;
    }

    vector<Rect> faces;
    // VideoCapture capture(0);            // uncomment to read from the webcam
    VideoCapture capture("./video/test.mp4");
    // Bug fix: the original never checked whether the source opened.
    if (!capture.isOpened()) {
        printf("Don't find video...\n");
        return;
    }

    Mat frame, gray;
    int count = 0;
    while (capture.read(frame)) {
        int64 start = getTickCount();
        if (frame.empty()) {
            break;
        }
        // flip(frame, frame, 1);          // horizontal mirror, if needed
        imshow("input", frame);

        if (frame.channels() == 4)
            cvtColor(frame, frame, COLOR_BGRA2BGR);
        // The cascade expects a histogram-equalized grayscale image.
        cvtColor(frame, gray, COLOR_BGR2GRAY);
        equalizeHist(gray, gray);

        // scale 1.2, minNeighbors 1, face size clamped to [30x30, 400x400].
        faceDetector.detectMultiScale(gray, faces, 1.2, 1, 0, Size(30, 30), Size(400, 400));
        for (size_t t = 0; t < faces.size(); t++) {
            count++;
            rectangle(frame, faces[t], Scalar(0, 255, 0), 2, 8, 0);
        }

        // Bug fix: the original streamed `time` — which resolves to the
        // <ctime> function pointer, not a duration — into the label.
        // Compute the real per-frame latency instead.
        double inference_ms = (getTickCount() - start) * 1000.0 / getTickFrequency();
        float fps = static_cast<float>(getTickFrequency() / (getTickCount() - start));
        ostringstream ss;
        ss << "FPS: " << fps << " ; inference time: " << inference_ms << " ms";
        putText(frame, ss.str(), Point(20, 20), 0, 0.75, Scalar(0, 0, 255), 2, 8);

        imshow("haar_face_detection", frame);
        if (waitKey(1) >= 0)
            break;
    }
    printf("total face: %d\n", count);
}
2、 DNN人臉檢測
#include <opencv2/dnn.hpp>
#include <opencv2/opencv.hpp>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

using namespace cv;
using namespace cv::dnn;
using namespace std;

// Input geometry and normalization constants for OpenCV's SSD face detector.
const size_t inWidth = 300;
const size_t inHeight = 300;
const double inScaleFactor = 1.0;
const Scalar meanVal(104.0, 177.0, 123.0);   // BGR mean subtracted per channel
const float confidenceThreshold = 0.7;

void face_detect_dnn();
void mtcnn_demo();

int main(int argc, char** argv) {
    face_detect_dnn();
    waitKey(0);
    return 0;
}

// Run the TensorFlow SSD face detector shipped with OpenCV on a video file:
// draw a rectangle plus a confidence label per face, overlay FPS / network
// inference time, and print the total detection count at the end.
void face_detect_dnn() {
    // TensorFlow (uint8-quantized) model plus its text graph description.
    std::string modelBinary = "./models/dnn/face_detector/opencv_face_detector_uint8.pb";
    std::string modelDesc = "./models/dnn/face_detector/opencv_face_detector.pbtxt";

    // Initialize the network on the CPU with the default OpenCV backend.
    dnn::Net net = readNetFromTensorflow(modelBinary, modelDesc);
    net.setPreferableBackend(DNN_BACKEND_OPENCV);
    net.setPreferableTarget(DNN_TARGET_CPU);
    if (net.empty()) {
        printf("Load models fail...\n");
        return;
    }

    // Open the video source.
    // VideoCapture capture(0);            // uncomment to read from the webcam
    VideoCapture capture("./video/test.mp4");
    if (!capture.isOpened()) {
        printf("Don't find video...\n");
        return;
    }

    Mat frame;
    int count = 0;
    while (capture.read(frame)) {
        int64 start = getTickCount();
        if (frame.empty()) {
            break;
        }
        // flip(frame, frame, 1);          // horizontal mirror, if needed
        imshow("input", frame);

        if (frame.channels() == 4)
            cvtColor(frame, frame, COLOR_BGRA2BGR);

        // Resize/normalize the frame into the 300x300 blob the net expects.
        Mat inputBlob = blobFromImage(frame, inScaleFactor,
                                      Size(inWidth, inHeight), meanVal, false, false);
        net.setInput(inputBlob, "data");

        // Forward pass; output shape is [1, 1, N, 7] per SSD convention.
        Mat detection = net.forward("detection_out");
        vector<double> layersTimings;
        double freq = getTickFrequency() / 1000;
        double time = net.getPerfProfile(layersTimings) / freq;   // ms
        Mat detectionMat(detection.size[2], detection.size[3], CV_32F,
                         detection.ptr<float>());

        ostringstream ss;
        for (int i = 0; i < detectionMat.rows; i++) {
            // Column 2 holds the confidence in [0, 1].
            float confidence = detectionMat.at<float>(i, 2);
            if (confidence > confidenceThreshold) {
                count++;
                // Columns 3..6 are the box corners, normalized to [0, 1].
                int xLeftBottom = static_cast<int>(detectionMat.at<float>(i, 3) * frame.cols);
                int yLeftBottom = static_cast<int>(detectionMat.at<float>(i, 4) * frame.rows);
                int xRightTop = static_cast<int>(detectionMat.at<float>(i, 5) * frame.cols);
                int yRightTop = static_cast<int>(detectionMat.at<float>(i, 6) * frame.rows);

                Rect object(xLeftBottom, yLeftBottom,
                            xRightTop - xLeftBottom, yRightTop - yLeftBottom);
                rectangle(frame, object, Scalar(0, 255, 0));

                // Bug fix: the original never reset `ss` between detections,
                // so labels accumulated ("Face: 0.90.85...") across faces.
                ss.str("");
                ss.clear();
                ss << confidence;
                std::string label = "Face: " + ss.str();

                int baseLine = 0;
                Size labelSize = getTextSize(label, FONT_HERSHEY_SIMPLEX, 0.5, 1, &baseLine);
                rectangle(frame,
                          Rect(Point(xLeftBottom, yLeftBottom - labelSize.height),
                               Size(labelSize.width, labelSize.height + baseLine)),
                          Scalar(255, 255, 255), FILLED);
                putText(frame, label, Point(xLeftBottom, yLeftBottom),
                        FONT_HERSHEY_SIMPLEX, 0.5, Scalar(0, 0, 0));
            }
        }

        float fps = getTickFrequency() / (getTickCount() - start);
        ss.str("");
        ss.clear();
        ss << "FPS: " << fps << " ; inference time: " << time << " ms";
        putText(frame, ss.str(), Point(20, 20), 0, 0.75, Scalar(0, 0, 255), 2, 8);

        imshow("dnn_face_detection", frame);
        if (waitKey(1) >= 0)
            break;
    }
    printf("total face: %d\n", count);
}
以上就是本文的全部內容,希望對大家的學習有所幫助,也希望大家多多支持腳本之家。
相關文章
C語言關系運算符實例詳解
本文主要介紹C語言的關系運算符的知識,這里提供實例代碼以便參考,希望能幫助有需要的小伙伴2016-07-07C語言中結構體struct編寫的一些要點解析
這篇文章主要介紹了C語言中結構體struct編寫的一些要點解析,談到了結構體的聲明和指針指向等重要知識點,需要的朋友可以參考下2016-04-04C++深入探究類與對象之對象模型與this指針使用方法
C++對象模型中只有類的非static成員以及一個指向虛函數表的指針被配置于類對象內,其他都在類對象外,在 C++ 中,每一個對象都能通過 this 指針來訪問自己的地址。this 指針是所有成員函數的隱含參數。因此,在成員函數內部,它可以用來指向調用對象2022-04-04C指針原理教程之編譯原理-小型計算器實現
本文給大家分享的是如何使用C語言編寫一個小型計算器的實例代碼,有需要的小伙伴可以參考下2019-02-02