Download the OpenVINO release package from
https://github.com/openvinotoolkit/openvino/releases
and simply extract it (any path works).

Environment configuration
Add the following two directories to the system PATH so the runtime DLLs can be found (the masked prefix is the OpenVINO install path):
********(OpenVINO install path)\runtime\bin\intel64\Release
********\runtime\3rdparty\tbb\bin
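A quick way to confirm the PATH setup is to build and run a tiny console program that creates an Inference Engine Core and lists the devices it can see. This is a minimal sketch, assuming the OpenVINO include and library directories are already configured for the project; it uses the same inference_engine.hpp header as the detector code below.

// CheckOpenVINO.cpp : sanity check that the OpenVINO runtime DLLs are found through PATH.
#include <iostream>
#include <inference_engine.hpp>

int main()
{
    InferenceEngine::Core ie;   // construction fails if the runtime DLLs cannot be loaded
    for (const auto& device : ie.GetAvailableDevices())
    {
        std::cout << "available device: " << device << std::endl;   // e.g. CPU, GPU
    }
    return 0;
}

If this prints GPU, the "GPU" device string used in Detector::init below is available on the machine.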
// Zy_OV_YOLOV5.cpp : Defines the exported functions for the DLL application.
//
#include "stdafx.h"
#include "Zy_OV_YOLOV5.h"

// This is an example of an exported variable
ZY_OV_YOLOV5_API int nZy_OV_YOLOV5 = 0;

// This is an example of an exported function.
ZY_OV_YOLOV5_API int fnZy_OV_YOLOV5(void)
{
    return 42;
}

// This is the constructor of a class that has been exported.
// See Zy_OV_YOLOV5.h for the class definition.
CZy_OV_YOLOV5::CZy_OV_YOLOV5()
{
    return;
}

extern "C"
{
    YoloModel::YoloModel()
    {
        detector = new Detector;
    }

    /*bool YoloModel::init(string xml_path, string bin_path, double cof_threshold, double nms_area_threshold, int classNum)
    {
        bool initflag = detector->init(xml_path, bin_path, cof_threshold, nms_area_threshold, classNum);
        return initflag;
    }
    bool YoloModel::YoloInfer(const Mat& srcImg, vector<Object>& vecObj)
    {
        bool inferflag = detector->yoloInfer(srcImg, vecObj);
        return inferflag;
    }*/

    bool YoloModel::init(string xml_path, string bin_path, double cof_threshold, double nms_area_threshold)
    {
        bool initflag = detector->init(xml_path, bin_path, cof_threshold, nms_area_threshold);
        return initflag;
    }

    bool YoloModel::YoloInfer(const Mat& srcImg, vector<Rect>& vecObj)
    {
        bool inferflag = detector->yoloInfer(srcImg, vecObj);
        return inferflag;
    }
}
#include "detector.h" Detector::Detector(){} Detector::~Detector(){} bool Detector::parse_yolov5(const Blob::Ptr &blob, int net_grid, float cof_thr, vector<Rect>& o_rect, vector<float>& o_rect_cof, vector<int>& classids) { vector<int> anchors = get_anchors(net_grid); LockedMemory<const void> blobMapped = as<MemoryBlob>(blob)->rmap(); const float *output_blob = blobMapped.as<float *>(); int item_size = _classNum + 5; for(int n=0; n<3; ++n) { for(int i=0; i<net_grid; ++i) { for(int j=0; j<net_grid; ++j) { double box_prob = output_blob[n*net_grid*net_grid*item_size + i*net_grid*item_size + j*item_size + 4]; box_prob = sigmoid(box_prob); if(box_prob < cof_thr) continue; //注意此处输出为中心点坐标,需要转化为角点坐标 double x = output_blob[n*net_grid*net_grid*item_size + i*net_grid*item_size + j*item_size + 0]; double y = output_blob[n*net_grid*net_grid*item_size + i*net_grid*item_size + j*item_size + 1]; double w = output_blob[n*net_grid*net_grid*item_size + i*net_grid*item_size + j*item_size + 2]; double h = output_blob[n*net_grid*net_grid*item_size + i*net_grid*item_size + j *item_size+ 3]; double max_prob = 0; int idx = 0; for(int t=5; t< item_size; ++t){ double tp = output_blob[n*net_grid*net_grid*item_size + i*net_grid*item_size + j*item_size + t]; tp = sigmoid(tp); if(tp > max_prob){ max_prob = tp; idx = t; } } float cof = box_prob * max_prob; if(cof < cof_thr) continue; /*x = (sigmoid(x) * 2 - 0.5 + j) * 640.0f / net_grid; y = (sigmoid(y) * 2 - 0.5 + i) * 640.0f / net_grid;*/ x = (sigmoid(x) * 2 - 0.5 + j) * static_cast<float>(imgSize) / net_grid; y = (sigmoid(y) * 2 - 0.5 + i) * static_cast<float>(imgSize) / net_grid; int left = n * 2; int right = n * 2 + 1; w = pow(sigmoid(w) * 2, 2) * anchors[left]; h = pow(sigmoid(h) * 2, 2) * anchors[right]; double r_x = x - w / 2; double r_y = y - h / 2; Rect rect = Rect(round(r_x), round(r_y), round(w), round(h)); o_rect.push_back(rect); o_rect_cof.push_back(cof); classids.push_back(idx - 5); } } } if (o_rect.size() == 0) { return false; } return true; } //bool Detector::init(string xml_path, string bin_path, double cof_threshold, double nms_area_threshold, int classNum) //{ // _xml_path = xml_path; // _bin_path = bin_path; // _cof_threshold = cof_threshold; // _nms_area_threshold = nms_area_threshold; // InferenceEngine::Core ie; // auto cnnNetwork = ie.ReadNetwork(_xml_path, _bin_path); // // // // 输入设置 // InputsDataMap inputInfo(cnnNetwork.getInputsInfo()); // InputInfo::Ptr& input = inputInfo.begin()->second; // _input_name = inputInfo.begin()->first; // cout << "input_name: " << _input_name << endl; // input->setPrecision(Precision::FP32); // input->getInputData()->setLayout(InferenceEngine::Layout::NCHW); // ICNNNetwork::InputShapes inputShapes = cnnNetwork.getInputShapes(); // // int InputImgSize = inputShapes["images"].back(); // imgSize = InputImgSize; // _classNum = classNum; // // cout << "InputImgSize:" << imgSize << endl; // if (imgSize != 160 && imgSize != 640 && imgSize != 1280) // { // return false; // } // // // SizeVector& inSizeVector = inputShapes.begin()->second; // cnnNetwork.reshape(inputShapes); // // 输出设置 // _outputinfo = OutputsDataMap(cnnNetwork.getOutputsInfo()); // for (auto &output : _outputinfo) // { // cout << "output_name: " << output.first << endl; // output.second->setPrecision(Precision::FP32); // } // //_network = ie.LoadNetwork(cnnNetwork, "CPU"); // _network = ie.LoadNetwork(cnnNetwork, "GPU"); // return true; //} //bool Detector::process_frame(Mat& inframe, vector<Object>& detected_objects) //{ // resize(inframe, inframe, 
Size(imgSize, imgSize)); // cvtColor(inframe, inframe, COLOR_BGR2RGB); // size_t img_size = imgSize * imgSize; // // InferenceEngine::InferRequest infer_request = _network.CreateInferRequest(); // Blob::Ptr frameBlob = infer_request.GetBlob(_input_name); // // InferenceEngine::LockedMemory<void> blobMapped = InferenceEngine::as<InferenceEngine::MemoryBlob>(frameBlob)->wmap(); // float* blob_data = blobMapped.as<float*>(); // // for(size_t row = 0; row < imgSize; row++) { // for(size_t col = 0; col < imgSize; col++) { // for(size_t ch = 0; ch < 3; ch++) { // blob_data[img_size * ch + row * imgSize + col] = float(inframe.at<Vec3b>(row, col)[ch]) / 255.0f; // } // } // } // // infer_request.Infer(); // vector<Rect> origin_rect; // vector<float> origin_rect_cof; // vector<int> classids; // // // int s[3]; // if (640 == imgSize) // { // s[0] = 80; // s[1] = 40; // s[2] = 20; // } // else if (160 == imgSize) // { // s[0] = 20; // s[1] = 10; // s[2] = 5; // } // // int i = 0; // for (auto &output : _outputinfo) { // auto output_name = output.first; // // cout << "output_name: " << output_name << endl; // Blob::Ptr blob = infer_request.GetBlob(output_name); // parse_yolov5(blob, s[i], _cof_threshold, origin_rect, origin_rect_cof, classids); // ++i; // if (i == 3) // { // break; // } // } // // vector<int> final_id; // dnn::NMSBoxes(origin_rect, origin_rect_cof, _cof_threshold, _nms_area_threshold, final_id); // for(int i=0; i<final_id.size(); ++i) // { // Rect resize_rect = origin_rect[final_id[i]]; // detected_objects.push_back(Object{ origin_rect_cof[final_id[i]], resize_rect,classids[final_id[i]] }); // } // // return true; //} //bool Detector::yoloInfer(const Mat& srcImg, vector<Object>& vecObj) //{ // Mat src_copy; // int width = srcImg.cols; // int height = srcImg.rows; // int channel = srcImg.channels(); // double scale = min(static_cast<double>(imgSize) / width, static_cast<double>(imgSize) / height); // int w = round(width * scale); // int h = round(height * scale); // resize(srcImg, src_copy, Size(w, h)); // int top = 0, bottom = 0, left = 0, right = 0; // if (w > h) // { // top = (w - h) / 2; // bottom = (w - h) - top; // } // else if (h > w) // { // left = (h - w) / 2; // right = (h - w) - left; // } // copyMakeBorder(src_copy, src_copy, top, bottom, left, right, BORDER_CONSTANT, Scalar(114, 114, 114)); // vector<Object> detected_objects; // auto start = chrono::high_resolution_clock::now(); // process_frame(src_copy, detected_objects); // infer // auto end = chrono::high_resolution_clock::now(); // std::chrono::duration<double> diff = end - start; // cout << "use " << diff.count() << " s" << endl; // for (int i = 0; i < detected_objects.size(); ++i) { // int xmin = max(detected_objects[i].rect.x - left, 0); // int ymin = max(detected_objects[i].rect.y - top, 0); // int width = detected_objects[i].rect.width; // int height = detected_objects[i].rect.height; // Rect rect(int(xmin / scale), int(ymin / scale), int(width / scale), int(height / scale)); // vecObj.push_back(Object{ detected_objects[i].prob,rect,detected_objects[i].classid }); // } // // return true; //} bool Detector::init(string xml_path, string bin_path, double cof_threshold, double nms_area_threshold) { int classNum = 1; // 外面增加类的时候 此处需修改 _xml_path = xml_path; _bin_path = bin_path; _cof_threshold = cof_threshold; _nms_area_threshold = nms_area_threshold; InferenceEngine::Core ie; auto cnnNetwork = ie.ReadNetwork(_xml_path, _bin_path); // 输入设置 InputsDataMap inputInfo(cnnNetwork.getInputsInfo()); InputInfo::Ptr& input = 
inputInfo.begin()->second; _input_name = inputInfo.begin()->first; cout << "input_name: " << _input_name << endl; input->setPrecision(Precision::FP32); input->getInputData()->setLayout(InferenceEngine::Layout::NCHW); ICNNNetwork::InputShapes inputShapes = cnnNetwork.getInputShapes(); int InputImgSize = inputShapes["images"].back(); imgSize = InputImgSize; _classNum = classNum; cout << "InputImgSize:" << imgSize << endl; if (imgSize != 160 && imgSize != 640 && imgSize != 1280) { return false; } SizeVector& inSizeVector = inputShapes.begin()->second; cnnNetwork.reshape(inputShapes); // 输出设置 _outputinfo = OutputsDataMap(cnnNetwork.getOutputsInfo()); for (auto& output : _outputinfo) { cout << "output_name: " << output.first << endl; output.second->setPrecision(Precision::FP32); } //_network = ie.LoadNetwork(cnnNetwork, "CPU"); _network = ie.LoadNetwork(cnnNetwork, "GPU"); return true; } bool Detector::process_frame(Mat& inframe, vector<Rect>& detected_objects) { resize(inframe, inframe, Size(imgSize, imgSize)); cvtColor(inframe, inframe, COLOR_BGR2RGB); size_t img_size = imgSize * imgSize; InferenceEngine::InferRequest infer_request = _network.CreateInferRequest(); Blob::Ptr frameBlob = infer_request.GetBlob(_input_name); InferenceEngine::LockedMemory<void> blobMapped = InferenceEngine::as<InferenceEngine::MemoryBlob>(frameBlob)->wmap(); float* blob_data = blobMapped.as<float*>(); for (size_t row = 0; row < imgSize; row++) { for (size_t col = 0; col < imgSize; col++) { for (size_t ch = 0; ch < 3; ch++) { blob_data[img_size * ch + row * imgSize + col] = float(inframe.at<Vec3b>(row, col)[ch]) / 255.0f; } } } infer_request.Infer(); vector<Rect> origin_rect; vector<float> origin_rect_cof; vector<int> classids; int s[3]; if (640 == imgSize) { s[0] = 80; s[1] = 40; s[2] = 20; } else if (160 == imgSize) { s[0] = 20; s[1] = 10; s[2] = 5; } int i = 0; for (auto& output : _outputinfo) { auto output_name = output.first; // cout << "output_name: " << output_name << endl; Blob::Ptr blob = infer_request.GetBlob(output_name); parse_yolov5(blob, s[i], _cof_threshold, origin_rect, origin_rect_cof, classids); ++i; if (i == 3) { break; } } vector<int> final_id; dnn::NMSBoxes(origin_rect, origin_rect_cof, _cof_threshold, _nms_area_threshold, final_id); for (int i = 0; i < final_id.size(); ++i) { Rect resize_rect = origin_rect[final_id[i]]; detected_objects.push_back(resize_rect); } return true; } bool Detector::yoloInfer(const Mat& srcImg, vector<Rect>& vecObj) { Mat src_copy; int width = srcImg.cols; int height = srcImg.rows; int channel = srcImg.channels(); double scale = min(static_cast<double>(imgSize) / width, static_cast<double>(imgSize) / height); int w = round(width * scale); int h = round(height * scale); resize(srcImg, src_copy, Size(w, h)); int top = 0, bottom = 0, left = 0, right = 0; if (w > h) { top = (w - h) / 2; bottom = (w - h) - top; } else if (h > w) { left = (h - w) / 2; right = (h - w) - left; } copyMakeBorder(src_copy, src_copy, top, bottom, left, right, BORDER_CONSTANT, Scalar(114, 114, 114)); vector<Rect> detected_objects; auto start = chrono::high_resolution_clock::now(); process_frame(src_copy, detected_objects); // infer auto end = chrono::high_resolution_clock::now(); std::chrono::duration<double> diff = end - start; cout << "use " << diff.count() << " s" << endl; for (int i = 0; i < detected_objects.size(); ++i) { int xmin = max(detected_objects[i].x - left, 0); int ymin = max(detected_objects[i].y - top, 0); int width = detected_objects[i].width; int height = 
detected_objects[i].height; Rect rect(int(xmin / scale), int(ymin / scale), int(width / scale), int(height / scale)); vecObj.push_back(rect); } return true; } double Detector::sigmoid(double x){ return (1 / (1 + exp(-x))); } vector<int> Detector::get_anchors(int net_grid){ vector<int> anchors(6); int a80[6] = {10, 13, 16, 30, 33, 23}; int a40[6] = {30, 61, 62, 45, 59, 119}; int a20[6] = {116, 90, 156, 198, 373, 326}; if(net_grid == 80){ anchors.insert(anchors.begin(), a80, a80 + 6); } else if(net_grid == 40){ anchors.insert(anchors.begin(), a40, a40 + 6); } else if(net_grid == 20){ anchors.insert(anchors.begin(), a20, a20 + 6); } return anchors; }
// dllmain.cpp : Defines the entry point for the DLL application.
#include "stdafx.h"

BOOL APIENTRY DllMain( HMODULE hModule,
                       DWORD  ul_reason_for_call,
                       LPVOID lpReserved
                     )
{
    switch (ul_reason_for_call)
    {
    case DLL_PROCESS_ATTACH:
    case DLL_THREAD_ATTACH:
    case DLL_THREAD_DETACH:
    case DLL_PROCESS_DETACH:
        break;
    }
    return TRUE;
}
// stdafx.cpp : source file that includes just the standard includes
// Zy_OV_YOLOV5.pch will be the pre-compiled header
// stdafx.obj will contain the pre-compiled type information

#include "stdafx.h"

// TODO: reference any additional headers you need in STDAFX.H
// and not in this file
// Zy_OV_YOLOV5.h (version used inside the DLL project; it includes detector.h directly)
// The following ifdef block is the standard way of creating macros which make exporting
// from a DLL simpler. All files within this DLL are compiled with the ZY_OV_YOLOV5_EXPORTS
// symbol defined on the command line. This symbol should not be defined on any project
// that uses this DLL. This way any other project whose source files include this file see
// ZY_OV_YOLOV5_API functions as being imported from a DLL, whereas this DLL sees symbols
// defined with this macro as being exported.
#ifdef ZY_OV_YOLOV5_EXPORTS
#define ZY_OV_YOLOV5_API __declspec(dllexport)
#else
#define ZY_OV_YOLOV5_API __declspec(dllimport)
#endif

#include "detector.h"

// This class is exported from Zy_OV_YOLOV5.dll
class ZY_OV_YOLOV5_API CZy_OV_YOLOV5 {
public:
    CZy_OV_YOLOV5(void);
    // TODO: add your methods here.
};

extern ZY_OV_YOLOV5_API int nZy_OV_YOLOV5;

ZY_OV_YOLOV5_API int fnZy_OV_YOLOV5(void);

extern "C"
{
    class ZY_OV_YOLOV5_API YoloModel
    {
    public:
        YoloModel();
        /*bool init(string xml_path, string bin_path, double cof_threshold, double nms_area_threshold, int classNum);
        bool YoloInfer(const Mat& srcImg, vector<Object>& vecObj);*/
        bool init(string xml_path, string bin_path, double cof_threshold, double nms_area_threshold);
        bool YoloInfer(const Mat& srcImg, vector<Rect>& vecObj);

    private:
        Detector* detector;
    };
}
#ifndef DETECTOR_H
#define DETECTOR_H

#include <opencv2/opencv.hpp>
#include <inference_engine.hpp>
#include <opencv2/dnn/dnn.hpp>
#include <iostream>
#include <chrono>
#include <cmath>
#include <stdlib.h>

using namespace std;
using namespace cv;
using namespace InferenceEngine;

typedef struct {
    float prob;
    cv::Rect rect;
    int classid;
} Object;

class Detector
{
public:
    Detector();
    ~Detector();
    //bool init(string xml_path, string bin_path, double cof_threshold, double nms_area_threshold, int classNum);
    //bool process_frame(Mat& inframe, vector<Object>& detected_objects);
    //bool yoloInfer(const Mat& srcImg, vector<Object>& vecObj);
    bool init(string xml_path, string bin_path, double cof_threshold, double nms_area_threshold);
    bool process_frame(Mat& inframe, vector<Rect>& detected_objects);
    bool yoloInfer(const Mat& srcImg, vector<Rect>& vecObj);

private:
    int imgSize = 640;
    int _classNum = 2;
    double sigmoid(double x);
    vector<int> get_anchors(int net_grid);
    bool parse_yolov5(const Blob::Ptr& blob, int net_grid, float cof_threshold,
                      vector<Rect>& o_rect, vector<float>& o_rect_cof, vector<int>& classids);

    ExecutableNetwork _network;
    OutputsDataMap _outputinfo;
    string _input_name;
    string _xml_path;
    string _bin_path;
    double _cof_threshold = 0.1;
    double _nms_area_threshold = 0.5;
};

#endif
#pragma once
// Including SDKDDKVer.h defines the highest available Windows platform.
// If you wish to build your application for a previous Windows platform, include WinSDKVer.h and
// set the _WIN32_WINNT macro to the platform you wish to support before including SDKDDKVer.h.
#include <SDKDDKVer.h>
// stdafx.h : include file for standard system include files,
// or project specific include files that are used frequently,
// but are changed infrequently
//

#pragma once

#include "targetver.h"

#define WIN32_LEAN_AND_MEAN             // Exclude rarely-used stuff from Windows headers
// Windows Header Files:
#include <windows.h>

// TODO: reference additional headers your program requires here
// BatchTest.cpp : This file contains the "main" function. Program execution begins and ends there.
//
#include <iostream>
#include "Zy_OV_YOLOV5.h"
#pragma comment(lib,"..//x64//Release//Zy_OV_YOLOV5.lib")

int main()
{
    YoloModel yolomodel;
    string xml_path = "ScratchModel.xml";
    string bin_path = "ScratchModel.bin";
    double scratch_th = 0.25;
    //bool initflag = yolomodel.init(xml_path, bin_path, scratch_th, 0.45, 2);   // 2 classes
    bool initflag = yolomodel.init(xml_path, bin_path, scratch_th, 0.45);
    cout << "scratch th == " << scratch_th << endl;
    if (initflag == true)
    {
        cout << "Yolo initialized successfully" << endl;
    }

    string picPath = "./ScratchImg";   // classifyImg: classification data set
    vector<cv::String> vecCVStr = {};
    cv::glob(picPath, vecCVStr);
    for (int i = 0; i < vecCVStr.size(); i++)
    {
        string each_pic_path = vecCVStr[i].c_str();
        string picFileName = each_pic_path.substr(picPath.length() + 1, each_pic_path.length() - picPath.length());
        Mat src = imread(each_pic_path, IMREAD_GRAYSCALE);

        //vector<Object> ResObj = {};
        transpose(src, src);
        //yolomodel.YoloInfer(src, ResObj);
        if (src.channels() == 1)
        {
            cvtColor(src, src, COLOR_GRAY2BGR);   // the detector expects a 3-channel BGR image
        }
        vector<Rect> ResObj = {};
        //transpose(src, src);
        yolomodel.YoloInfer(src, ResObj);

        Mat colorMat = src.clone();
        if (src.channels() == 1)
        {
            cvtColor(src, colorMat, COLOR_GRAY2BGR);
        }
        if (ResObj.size() == 0)
        {
            system("pause");
        }
        if (true)
        {
            // With the Rect-only interface these probabilities are never filled in, so this
            // OK/NG decision is a leftover from the Object-based version (see the commented loop).
            float maxokprob = 0.0f;
            float maxngprob = 0.0f;
            /*for (int i = 0; i < ResObj.size(); i++)
            {
                if (ResObj[i].classid == 0)
                {
                    maxngprob = std::max(ResObj[i].prob, maxngprob);
                }
                if (ResObj[i].classid == 1)
                {
                    maxokprob = std::max(ResObj[i].prob, maxokprob);
                }
            }*/
            if (maxokprob > maxngprob)
            {
                cv::putText(colorMat, "OK", Point(5, 30), 3, 1.2, Scalar(0, 255, 0));
                cv::putText(colorMat, "prob:" + to_string(maxokprob), Point(5, 50), 3, 0.6, Scalar(0, 255, 0));
                imwrite("D:\\Xray\\ok\\" + picFileName, colorMat);
            }
            else
            {
                cv::putText(colorMat, "NG", Point(5, 30), 3, 1.2, Scalar(0, 0, 255));
                cv::putText(colorMat, "prob:" + to_string(maxngprob), Point(5, 50), 3, 0.6, Scalar(0, 0, 255));
                imwrite("D:\\Xray\\ng\\" + picFileName, colorMat);
            }
        }

        //if (ResObj.size() == 1)
        //{
        //    if (ResObj[0].classid == 0)
        //    {
        //        cv::putText(colorMat, "NG", Point(5, 30), 3, 1.2, Scalar(0, 0, 255));
        //        imwrite("D:\\Xray\\ng\\" + picFileName, colorMat);
        //        //rectangle(colorMat, ResObj[i].rect, Scalar(0, 0, 255), 1, LINE_8, 0);
        //    }
        //    if (ResObj[0].classid == 1)
        //    {
        //        cv::putText(colorMat, "OK", Point(5, 30), 3, 1.2, Scalar(0, 255, 0));
        //        imwrite("D:\\Xray\\ok\\" + picFileName, colorMat);
        //        //rectangle(colorMat, ResObj[i].rect, Scalar(0, 255, 0), 1, LINE_8, 0);
        //    }
        //}

        //resize(colorMat, colorMat, Size(0, 0), 0.5, 0.5);
        //transpose(colorMat, colorMat);
        namedWindow("colorMat", WINDOW_GUI_NORMAL);
        imshow("colorMat", colorMat);
        waitKey(0);
    }
}
// Zy_OV_YOLOV5.h (copy used by client projects: Detector is only forward-declared,
// so consumers do not need detector.h or the OpenVINO headers)
// The following ifdef block is the standard way of creating macros which make exporting
// from a DLL simpler. All files within this DLL are compiled with the ZY_OV_YOLOV5_EXPORTS
// symbol defined on the command line. This symbol should not be defined on any project
// that uses this DLL. This way any other project whose source files include this file see
// ZY_OV_YOLOV5_API functions as being imported from a DLL, whereas this DLL sees symbols
// defined with this macro as being exported.
#ifdef ZY_OV_YOLOV5_EXPORTS
#define ZY_OV_YOLOV5_API __declspec(dllexport)
#else
#define ZY_OV_YOLOV5_API __declspec(dllimport)
#endif

#include <iostream>
#include <opencv2\opencv.hpp>

using namespace std;
using namespace cv;

// This class is exported from Zy_OV_YOLOV5.dll
class ZY_OV_YOLOV5_API CZy_OV_YOLOV5 {
public:
    CZy_OV_YOLOV5(void);
    // TODO: add your methods here.
};

extern ZY_OV_YOLOV5_API int nZy_OV_YOLOV5;

ZY_OV_YOLOV5_API int fnZy_OV_YOLOV5(void);

typedef struct {
    float prob;
    cv::Rect rect;
    int classid;
} Object;

extern "C"
{
    class ZY_OV_YOLOV5_API YoloModel
    {
    public:
        YoloModel();
        /*bool init(string xml_path, string bin_path, double cof_threshold, double nms_area_threshold, int classNum);
        bool YoloInfer(const Mat& srcImg, vector<Object>& vecObj);*/
        bool init(string xml_path, string bin_path, double cof_threshold, double nms_area_threshold);
        bool YoloInfer(const Mat& srcImg, vector<Rect>& vecObj);

    private:
        class Detector* detector;
    };
}
OpenVINO model conversion: download link
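The conversion commands themselves are not shown in the post. As a hedged sketch (the ScratchModel file names are reused from the test program above as placeholders, and the exact flags depend on the installed YOLOv5 and OpenVINO versions), a trained YOLOv5 checkpoint is typically exported to ONNX first and then converted with the Model Optimizer into the .xml/.bin IR pair that ReadNetwork loads:

python export.py --weights ScratchModel.pt --include onnx
mo --input_model ScratchModel.onnx --output_dir .

On older OpenVINO releases the Model Optimizer is invoked as python mo.py instead of the mo entry point.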