This post documents the detailed steps I took to port a trained YOLOv5 model to an Android phone. It was a process full of pitfalls and hair-pulling; the steps are as follows.
Required environment:
PyCharm/VS Code, Android Studio, Visual Studio 2017
First, before exporting to ONNX, modify the forward() of the Focus module in models/common.py, replacing the slice-and-concat with a plain concat (the original line is kept commented out):

class Focus(nn.Module):
    # Focus wh information into c-space
    def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True):  # ch_in, ch_out, kernel, stride, padding, groups
        super(Focus, self).__init__()
        self.conv = Conv(c1 * 4, c2, k, s, p, g, act)

    def forward(self, x):  # x(b,c,w,h) -> y(b,4c,w/2,h/2)
        return self.conv(torch.cat([x, x, x, x], 1))
        # return self.conv(torch.cat([x[..., ::2, ::2], x[..., 1::2, ::2], x[..., ::2, 1::2], x[..., 1::2, 1::2]], 1))
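The post jumps from the Focus change straight to the simplified .onnx, so here is a minimal export sketch to fill the gap, assuming a checkpoint named yolov5s.pt produced by the ultralytics/yolov5 training code (the repo's own export script, models/export.py in the releases of that time, does the same with more options):

import torch

# Load the trained checkpoint; YOLOv5 checkpoints store the model object under the 'model' key.
ckpt = torch.load('yolov5s.pt', map_location='cpu')
model = ckpt['model'].float().eval()
model.model[-1].export = True   # make the Detect() head output raw feature maps only

dummy = torch.zeros(1, 3, 640, 640)   # N, C, H, W; match your training image size
torch.onnx.export(model, dummy, 'yolov5s.onnx',
                  opset_version=11,
                  input_names=['images'],
                  output_names=['output'])

Once the export finishes, simplify the resulting .onnx file with onnx-simplifier: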
python -m onnxsim yolov5s.onnx yolov5ssim.onnx
Replace "yolov5s.onnx" with your own model's file name. If onnx-simplifier is not installed yet, install it first with pip install onnx-simplifier, then run the simplification command above (python -m onnxsim yolov5s.onnx yolov5ssim.onnx).
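Before moving on to the conversion tools, it is worth a quick sanity check of the simplified model. A minimal sketch, assuming the file name yolov5ssim.onnx from the command above:

import onnx

model = onnx.load('yolov5ssim.onnx')
onnx.checker.check_model(model)                  # raises if the graph is structurally invalid
print([o.name for o in model.graph.output])      # the three output blob names of the detection heads

The printed output names usually carry over into the converted NCNN .param, and they are the layer names that end up in the layers table of YoloV5.h further down ("output", "385", "404" for my model), so it pays to note them here.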
Next, compile protobuf (protobuf-3.4.0 here), which the NCNN conversion tools depend on. Open a Visual Studio developer command prompt (nmake is only available there), then switch to the drive and directory where the protobuf source was unpacked:

d:
cd D:\JHFimportantthing\protobuf-3.4.0

Then run the following commands in order (change the paths to your own):

mkdir build-vs2015
cd build-vs2015
cmake -G"NMake Makefiles" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=%cd%/install -Dprotobuf_BUILD_TESTS=OFF -Dprotobuf_MSVC_STATIC_RUNTIME=OFF ../cmake
nmake
nmake install
This part is also important but very simple: following the link above, just download and install it (you can search for a tutorial yourself). Then run the NCNN build in the same developer command prompt, pointing CMake at the protobuf installed in the previous step; this produces the onnx2ncnn and ncnnoptimize tools used next (again, change the paths to your own):

mkdir build-vs2015
cd build-vs2015
cmake -G"NMake Makefiles" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=%cd%/install -DProtobuf_INCLUDE_DIR=D:/JHFimportantthing/protobuf-3.4.0/build-vs2015/install/include -DProtobuf_LIBRARIES=D:/JHFimportantthing/protobuf-3.4.0/build-vs2015/install/lib/libprotobuf.lib -DProtobuf_PROTOC_EXECUTABLE=D:/JHFimportantthing/protobuf-3.4.0/build-vs2015/install/bin/protoc.exe ..
nmake
nmake install
Now convert the simplified ONNX model:

./onnx2ncnn best-sim.onnx best.param best.bin

Change the file names to your own. When the conversion succeeds, two new files are generated: a .param and a .bin. Then run

./ncnnoptimize best.param best.bin best-opt.param best-opt.bin 65536

which optimizes and shrinks the model. Finally, open the .param file generated in the previous step, scroll straight to the end, and change the three hard-coded numbers there (the fixed output sizes of the three Reshape layers) to -1. This step is what makes the detection results display correctly after the Android port; without it you get countless boxes densely covering the original image.
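If you would rather not edit the .param by hand, here is a small sketch that does the same edit programmatically. It is only a sketch under the assumption (true for the usual YOLOv5-to-NCNN conversion) that the three numbers to change are the 0=... arguments of the Reshape layers near the end of the file; open your own .param in a text editor first and confirm that before relying on it.

# Hypothetical helper: rewrite the fixed Reshape sizes in an NCNN .param file to -1.
# Assumes the layers to patch are exactly those whose type column is "Reshape" (verify in your own file).
import re

def patch_reshape_dims(param_in: str, param_out: str) -> None:
    with open(param_in, 'r') as f:
        lines = f.readlines()
    patched = []
    for line in lines:
        if line.split() and line.split()[0] == 'Reshape':
            # replace the " 0=<number>" argument with " 0=-1" so the size is inferred at runtime
            line = re.sub(r' 0=\d+', ' 0=-1', line, count=1)
        patched.append(line)
    with open(param_out, 'w') as f:
        f.writelines(patched)

patch_reshape_dims('best-opt.param', 'best-opt.param')  # edits in place; keep a backup of the original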
The rest happens in Android Studio. The converted .param/.bin files go into the app's assets folder (they are loaded through AAssetManager below), and the ncnn Android library goes under src/main/cpp/ncnn as referenced by the CMakeLists further down. First the C++ side. YoloV5.h:

#ifndef MYCAMERA_YOLOV5_H
#define MYCAMERA_YOLOV5_H

#include "ncnn/net.h"

namespace cv {
    typedef struct {
        int width;
        int height;
    } Size;
}

typedef struct {
    std::string name;
    int stride;
    std::vector<cv::Size> anchors;
} YoloLayerData;

typedef struct BoxInfo {
    float x1;
    float y1;
    float x2;
    float y2;
    float score;
    int label;
} BoxInfo;

class YoloV5 {
public:
    YoloV5(AAssetManager* mgr, const char* param, const char* bin);
    ~YoloV5();
    std::vector<BoxInfo> detect(JNIEnv* env, jobject image, float threshold, float nms_threshold);
    /* std::vector<std::string> labels{"person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch", "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"}; */
    std::vector<std::string> labels{"mxymx", "cngd", "gudu", "xcbgd", "ydgm"};

private:
    static std::vector<BoxInfo> decode_infer(ncnn::Mat &data, int stride, const cv::Size& frame_size, int net_size, int num_classes, const std::vector<cv::Size>& anchors, float threshold);
    static void nms(std::vector<BoxInfo>& result, float nms_threshold);
    ncnn::Net* Net;
    int input_size = 640;
    //int num_class = 80;
    int num_class = 5;
    /* std::vector<YoloLayerData> layers{
        {"394", 32, {{116,90}, {156,198}, {373,326}}},
        {"375", 16, {{30,61},  {62,45},   {59,119}}},
        {"output", 8, {{10,13}, {16,30},  {33,23}}},
    }; */
    std::vector<YoloLayerData> layers{
        {"output", 32, {{116,90}, {156,198}, {373,326}}},
        {"385",    16, {{30,61},  {62,45},   {59,119}}},
        {"404",     8, {{10,13},  {16,30},   {33,23}}},
    };

public:
    static YoloV5 *detector;
    static bool hasGPU;
};

#endif //YOLOV5_H
The .cpp file, YoloV5.cpp:
//
// Created by 13071 on 2021/1/14.
//
#include "YoloV5.h"

bool YoloV5::hasGPU = false;
YoloV5* YoloV5::detector = nullptr;

YoloV5::YoloV5(AAssetManager* mgr, const char *param, const char *bin) {
    Net = new ncnn::Net();
    Net->load_param(mgr, param);
    Net->load_model(mgr, bin);
}

YoloV5::~YoloV5() {
    delete Net;
}

std::vector<BoxInfo> YoloV5::detect(JNIEnv* env, jobject image, float threshold, float nms_threshold) {
    AndroidBitmapInfo img_size;
    AndroidBitmap_getInfo(env, image, &img_size);
    ncnn::Mat in = ncnn::Mat::from_android_bitmap_resize(env, image, ncnn::Mat::PIXEL_BGR2RGB, input_size / 2, input_size / 2);
    float norm[3] = {1 / 255.f, 1 / 255.f, 1 / 255.f};
    float mean[3] = {0, 0, 0};
    in.substract_mean_normalize(mean, norm);
    auto ex = Net->create_extractor();
    ex.set_light_mode(true);
    ex.set_num_threads(4);
    ex.input(0, in);
    std::vector<BoxInfo> result;
    for (const auto& layer : layers) {
        ncnn::Mat blob;
        ex.extract(layer.name.c_str(), blob);
        auto boxes = decode_infer(blob, layer.stride, {(int)img_size.width, (int)img_size.height}, input_size, num_class, layer.anchors, threshold);
        result.insert(result.begin(), boxes.begin(), boxes.end());
    }
    nms(result, nms_threshold);
    return result;
}

inline float fast_exp(float x) {
    union { uint32_t i; float f; } v{};
    v.i = (1 << 23) * (1.4426950409 * x + 126.93490512f);
    return v.f;
}

inline float sigmoid(float x) {
    return 1.0f / (1.0f + fast_exp(-x));
}

std::vector<BoxInfo> YoloV5::decode_infer(ncnn::Mat &data, int stride, const cv::Size &frame_size, int net_size, int num_classes, const std::vector<cv::Size>& anchors, float threshold) {
    std::vector<BoxInfo> result;
    int grid_size = int(sqrt(data.h));
    float *mat_data[data.c];
    for (int i = 0; i < data.c; i++) {
        mat_data[i] = data.channel(i);
    }
    float cx, cy, w, h;
    for (int shift_y = 0; shift_y < grid_size; shift_y++) {
        for (int shift_x = 0; shift_x < grid_size; shift_x++) {
            int loc = shift_x + shift_y * grid_size;
            for (int i = 0; i < 3; i++) {
                float *record = mat_data[i];
                float *cls_ptr = record + 5;
                for (int cls = 0; cls < num_classes; cls++) {
                    float score = sigmoid(cls_ptr[cls]) * sigmoid(record[4]);
                    if (score > threshold) {
                        cx = (sigmoid(record[0]) * 2.f - 0.5f + (float)shift_x) * (float)stride;
                        cy = (sigmoid(record[1]) * 2.f - 0.5f + (float)shift_y) * (float)stride;
                        w = pow(sigmoid(record[2]) * 2.f, 2) * anchors[i].width;
                        h = pow(sigmoid(record[3]) * 2.f, 2) * anchors[i].height;
                        //printf("[grid size=%d, stride = %d]x y w h %f %f %f %f\n", grid_size, stride, record[0], record[1], record[2], record[3]);
                        BoxInfo box;
                        box.x1 = std::max(0, std::min(frame_size.width, int((cx - w / 2.f) * (float)frame_size.width / (float)net_size)));
                        box.y1 = std::max(0, std::min(frame_size.height, int((cy - h / 2.f) * (float)frame_size.height / (float)net_size)));
                        box.x2 = std::max(0, std::min(frame_size.width, int((cx + w / 2.f) * (float)frame_size.width / (float)net_size)));
                        box.y2 = std::max(0, std::min(frame_size.height, int((cy + h / 2.f) * (float)frame_size.height / (float)net_size)));
                        box.score = score;
                        box.label = cls;
                        result.push_back(box);
                    }
                }
            }
            for (auto& ptr : mat_data) {
                ptr += (num_classes + 5);
            }
        }
    }
    return result;
}

void YoloV5::nms(std::vector<BoxInfo> &input_boxes, float NMS_THRESH) {
    std::sort(input_boxes.begin(), input_boxes.end(), [](BoxInfo a, BoxInfo b) { return a.score > b.score; });
    std::vector<float> vArea(input_boxes.size());
    for (int i = 0; i < int(input_boxes.size()); ++i) {
        vArea[i] = (input_boxes.at(i).x2 - input_boxes.at(i).x1 + 1) * (input_boxes.at(i).y2 - input_boxes.at(i).y1 + 1);
    }
    for (int i = 0; i < int(input_boxes.size()); ++i) {
        for (int j = i + 1; j < int(input_boxes.size());) {
            float xx1 = std::max(input_boxes[i].x1, input_boxes[j].x1);
            float yy1 = std::max(input_boxes[i].y1, input_boxes[j].y1);
            float xx2 = std::min(input_boxes[i].x2, input_boxes[j].x2);
            float yy2 = std::min(input_boxes[i].y2, input_boxes[j].y2);
            float w = std::max(float(0), xx2 - xx1 + 1);
            float h = std::max(float(0), yy2 - yy1 + 1);
            float inter = w * h;
            float ovr = inter / (vArea[i] + vArea[j] - inter);
            if (ovr >= NMS_THRESH) {
                input_boxes.erase(input_boxes.begin() + j);
                vArea.erase(vArea.begin() + j);
            } else {
                j++;
            }
        }
    }
}
The JNI entry file, jni_interface.cpp (the one listed in the CMakeLists below), starts with:

#include <jni.h>
#include <string>
#include <ncnn/gpu.h>
#include <android/asset_manager_jni.h>
#include <android/log.h>
#include "YoloV5.h"

JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved) {
    ncnn::create_gpu_instance();
    if (ncnn::get_gpu_count() > 0) {
        YoloV5::hasGPU = true;
    }
    return JNI_VERSION_1_4;
}

JNIEXPORT void JNI_OnUnload(JavaVM* vm, void* reserved) {
    ncnn::destroy_gpu_instance();
}
On the Java side, Box.java:

import android.graphics.Color;
import android.graphics.RectF;
import java.util.Random;

public class Box {
    public float x0, y0, x1, y1;
    private int label;
    private float score;
    private static String[] labels = {"mxymx", "cngd", "gudu", "xcbgd", "ydgm"};

    public Box(float x0, float y0, float x1, float y1, int label, float score) {
        this.x0 = x0;
        this.y0 = y0;
        this.x1 = x1;
        this.y1 = y1;
        this.label = label;
        this.score = score;
    }

    public RectF getRect() {
        return new RectF(x0, y0, x1, y1);
    }

    public String getLabel() {
        return labels[label];
    }

    public float getScore() {
        return score;
    }

    public int getColor() {
        Random random = new Random(label);
        return Color.argb(255, random.nextInt(256), random.nextInt(256), random.nextInt(256));
    }
}
And YOLOv5.java:

import android.content.res.AssetManager;
import android.graphics.Bitmap;

public class YOLOv5 {
    static {
        System.loadLibrary("yolov5");
    }

    public static native void init(AssetManager manager);

    public static native Box[] detect(Bitmap bitmap, double threshold, double nms_threshold);
}
At this point there will be an error: the two declarations

public static native void init(AssetManager manager);
public static native Box[] detect(Bitmap bitmap, double threshold, double nms_threshold);

are flagged as having no C++ counterpart ("Reports native method declarations in Java where no corresponding JNI function is found"). First clean the project; a purple light-bulb hint then appears above these two lines. Follow that hint and Android Studio will automatically generate the corresponding stubs in the interface file jni_interface.cpp; the purple bulb then turns into a C++ navigation hint, and clicking it jumps straight into jni_interface.cpp for editing. The contents are as follows (paste selectively):
extern "C"
JNIEXPORT void JNICALL
Java_com_example_mycamera_YOLOv5_init(JNIEnv *env, jclass clazz, jobject manager) {
    // TODO: implement init()
    if (YoloV5::detector == nullptr) {
        AAssetManager* mgr = AAssetManager_fromJava(env, manager);
        //YoloV5::detector = new YoloV5(mgr, "yolov5.param", "yolov5.bin");
        YoloV5::detector = new YoloV5(mgr, "last-opt.param", "last-opt.bin");
    }
}

extern "C"
JNIEXPORT jobjectArray JNICALL
Java_com_example_mycamera_YOLOv5_detect(JNIEnv *env, jclass clazz, jobject bitmap, jdouble threshold, jdouble nms_threshold) {
    // TODO: implement detect()
    auto result = YoloV5::detector->detect(env, bitmap, threshold, nms_threshold);
    auto box_cls = env->FindClass("com/example/mycamera/Box");
    auto cid = env->GetMethodID(box_cls, "<init>", "(FFFFIF)V");
    jobjectArray ret = env->NewObjectArray(result.size(), box_cls, nullptr);
    int i = 0;
    for (auto& box : result) {
        env->PushLocalFrame(1);
        jobject obj = env->NewObject(box_cls, cid, box.x1, box.y1, box.x2, box.y2, box.label, box.score);
        obj = env->PopLocalFrame(obj);
        env->SetObjectArrayElement(ret, i++, obj);
    }
    return ret;
}
The CMakeLists.txt under src/main/cpp:

# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html

# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fopenmp")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fopenmp")
if(DEFINED ANDROID_NDK_MAJOR AND ${ANDROID_NDK_MAJOR} GREATER 20)
    set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -static-openmp")
endif()

# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
add_library( # Sets the name of the library.
        yolov5
        # Sets the library as a shared library.
        SHARED
        # Provides a relative path to your source file(s).
        jni_interface.cpp
        YoloV5.cpp)

include_directories(ncnn/include)

# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.
find_library(log-lib log)
find_library(android-lib android)
find_library(vulkan-lib vulkan)
find_library(jnigraphics-lib jnigraphics)

add_library(ncnn STATIC IMPORTED)
set_target_properties( # Specifies the target library.
        ncnn
        # Specifies the parameter you want to define.
        PROPERTIES IMPORTED_LOCATION
        # Provides the path to the library you want to import.
        ${CMAKE_SOURCE_DIR}/ncnn/${ANDROID_ABI}/libncnn.a)

# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.
target_link_libraries( # Specifies the target library.
        yolov5
        # Links the target library to the log library
        # included in the NDK.
        ${log-lib}
        ${vulkan-lib}
        ${android-lib}
        ${jnigraphics-lib}
        ncnn)
Finally, on the Java side, run detection on the chosen Bitmap, draw the boxes onto a copy of it, and show the result in the ImageView:

Box[] result = YOLOv5.detect(bitmap, threshold, nms_threshold);
croppedBitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
Canvas canvas = new Canvas(croppedBitmap);
final Paint boxPaint = new Paint();
boxPaint.setAlpha(200);
boxPaint.setStyle(Paint.Style.STROKE);
boxPaint.setStrokeWidth(4 * bitmap.getWidth() / 800);
boxPaint.setTextSize(40 * bitmap.getWidth() / 800);
for (Box box : result) {
    boxPaint.setColor(box.getColor());
    boxPaint.setStyle(Paint.Style.FILL);
    canvas.drawText(box.getLabel(), box.x0, box.y0, boxPaint);
    boxPaint.setStyle(Paint.Style.STROKE);
    canvas.drawRect(box.getRect(), boxPaint);
}
albumsPicture.setImageBitmap(croppedBitmap); // show the final image in the ImageView
These C++ and Java files are tied together in the app module's build.gradle. Inside defaultConfig add:

externalNativeBuild {
    cmake {
        cppFlags ""
    }
}

and at the android level add:

externalNativeBuild {
    cmake {
        path "src/main/cpp/CMakeLists.txt"
        version "3.10.2"
    }
}

Also raise the minimum SDK version (the Vulkan library linked in the CMakeLists is only available from API level 24):

minSdkVersion 24
After four days, the port to the phone is finally done. I referred to several bloggers' write-ups and projects, hit problem after problem along the way, and in the end it worked. It really was not easy, but I am so happy; I almost jumped out of my chair last night, hahaha~~~~