// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2022 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#include "layer.h"
#include "net.h"

#if defined(USE_NCNN_SIMPLEOCV)
#include "simpleocv.h"
#else
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#endif

#include <float.h>
#include <stdio.h>
#include <vector>

struct Object
{
    cv::Rect_<float> rect;
    int label;
    float prob;
};

static inline float intersection_area(const Object& a, const Object& b)
{
    cv::Rect_<float> inter = a.rect & b.rect;
    return inter.area();
}

static void qsort_descent_inplace(std::vector<Object>& faceobjects, int left, int right)
{
    int i = left;
    int j = right;
    float p = faceobjects[(left + right) / 2].prob;

    while (i <= j)
    {
        while (faceobjects[i].prob > p)
            i++;

        while (faceobjects[j].prob < p)
            j--;

        if (i <= j)
        {
            // swap
            std::swap(faceobjects[i], faceobjects[j]);

            i++;
            j--;
        }
    }

    #pragma omp parallel sections
    {
        #pragma omp section
        {
            if (left < j) qsort_descent_inplace(faceobjects, left, j);
        }
        #pragma omp section
        {
            if (i < right) qsort_descent_inplace(faceobjects, i, right);
        }
    }
}

static void qsort_descent_inplace(std::vector<Object>& faceobjects)
{
    if (faceobjects.empty())
        return;

    qsort_descent_inplace(faceobjects, 0, faceobjects.size() - 1);
}

static void nms_sorted_bboxes(const std::vector<Object>& faceobjects, std::vector<int>& picked, float nms_threshold, bool agnostic = false)
{
    picked.clear();

    const int n = faceobjects.size();

    std::vector<float> areas(n);
    for (int i = 0; i < n; i++)
    {
        areas[i] = faceobjects[i].rect.area();
    }

    for (int i = 0; i < n; i++)
    {
        const Object& a = faceobjects[i];

        int keep = 1;
        for (int j = 0; j < (int)picked.size(); j++)
        {
            const Object& b = faceobjects[picked[j]];

            if (!agnostic && a.label != b.label)
                continue;

            // intersection over union
            float inter_area = intersection_area(a, b);
            float union_area = areas[i] + areas[picked[j]] - inter_area;
            // float IoU = inter_area / union_area
            if (inter_area / union_area > nms_threshold)
                keep = 0;
        }

        if (keep)
            picked.push_back(i);
    }
}

static inline float sigmoid(float x)
{
    return static_cast<float>(1.f / (1.f + exp(-x)));
}

static void generate_proposals(const ncnn::Mat& anchors, int stride, const ncnn::Mat& in_pad, const ncnn::Mat& feat_blob, float prob_threshold, std::vector<Object>& objects)
{
    const int num_grid_x = feat_blob.w;
    const int num_grid_y = feat_blob.h;

    const int num_anchors = anchors.w / 2;

    const int num_class = 80;

    for (int q = 0; q < num_anchors; q++)
    {
        const float anchor_w = anchors[q * 2];
        const float anchor_h = anchors[q * 2 + 1];

        for (int i = 0; i < num_grid_y; i++)
        {
            for (int j = 0; j < num_grid_x; j++)
            {
                // find class index with max class score
                int class_index = 0;
                float class_score = -FLT_MAX;
                for (int k = 0; k < num_class; k++)
                {
                    float score = feat_blob.channel(q * 85 + 5 + k).row(i)[j];
                    if (score > class_score)
                    {
                        class_index = k;
                        class_score = score;
                    }
                }

                float box_score = feat_blob.channel(q * 85 + 4).row(i)[j];

                float confidence = sigmoid(box_score) * sigmoid(class_score);

                if (confidence >= prob_threshold)
                {
                    // yolov5/models/yolo.py Detect forward
                    // y = x[i].sigmoid()
                    // y[..., 0:2] = (y[..., 0:2] * 2. - 0.5 + self.grid[i].to(x[i].device)) * self.stride[i]  # xy
                    // y[..., 2:4] = (y[..., 2:4] * 2) ** 2 * self.anchor_grid[i]  # wh

                    float dx = sigmoid(feat_blob.channel(q * 85 + 0).row(i)[j]);
                    float dy = sigmoid(feat_blob.channel(q * 85 + 1).row(i)[j]);
                    float dw = sigmoid(feat_blob.channel(q * 85 + 2).row(i)[j]);
                    float dh = sigmoid(feat_blob.channel(q * 85 + 3).row(i)[j]);

                    float pb_cx = (dx * 2.f - 0.5f + j) * stride;
                    float pb_cy = (dy * 2.f - 0.5f + i) * stride;

                    float pb_w = pow(dw * 2.f, 2) * anchor_w;
                    float pb_h = pow(dh * 2.f, 2) * anchor_h;

                    float x0 = pb_cx - pb_w * 0.5f;
                    float y0 = pb_cy - pb_h * 0.5f;
                    float x1 = pb_cx + pb_w * 0.5f;
                    float y1 = pb_cy + pb_h * 0.5f;

                    Object obj;
                    obj.rect.x = x0;
                    obj.rect.y = y0;
                    obj.rect.width = x1 - x0;
                    obj.rect.height = y1 - y0;
                    obj.label = class_index;
                    obj.prob = confidence;

                    objects.push_back(obj);
                }
            }
        }
    }
}

static int detect_yolov7(const cv::Mat& bgr, std::vector<Object>& objects)
{
    ncnn::Net yolov7;

    yolov7.opt.use_vulkan_compute = true;
    // yolov7.opt.use_bf16_storage = true;

    // export the pretrained model to ncnn:
    // git clone https://github.com/WongKinYiu/yolov7
    // cd yolov7
    // wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt
    // python models/export.py --weights yolov7.pt
    // pnnx yolov7.torchscript.pt inputshape=[1,3,640,640] inputshape2=[1,3,320,320]
    yolov7.load_param("yolov7.param");
    yolov7.load_model("yolov7.bin");

    const int target_size = 640;
    const float prob_threshold = 0.25f;
    const float nms_threshold = 0.45f;

    int img_w = bgr.cols;
    int img_h = bgr.rows;

    // yolov5/models/common.py DetectMultiBackend
    const int max_stride = 64;

    // letterbox pad to multiple of max_stride
    int w = img_w;
    int h = img_h;
    float scale = 1.f;
    if (w > h)
    {
        scale = (float)target_size / w;
        w = target_size;
        h = h * scale;
    }
    else
    {
        scale = (float)target_size / h;
        h = target_size;
        w = w * scale;
    }

    ncnn::Mat in = ncnn::Mat::from_pixels_resize(bgr.data, ncnn::Mat::PIXEL_BGR2RGB, img_w, img_h, w, h);

    // pad to target_size rectangle
    // yolov5/utils/datasets.py letterbox
    int wpad = (w + max_stride - 1) / max_stride * max_stride - w;
    int hpad = (h + max_stride - 1) / max_stride * max_stride - h;
    ncnn::Mat in_pad;
    ncnn::copy_make_border(in, in_pad, hpad / 2, hpad - hpad / 2, wpad / 2, wpad - wpad / 2, ncnn::BORDER_CONSTANT, 114.f);

    const float norm_vals[3] = {1 / 255.f, 1 / 255.f, 1 / 255.f};
    in_pad.substract_mean_normalize(0, norm_vals);

    ncnn::Extractor ex = yolov7.create_extractor();

    ex.input("in0", in_pad);

    std::vector<Object> proposals;

    // anchor setting from yolov7/cfg/training/yolov7.yaml

    // stride 8
    {
        ncnn::Mat out;
        ex.extract("out0", out);

        ncnn::Mat anchors(6);
        anchors[0] = 12.f;
        anchors[1] = 16.f;
        anchors[2] = 19.f;
        anchors[3] = 36.f;
        anchors[4] = 40.f;
        anchors[5] = 28.f;

        std::vector<Object> objects8;
        generate_proposals(anchors, 8, in_pad, out, prob_threshold, objects8);

        proposals.insert(proposals.end(), objects8.begin(), objects8.end());
    }

    // stride 16
    {
        ncnn::Mat out;
        ex.extract("out1", out);

        ncnn::Mat anchors(6);
        anchors[0] = 36.f;
        anchors[1] = 75.f;
        anchors[2] = 76.f;
        anchors[3] = 55.f;
        anchors[4] = 72.f;
        anchors[5] = 146.f;

        std::vector<Object> objects16;
        generate_proposals(anchors, 16, in_pad, out, prob_threshold, objects16);

        proposals.insert(proposals.end(), objects16.begin(), objects16.end());
    }

    // stride 32
    {
        ncnn::Mat out;
        ex.extract("out2", out);

        ncnn::Mat anchors(6);
        anchors[0] = 142.f;
        anchors[1] = 110.f;
        anchors[2] = 192.f;
        anchors[3] = 243.f;
        anchors[4] = 459.f;
        anchors[5] = 401.f;

        std::vector<Object> objects32;
        generate_proposals(anchors, 32, in_pad, out, prob_threshold, objects32);

        proposals.insert(proposals.end(), objects32.begin(), objects32.end());
    }

    // sort all proposals by score from highest to lowest
    qsort_descent_inplace(proposals);

    // apply nms with nms_threshold
    std::vector<int> picked;
    nms_sorted_bboxes(proposals, picked, nms_threshold);

    int count = picked.size();

    objects.resize(count);
    for (int i = 0; i < count; i++)
    {
        objects[i] = proposals[picked[i]];

        // adjust offset to original unpadded
        float x0 = (objects[i].rect.x - (wpad / 2)) / scale;
        float y0 = (objects[i].rect.y - (hpad / 2)) / scale;
        float x1 = (objects[i].rect.x + objects[i].rect.width - (wpad / 2)) / scale;
        float y1 = (objects[i].rect.y + objects[i].rect.height - (hpad / 2)) / scale;

        // clip
        x0 = std::max(std::min(x0, (float)(img_w - 1)), 0.f);
        y0 = std::max(std::min(y0, (float)(img_h - 1)), 0.f);
        x1 = std::max(std::min(x1, (float)(img_w - 1)), 0.f);
        y1 = std::max(std::min(y1, (float)(img_h - 1)), 0.f);

        objects[i].rect.x = x0;
        objects[i].rect.y = y0;
        objects[i].rect.width = x1 - x0;
        objects[i].rect.height = y1 - y0;
    }

    return 0;
}

static void draw_objects(const cv::Mat& bgr, const std::vector<Object>& objects)
{
    static const char* class_names[] = {
        "person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light",
        "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow",
        "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee",
        "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard",
        "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple",
        "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch",
        "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone",
        "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear",
        "hair drier", "toothbrush"
    };

    cv::Mat image = bgr.clone();

    for (size_t i = 0; i < objects.size(); i++)
    {
        const Object& obj = objects[i];

        fprintf(stderr, "%d = %.5f at %.2f %.2f %.2f x %.2f\n", obj.label, obj.prob,
                obj.rect.x, obj.rect.y, obj.rect.width, obj.rect.height);

        cv::rectangle(image, obj.rect, cv::Scalar(255, 0, 0));

        char text[256];
        sprintf(text, "%s %.1f%%", class_names[obj.label], obj.prob * 100);

        int baseLine = 0;
        cv::Size label_size = cv::getTextSize(text, cv::FONT_HERSHEY_SIMPLEX, 0.5, 1, &baseLine);

        int x = obj.rect.x;
        int y = obj.rect.y - label_size.height - baseLine;
        if (y < 0)
            y = 0;
        if (x + label_size.width > image.cols)
            x = image.cols - label_size.width;

        cv::rectangle(image, cv::Rect(cv::Point(x, y), cv::Size(label_size.width, label_size.height + baseLine)),
                      cv::Scalar(255, 255, 255), -1);

        cv::putText(image, text, cv::Point(x, y + label_size.height),
                    cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 0));
    }

    cv::imshow("image", image);
    cv::waitKey(0);
}

int main(int argc, char** argv)
{
    if (argc != 2)
    {
        fprintf(stderr, "Usage: %s [imagepath]\n", argv[0]);
        return -1;
    }

    const char* imagepath = argv[1];

    cv::Mat m = cv::imread(imagepath, 1);
    if (m.empty())
    {
        fprintf(stderr, "cv::imread %s failed\n", imagepath);
        return -1;
    }

    std::vector<Object> objects;
    detect_yolov7(m, objects);

    draw_objects(m, objects);

    return 0;
}
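// Example usage (a sketch, not part of the program above): assuming this file is built as the
// standard ncnn example target named "yolov7" and that the exported yolov7.param / yolov7.bin
// sit in the working directory, a typical build-and-run session looks like the following.
// The build directory, the NCNN_VULKAN flag and the image file name are illustrative
// assumptions; the only input the program itself requires is the single [imagepath] argument
// parsed in main().
//
//   cd ncnn/build
//   cmake -DNCNN_VULKAN=ON ..
//   make -j4
//   ./examples/yolov7 image.jpg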