hpc203
{
"type": "http://schema.org/Person",
"name": "",
"description": "",
"followers": "",
"url": "",
"location": "",
"languages": [
"Python",
"Python",
"Python",
"Python",
"Python",
"C++"
],
"users": [
{
"name": "@hpc203",
"avatar": "https://avatars.githubusercontent.com/u/28389623?s=64&v=4"
},
{
"name": "@hpc203",
"avatar": "https://avatars.githubusercontent.com/u/28389623?s=64&v=4"
},
{
"name": "View hpc203's full-sized avatar",
"avatar": "https://avatars.githubusercontent.com/u/28389623?v=4"
}
],
"topics": []
}
xTool
{
"avatar": "https://avatars.githubusercontent.com/u/28389623?v=4",
"name": "",
"username": "hpc203",
"description": "深度学习算法工程师\n\nhttps://zhuanlan.zhihu.com/p/466677699",
"location": "深圳",
"vcard": "<svg class=\"octicon octicon-organization\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"><path d=\"M1.75 16A1.75 1.75 0 0 1 0 14.25V1.75C0 .784.784 0 1.75 0h8.5C11.216 0 12 .784 12 1.75v12.5c0 .085-.006.168-.018.25h2.268a.25.25 0 0 0 .25-.25V8.285a.25.25 0 0 0-.111-.208l-1.055-.703a.749.749 0 1 1 .832-1.248l1.055.703c.487.325.779.871.779 1.456v5.965A1.75 1.75 0 0 1 14.25 16h-3.5a.766.766 0 0 1-.197-.026c-.099.017-.2.026-.303.026h-3a.75.75 0 0 1-.75-.75V14h-1v1.25a.75.75 0 0 1-.75.75Zm-.25-1.75c0 .138.112.25.25.25H4v-1.25a.75.75 0 0 1 .75-.75h2.5a.75.75 0 0 1 .75.75v1.25h2.25a.25.25 0 0 0 .25-.25V1.75a.25.25 0 0 0-.25-.25h-8.5a.25.25 0 0 0-.25.25ZM3.75 6h.5a.75.75 0 0 1 0 1.5h-.5a.75.75 0 0 1 0-1.5ZM3 3.75A.75.75 0 0 1 3.75 3h.5a.75.75 0 0 1 0 1.5h-.5A.75.75 0 0 1 3 3.75Zm4 3A.75.75 0 0 1 7.75 6h.5a.75.75 0 0 1 0 1.5h-.5A.75.75 0 0 1 7 6.75ZM7.75 3h.5a.75.75 0 0 1 0 1.5h-.5a.75.75 0 0 1 0-1.5ZM3 9.75A.75.75 0 0 1 3.75 9h.5a.75.75 0 0 1 0 1.5h-.5A.75.75 0 0 1 3 9.75ZM7.75 9h.5a.75.75 0 0 1 0 1.5h-.5a.75.75 0 0 1 0-1.5Z\"></path></svg>\n <span class=\"p-org\"><div>xTool</div></span>\n",
"vcardDetails": [
{
"name": "https://blog.csdn.net/nihate",
"url": "https://blog.csdn.net/nihate"
}
],
"orgs": [],
"sponsors": [],
"pinned": [
{
"name": "yolov5-dnn-cpp-python",
"description": "YOLOv5 object detection using OpenCV's dnn module, with both C++ and Python versions of the program",
"language": "Python",
"stars": 540,
"forks": 127
},
{
"name": "10kinds-light-face-detector-align-recognition",
"description": "A comparison of 10 lightweight face detection algorithms, also covering facial landmark detection and alignment, face feature vector extraction, and distance-based similarity computation",
"language": "Python",
"stars": 484,
"forks": 113
},
{
"name": "YOLOP-opencv-dnn",
"description": "Deploys the panoptic driving perception network YOLOP with OpenCV, handling three visual perception tasks at once: traffic object detection, drivable area segmentation, and lane line detection. Includes both C++ and Python implementations. The programs depend only on the opencv library, removing any dependency on a deep learning framework.",
"language": "Python",
"stars": 308,
"forks": 65
},
{
"name": "yolov8-face-landmarks-opencv-dnn",
"description": "Deploys yolov8 with OpenCV for face and landmark detection plus face quality assessment. Includes both C++ and Python versions; runs with only the opencv library, removing any dependency on a deep learning framework.",
"language": "Python",
"stars": 287,
"forks": 52
},
{
"name": "virtual_try_on_use_deep_learning",
"description": "A virtual try-on mirror built with deep learning, combining four models: human pose estimation, human segmentation, geometric matching, and a GAN. Runs with only the opencv library",
"language": "Python",
"stars": 266,
"forks": 61
},
{
"name": "yolov5-v6.1-opencv-onnxrun",
"description": "Deploys yolov5-v6.1 object detection with OpenCV and ONNXRuntime respectively, with both C++ and Python versions. Supports all ten yolov5-v6.1 variants: yolov5s, yolov5m, yolov5l, yolov5n, yolov5x, yolov5s6, yolov5m6, yolov5l6, yolov5n6, yolov5x6",
"language": "C++",
"stars": 250,
"forks": 69
}
]
}
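
Nearly every pinned repository above follows the same deployment pattern: export a model to ONNX, then run it with OpenCV's dnn module alone, with no deep learning framework at runtime. Below is a minimal Python sketch of that pattern for a YOLOv5-style detector. It is illustrative, not code from these repos; the model path "yolov5s.onnx", the input image "input.jpg", the 640 input size, and the thresholds are all assumptions.

import cv2
import numpy as np

CONF_THRESH, NMS_THRESH, INPUT_SIZE = 0.45, 0.5, 640

net = cv2.dnn.readNet("yolov5s.onnx")   # hypothetical ONNX export
img = cv2.imread("input.jpg")           # hypothetical input image
h, w = img.shape[:2]

# Plain resize for brevity; the actual repos letterbox to keep the aspect ratio.
blob = cv2.dnn.blobFromImage(img, 1 / 255.0, (INPUT_SIZE, INPUT_SIZE), swapRB=True)
net.setInput(blob)
pred = net.forward()[0]  # (25200, 85) for COCO yolov5s: cx, cy, w, h, obj, 80 scores

boxes, scores = [], []
for row in pred:
    conf = row[4] * row[5:].max()       # objectness * best class score
    if conf < CONF_THRESH:
        continue
    cx, cy, bw, bh = row[:4]
    # Scale from network input coordinates back to the original image.
    boxes.append([int((cx - bw / 2) * w / INPUT_SIZE),
                  int((cy - bh / 2) * h / INPUT_SIZE),
                  int(bw * w / INPUT_SIZE),
                  int(bh * h / INPUT_SIZE)])
    scores.append(float(conf))

# Non-maximum suppression; flatten() covers both old and new OpenCV index shapes.
for i in np.array(cv2.dnn.NMSBoxes(boxes, scores, CONF_THRESH, NMS_THRESH)).flatten():
    x, y, bw, bh = boxes[i]
    cv2.rectangle(img, (x, y), (x + bw, y + bh), (0, 255, 0), 2)
cv2.imwrite("result.jpg", img)

The real repos additionally letterbox the input, map class indices to names, and ship a C++ counterpart; the sketch keeps only the core readNet, blobFromImage, forward, NMSBoxes pipeline that the descriptions refer to.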