{"id":204881,"date":"2025-05-29T12:23:51","date_gmt":"2025-05-29T04:23:51","guid":{"rendered":"https:\/\/server.hk\/cnblog\/204881\/"},"modified":"2025-05-29T12:23:51","modified_gmt":"2025-05-29T04:23:51","slug":"python-%e4%b8%ad%e7%9a%84%e4%ba%ba%e5%b7%a5%e6%99%ba%e8%83%bd%e8%bd%ae%e8%83%8e%e5%87%b9%e6%a7%bd%e5%88%86%e6%9e%90%ef%bc%81","status":"publish","type":"post","link":"https:\/\/server.hk\/cnblog\/204881\/","title":{"rendered":"Python \u4e2d\u7684\u4eba\u5de5\u667a\u80fd\u8f6e\u80ce\u51f9\u69fd\u5206\u6790\uff01"},"content":{"rendered":"<p><b><\/b>     <\/p>\n<h1>Python \u4e2d\u7684\u4eba\u5de5\u667a\u80fd\u8f6e\u80ce\u51f9\u69fd\u5206\u6790\uff01<\/h1>\n<p><span style=\"font-size: 15px\">\u6709\u5fd7\u8005\uff0c\u4e8b\u7adf\u6210\uff01\u5982\u679c\u4f60\u5728\u5b66\u4e60<span style=\"color: #FF6600;, Helvetica, Arial, sans-serif;font-size: 14px;background-color: #FFFFFF\">\u6587\u7ae0<\/span>\uff0c\u90a3\u4e48\u672c\u6587<span style=\"color: #FF6600;, Helvetica, Arial, sans-serif;font-size: 14px;background-color: #FFFFFF\">\u300aPython \u4e2d\u7684\u4eba\u5de5\u667a\u80fd\u8f6e\u80ce\u51f9\u69fd\u5206\u6790\uff01\u300b<\/span>\uff0c\u5c31\u5f88\u9002\u5408\u4f60\uff01\u6587\u7ae0\u8bb2\u89e3\u7684\u77e5\u8bc6\u70b9\u4e3b\u8981\u5305\u62ec<span style=\"color: #FF6600;, Helvetica, Arial, sans-serif;font-size: 14px;background-color: #FFFFFF\"><\/span>\uff0c\u82e5\u662f\u4f60\u5bf9\u672c\u6587\u611f\u5174\u8da3\uff0c\u6216\u8005\u662f\u60f3\u641e\u61c2\u5176\u4e2d\u67d0\u4e2a\u77e5\u8bc6\u70b9\uff0c\u5c31\u8bf7\u4f60\u7ee7\u7eed\u5f80\u4e0b\u770b\u5427~<\/span><\/p>\n<p>\u8f6e\u80ce\u80ce\u9762\u5206\u6790\u662f\u8bc6\u522b\u78e8\u635f\u548c\u786e\u4fdd\u5b89\u5168\u7684\u4e00\u9879\u5173\u952e\u4efb\u52a1\uff0c\u5c24\u5176\u662f\u5bf9\u4e8e\u957f\u9014\u884c\u9a76\u7684\u8f66\u8f86\u3002\u4f7f\u7528\u4eba\u5de5\u667a\u80fd (ai) \u548c 
python\uff0c\u6211\u4eec\u53ef\u4ee5\u5feb\u901f\u51c6\u786e\u5730\u81ea\u52a8\u5316\u6b64\u8fc7\u7a0b\u3002\u5728\u8fd9\u91cc\uff0c\u6211\u4eec\u5c55\u793a\u4e86\u57fa\u4e8e vgg16 \u67b6\u6784\u7684\u5377\u79ef\u795e\u7ecf\u7f51\u7edc (cnn) \u6a21\u578b\u5982\u4f55\u5c06\u8f6e\u80ce\u5206\u7c7b\u4e3a\u201c\u65b0\u201d\u6216\u201c\u65e7\u201d\uff0c\u800c opencv \u5219\u5e2e\u52a9\u5206\u6790\u56fe\u50cf\u4ee5\u6d4b\u91cf\u80ce\u9762\u6df1\u5ea6\u3002<\/p>\n<p><em><strong>\u4f7f\u7528\u7684\u6280\u672f<\/strong><\/em><\/p>\n<ul>\n<li>\n<p>python\uff1a<br \/> \u9002\u7528\u4e8e\u4eba\u5de5\u667a\u80fd\u548c\u673a\u5668\u5b66\u4e60\u7684\u6d41\u884c\u7f16\u7a0b\u8bed\u8a00\uff0c\u5c24\u5176\u662f\u5176\u9ad8\u7ea7\u5e93\u3002<\/p>\n<\/li>\n<li>\n<p>opencv\uff1a<br \/> \u7528\u4e8e\u5904\u7406\u56fe\u50cf\u3001\u68c0\u6d4b\u8f6e\u5ed3\u548c\u6d4b\u91cf\u8f6e\u80ce\u80ce\u9762\u9762\u79ef\u3002<\/p>\n<\/li>\n<li>\n<p>tensorflow \u548c keras\uff1a<br \/> \u6df1\u5ea6\u5b66\u4e60\u5e93\u3002\u6211\u4eec\u4f7f\u7528 keras \u6765\u5904\u7406 vgg16 \u6a21\u578b\uff0c\u8fd9\u662f\u4e00\u4e2a\u7528\u4e8e\u56fe\u50cf\u8bc6\u522b\u7684\u9884\u8bad\u7ec3 cnn\u3002<\/p>\n<\/li>\n<li>\n<p>matplotlib\uff1a<br \/> \u7528\u4e8e\u6570\u636e\u53ef\u89c6\u5316\u548c\u56fe\u5f62\u521b\u5efa\u7684\u5e93\uff0c\u4f7f\u5206\u7c7b\u7ed3\u679c\u66f4\u6613\u4e8e\u89e3\u91ca\u3002<\/p>\n<\/li>\n<\/ul>\n<p><strong>\u4ee3\u7801\uff1a<\/strong> <\/p>\n<p><strong><em>1\u3002\u52a0\u8f7d\u548c\u9884\u5904\u7406\u56fe\u50cf\uff1a<\/em><\/strong><br \/> \u4e0a\u4f20\u8f6e\u80ce\u56fe\u50cf\u5e76\u5c06\u5176\u5927\u5c0f\u8c03\u6574\u4e3a\u6a21\u578b\u8f93\u5165\u6240\u9700\u7684\u6807\u51c6\u683c\u5f0f\uff08150&#215;150 \u50cf\u7d20\uff09\u3002\u8fd9\u79cd\u5927\u5c0f\u8c03\u6574\u4fdd\u6301\u4e86\u7eb5\u6a2a\u6bd4\uff0c\u5e76\u5c06\u50cf\u7d20\u503c\u6807\u51c6\u5316\u5728 0 \u548c 1 \u4e4b\u95f4\uff0c\u4ee5\u4fbf\u6a21\u578b\u66f4\u5bb9\u6613\u5904\u7406\u3002<\/p>\n<pre>import cv2\nimport numpy as 
np\nfrom tensorflow.keras.applications.vgg16 import preprocess_input\n\ndef process_image(image_path, target_size=(150, 150)):\n    image = cv2.imread(image_path)\n    if image is None:\n        print(f\"erro ao carregar a imagem: {image_path}. verifique o caminho e a integridade do arquivo.\")\n        return None, None\n\n    image_resized = cv2.resize(image, target_size, interpolation=cv2.INTER_AREA)\n    image_array = np.array(image_resized) \/ 255.0  \n    image_array = np.expand_dims(image_array, axis=0)\n    image_preprocessed = preprocess_input(image_array)\n\n    return image_resized, image_preprocessed\n\n<\/pre>\n<p><strong><em>2\u3002\u4f7f\u7528\u8bad\u7ec3\u6a21\u578b\u8fdb\u884c\u5206\u7c7b\uff1a<\/em><\/strong><br \/> \u6211\u4eec\u52a0\u8f7d\u4e86\u9884\u5148\u8bad\u7ec3\u7684\u5377\u79ef\u795e\u7ecf\u7f51\u7edc\u6a21\u578b\uff0c\u8be5\u6a21\u578b\u7ecf\u8fc7\u5fae\u8c03\u4ee5\u5c06\u8f6e\u80ce\u5206\u7c7b\u4e3a\u201c\u65b0\u201d\u6216\u201c\u65e7\u201d\u3002\u8be5\u6a21\u578b\u63d0\u4f9b\u4e86\u4e00\u4e2a\u7f6e\u4fe1\u5ea6\u5206\u6570\uff0c\u8868\u660e\u8f6e\u80ce\u662f\u65b0\u8f6e\u80ce\u7684\u6982\u7387\u3002<\/p>\n<pre>from tensorflow.keras.models import load_model\n\nmodel = load_model('pneu_classificador.keras')\nprediction = model.predict(image_preprocessed)\n\n<\/pre>\n<p><strong><em>3\u3002\u51f9\u69fd\u6df1\u5ea6\u8f6e\u5ed3\u5206\u6790\uff1a<\/em><\/strong><br \/> \u4f7f\u7528\u8ba1\u7b97\u673a\u89c6\u89c9\u6280\u672f\u6267\u884c\u51f9\u69fd\u6df1\u5ea6\u68c0\u6d4b\u3002\u7070\u5ea6\u56fe\u50cf\u7ecf\u8fc7\u6a21\u7cca\u8fc7\u6ee4\u5668\u548c canny \u8fb9\u7f18\u68c0\u6d4b\uff0c\u8fd9\u6709\u52a9\u4e8e\u8bc6\u522b\u51f9\u69fd\u8f6e\u5ed3\u3002\u7136\u540e\u6211\u4eec\u8ba1\u7b97\u8f6e\u5ed3\u7684\u603b\u9762\u79ef\uff0c\u8fd9\u4f7f\u6211\u4eec\u80fd\u591f\u4f30\u8ba1\u78e8\u635f\u3002<\/p>\n<pre>def detect_tread_depth(image):\n    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)\n    blurred = cv2.GaussianBlur(gray, (5, 5), 0)\n    edges = 
cv2.Canny(blurred, 30, 100)\n    contours, _ = cv2.findContours(edges, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n    total_area = sum(cv2.contourArea(c) for c in contours if cv2.contourArea(c) &gt; 100)\n    return total_area\n\n<\/pre>\n<p><strong><em>4\u3002\u7ed3\u679c\u53ef\u89c6\u5316\u548c\u5206\u6790\uff1a<\/em><\/strong><br \/> \u5bf9\u6bcf\u4e2a\u8f6e\u80ce\u8fdb\u884c\u5206\u7c7b\u548c\u5206\u6790\u540e\uff0c\u7528 matplotlib \u663e\u793a\u7ed3\u679c\u3002\u6211\u4eec\u6bd4\u8f83\u4e86\u6bcf\u5f20\u56fe\u50cf\u4e2d\u68c0\u6d4b\u5230\u7684\u5206\u7c7b\u7f6e\u4fe1\u5ea6\u5f97\u5206\u548c\u51f9\u69fd\u533a\u57df\u3002<\/p>\n<pre>import matplotlib.pyplot as plt\n\nconfidence_scores = []\ntotal_area_green_values = []\npredicted_classes = []\n\nfor image_file in os.listdir(ver_dir):\n    image_path = os.path.join(ver_dir, image_file)\n    image_resized, image_preprocessed = process_image(image_path)\n    if image_preprocessed is not None:\n        prediction = model.predict(image_preprocessed)\n        confidence_score = prediction[0][0]\n        total_area_green = detect_tread_depth(image_resized)\n\n        predicted_class = \"novo\" if total_area_green &gt; 500 else \"usado\"\n        confidence_scores.append(confidence_score)\n        total_area_green_values.append(total_area_green)\n        predicted_classes.append(predicted_class)\n\n        plt.imshow(cv2.cvtColor(image_resized, cv2.COLOR_BGR2RGB))\n        plt.title(f\"Pneu {predicted_class} (\u00c1rea: {total_area_green:.2f}, Confian\u00e7a: {confidence_score:.2f})\")\n        plt.axis('off')\n        plt.show()\n\nfig, axs = plt.subplots(2, 1, figsize=(10, 10))\n\naxs[0].bar(os.listdir(ver_dir), confidence_scores, color='skyblue')\naxs[0].set_title('Confian\u00e7a na Classifica\u00e7\u00e3o')\naxs[0].set_ylim(0, 1)\naxs[0].tick_params(axis='x', rotation=45)\n\naxs[1].bar(os.listdir(ver_dir), total_area_green_values, color='lightgreen')\naxs[1].set_title('\u00c1rea Verde 
Detectada')\naxs[1].tick_params(axis='x', rotation=45)\n\nplt.tight_layout()\nplt.show()\n\n<\/pre>\n<p><img decoding=\"async\" src=\"https:\/\/www.17golang.com\/uploads\/20241118\/1731922269673b095d47eef.jpg\" class=\"aligncenter\"><\/p>\n<p><img decoding=\"async\" src=\"https:\/\/www.17golang.com\/uploads\/20241118\/1731922269673b095d4a6e6.jpg\" class=\"aligncenter\"><\/p>\n<p><img decoding=\"async\" src=\"https:\/\/www.17golang.com\/uploads\/20241118\/1731922269673b095d4deb2.jpg\" class=\"aligncenter\"><\/p>\n<p>\u6211\u7684\u8fd9\u4e2a\u9879\u76ee\u6f14\u793a\u4e86\u5982\u4f55\u4f7f\u7528\u4eba\u5de5\u667a\u80fd\u548c\u8ba1\u7b97\u673a\u89c6\u89c9\u81ea\u52a8\u8fdb\u884c\u8f6e\u80ce\u78e8\u635f\u5206\u6790\uff0c\u4ece\u800c\u5b9e\u73b0\u51c6\u786e\u3001\u5feb\u901f\u7684\u5206\u7c7b\u3002 vgg16 \u67b6\u6784\u548c opencv \u7684\u4f7f\u7528\u662f\u5c06\u795e\u7ecf\u7f51\u7edc\u6a21\u578b\u51c6\u786e\u6027\u4e0e\u89c6\u89c9\u8111\u6c9f\u5206\u6790\u76f8\u7ed3\u5408\u7684\u5173\u952e\u3002\u8be5\u7cfb\u7edf\u53ef\u4ee5\u6269\u5c55\u4e3a\u8de8\u8f66\u961f\u8fdb\u884c\u6301\u7eed\u76d1\u63a7\uff0c\u6709\u52a9\u4e8e\u51cf\u5c11\u4e8b\u6545\u5e76\u4f18\u5316\u8f6e\u80ce\u7ba1\u7406\u3002<\/p>\n<p>\u672c\u7bc7\u5173\u4e8e\u300aPython \u4e2d\u7684\u4eba\u5de5\u667a\u80fd\u8f6e\u80ce\u51f9\u69fd\u5206\u6790\uff01\u300b\u7684\u4ecb\u7ecd\u5c31\u5230\u6b64\u7ed3\u675f\u5566\uff0c\u4f46\u662f\u5b66\u65e0\u6b62\u5883\uff0c\u60f3\u8981\u4e86\u89e3\u5b66\u4e60\u66f4\u591a\u5173\u4e8e\u6587\u7ae0\u7684\u76f8\u5173\u77e5\u8bc6\uff0c\u8bf7\u5173\u6ce8\u516c\u4f17\u53f7\uff01<\/p>\n<p>      \u7248\u672c\u58f0\u660e \u672c\u6587\u8f6c\u8f7d\u4e8e\uff1adev.to \u5982\u6709\u4fb5\u72af\uff0c\u8bf7\u8054\u7cfb\u5220\u9664<\/p>\n","protected":false},"excerpt":{"rendered":"<p>Python 
\u4e2d\u7684\u4eba\u5de5\u667a\u80fd\u8f6e\u80ce\u51f9&#46;&#46;&#46;<\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"closed","ping_status":"","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[4925],"tags":[],"class_list":["post-204881","post","type-post","status-publish","format-standard","hentry","category-4925"],"_links":{"self":[{"href":"https:\/\/server.hk\/cnblog\/wp-json\/wp\/v2\/posts\/204881","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/server.hk\/cnblog\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/server.hk\/cnblog\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/server.hk\/cnblog\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/server.hk\/cnblog\/wp-json\/wp\/v2\/comments?post=204881"}],"version-history":[{"count":0,"href":"https:\/\/server.hk\/cnblog\/wp-json\/wp\/v2\/posts\/204881\/revisions"}],"wp:attachment":[{"href":"https:\/\/server.hk\/cnblog\/wp-json\/wp\/v2\/media?parent=204881"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/server.hk\/cnblog\/wp-json\/wp\/v2\/categories?post=204881"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/server.hk\/cnblog\/wp-json\/wp\/v2\/tags?post=204881"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}