{"id":422,"date":"2022-11-03T17:24:22","date_gmt":"2022-11-03T09:24:22","guid":{"rendered":"http:\/\/wordpress.loc\/?p=422"},"modified":"2022-12-17T16:45:05","modified_gmt":"2022-12-17T08:45:05","slug":"esp32_ch2-2","status":"publish","type":"post","link":"https:\/\/www.nmking.io\/index.php\/2022\/11\/03\/422\/","title":{"rendered":"ESP32-CAM Python\u7269\u4ef6\u8fa8\u8b58-\u63a1\u7528YOLOV3"},"content":{"rendered":"\n<figure class=\"wp-block-pullquote\"><blockquote><p>\u5229\u7528ESP32-CAM\u4f86\u505a\u667a\u6167\u8fa8\u8b58\u4e00\u76f4\u662f\u6709\u8da3\u7684\u8b70\u984c<br>\u76ee\u524d\u4f7f\u7528ESP32CAM\u4f86\u505a\u667a\u6167\u8fa8\u8b58\u53ef\u5206\u6210\u4ee5\u4e0b3\u7a2e\u65b9\u5f0f<\/p><\/blockquote><\/figure>\n\n\n\n<div style=\"height:100px\" aria-hidden=\"true\" class=\"wp-block-spacer\"><\/div>\n\n\n\n<div class=\"wp-block-ht-block-toc is-style-rounded htoc htoc--position-wide toc-list-style-plain\" data-htoc-state=\"expanded\"><span class=\"htoc__title\"><span class=\"ht_toc_title\">ESP32-CAM Python\u7269\u4ef6\u8fa8\u8b58\u65b9\u6cd5<\/span><span class=\"htoc__toggle\"><svg xmlns=\"http:\/\/www.w3.org\/2000\/svg\" width=\"16\" height=\"16\"><g fill=\"#444\"><path d=\"M15 7H1c-.6 0-1 .4-1 1s.4 1 1 1h14c.6 0 1-.4 1-1s-.4-1-1-1z\"><\/path><path d=\"M15 1H1c-.6 0-1 .4-1 1s.4 1 1 1h14c.6 0 1-.4 1-1s-.4-1-1-1zM15 13H1c-.6 0-1 .4-1 1s.4 1 1 1h14c.6 0 1-.4 1-1s-.4-1-1-1z\"><\/path><\/g><\/svg><\/span><\/span><div class=\"htoc__itemswrap\"><ul class=\"ht_toc_list\"><li class=\"\"><a href=\"#htoc-esp32-cam\">ESP32-CAM \u667a\u6167\u8fa8\u8b58\u65b9\u5f0f\u8aaa\u660e<\/a><\/li><li class=\"\"><a href=\"#htoc-\">\u64cd\u4f5c\u524d\u6e96\u5099<\/a><\/li><li class=\"\"><a href=\"#htoc-python-esp32-cam\">Python \u63a5\u53d6esp32-cam\u5f71\u50cf<\/a><\/li><li class=\"\"><a href=\"#htoc-yolo\">\u5229\u7528yolo\u7269\u4ef6\u8fa8\u8b58<\/a><\/li><\/ul><\/div><\/div>\n\n\n\n<p>\u6211\u7684ESP32\u5be6\u505a\u66f8\u7c4d\uff1a<a rel=\"noreferrer noopener\" 
href=\"https:\/\/www.nmking.io\/index.php\/2022\/11\/10\/esp32_ch1-2\/\" target=\"_blank\">\u6211\u51fa\u66f8\u4e86 ESP32 \u7269\u806f\u7db2\u5c08\u984c<\/a><br>\u535a\u5ba2\u4f86\u7db2\u5740\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/www.books.com.tw\/products\/0010901195\" target=\"_blank\">https:\/\/www.books.com.tw\/products\/0010901195<\/a><\/p>\n\n\n\n<hr class=\"wp-block-separator has-alpha-channel-opacity is-style-wide\"\/>\n\n\n\n<div style=\"height:100px\" aria-hidden=\"true\" class=\"wp-block-spacer\"><\/div>\n\n\n\n<h2 class=\"wp-block-heading\" id=\"htoc-esp32-cam\">ESP32-CAM \u667a\u6167\u8fa8\u8b58\u65b9\u5f0f\u8aaa\u660e<\/h2>\n\n\n\n<ol class=\"wp-block-list\">\n<li>\u5728ESP32-CAM\u4e0a\u505a\u667a\u6167\u8fa8\u8b58 : \u9019\u7aef\u53d7\u9650ESP32\u8a18\u61b6\u9ad4\u53caCPU\u901f\u5ea6\uff0c\u5f88\u96e3\u9054\u6210\u9ad8\u901f\u5ea6\u7684\u5f71\u50cf\u8fa8\u8b58\u904b\u7b97<\/li>\n\n\n\n<li>\u5728\u96f2\u7aef\u505a\u667a\u6167\u8fa8\u8b58 : \u5c07ESP32CAM\u6240\u7372\u53d6\u7684\u5f71\u50cf\u4e0a\u50b3\u5230\u667a\u6167\u96f2\u7aef\u5e73\u53f0\u505a\u8fa8\u8b58\u5f8c\uff0c\u5c07\u7d50\u679c\u56de\u50b3\uff0c\u9019\u7a2e\u5247\u53ef\u8fa8\u8b58\u8f03\u70ba\u7cbe\u78ba\uff0c\u4f46\u7db2\u8def\u50b3\u8f38\u5247\u662f\u53e6\u5916\u9808\u8003\u91cf\u7684\u554f\u984c\u3002<\/li>\n\n\n\n<li>\u5229\u7528\u8fd1\u7aef\u667a\u6167\u8fa8\u8b58\uff1a\u4e5f\u5c31\u662f\u5728\u8fd1\u7aef\u5efa\u7acb\u667a\u6167\u5e73\u53f0\uff0c\u6536\u96c6ESP32-CAM\u6240\u50b3\u4f86\u7684\u5f71\u50cf\uff0c\u9019\u6a23\u5f71\u50cf\u4e0d\u7528\u4e0a\u50b3\u5230\u96f2\u7aef\uff0c\u5728\u5340\u57df\u7db2\u8def\u5c31\u53ef\u4ee5\u8fa8\u8b58\uff0c\u4e5f\u7b97\u4e00\u7a2e\u6298\u8877\u7684\u908a\u7de3\u904b\u7b97\uff0c\u77e5\u540d\u5927\u5e2b\u6cd5\u862d\u65af\u6240\u7528tensorflow.js\u5916\u639b\u5230ESP32-CAM\u5167\u9032\u884c\u9ad4\u614b\u8fa8\u8b58\u4e5f\u5c6c\u9019\u985e\uff0c\u4e3b\u8981\u662f\u900f\u904e\u89c0\u770b\u5f71\u50cf\u7684\u88dd\u7f6e\u9032\u884c\u904b\u7b97
\uff0c\u4f8b\u5982\u624b\u6a5f\u6216\u96fb\u8166\u3002<\/li>\n<\/ol>\n\n\n\n<h2 class=\"wp-block-heading\" id=\"htoc-\">\u64cd\u4f5c\u524d\u6e96\u5099<\/h2>\n\n\n\n<p>\u4ee5\u5f80\u6211\u4e5f\u4ecb\u7d39\u4e86\u5229\u7528.net\u74b0\u5883\u63a5\u53d6\u591a\u53f0ESP32-CAM\u7684\u5f71\u50cf\u9032\u884c\u5206\u6790\uff08ESP32-CAM mpeg Video Stream \u7528.net\u63a5\u6536\u4e26\u9304\u88fd\u5f71\u7247\u6a94 \uff09\uff0c\u4f46\u667a\u6167\u8fa8\u8b58\u7684\u5f8c\u53f0\u90fd\u662f\u63a1\u7528\u5fae\u8edfAzure\u7684AI Cognitive Service API\u9032\u884c\u667a\u6167\u5224\u65b7\u3002<\/p>\n\n\n\n<p>\u9019\u6b21\u53c3\u8003\u7db2\u8def\u6587\u7ae0\uff0c\u7d42\u65bc\u5b8c\u6210\u5c07ESP32-CAM\u63a5\u53d6\u5230python\u5f8c\uff0c\u7528\u6642\u4e0b\u6700\u6d41\u884c\u7684yolo v3\u7269\u4ef6\u8fa8\u8b58\u6a21\u578b\u9032\u884c\u5206\u6790\u3002<br><br>\u8981\u5b8c\u6210\u672c\u6b21\u64cd\u4f5c\uff0c\u8acb\u5148\u95b1\u8b80\u4ee5\u4e0b\u6587\u7ae0<\/p>\n\n\n\n<ul class=\"wp-block-list\">\n<li>ESP32-CAM\uff1a\u9732\u5929\u62cd\u8ce3\u8acb\u641c\u5c0b\uff1a <a href=\"https:\/\/goods.ruten.com.tw\/item\/show?21910115309507\" target=\"_blank\" rel=\"noreferrer noopener\">https:\/\/goods.ruten.com.tw\/item\/show?21910115309507<\/a><\/li>\n\n\n\n<li>ESP32-CAM\u7684\u958b\u767c\u74b0\u5883\u5efa\u7acb\u8207\u6e2c\u8a66\uff0c\u8acb\u53c3\u8003\uff1a<a href=\"https:\/\/www.nmking.io\/index.php\/2022\/11\/03\/esp32_ch2\/\" target=\"_blank\" rel=\"noreferrer noopener\">ESP32-CAM (arduino)\u5f71\u50cf\u4f3a\u670d\u5668\u53ca\u81c9\u90e8\u8fa8\u8b58\u6559\u5b78\u539f\u59cb\u6a94Video Stream Server<\/a><\/li>\n\n\n\n<li>python yolo v3\u8fa8\u8b58\uff0c\u8acb\u53c3\u8003\uff1a<a href=\"https:\/\/medium.com\/@wayne18308\/yolo%E5%AF%A6%E4%BD%9C%E6%95%99%E5%AD%B8-train%E5%87%BA%E5%B1%AC%E6%96%BC%E8%87%AA%E5%B7%B1%E7%9A%84model%E4%BD%BF%E7%94%A8darkflow-windows-9b633b7350d8\" target=\"_blank\" rel=\"noreferrer noopener\">Yolo\u5be6\u4f5c\u6559\u5b78 \u2014 
train\u51fa\u5c6c\u65bc\u81ea\u5df1\u7684model\u4f7f\u7528darkflow (windows)<\/a><\/li>\n\n\n\n<li>\u672c\u6b21ESP32-CAM\u6240\u9700\u4f7f\u7528\u7684\u7a0b\u5f0f\uff0c\u8acb\u53c3\u8003\u9019\u7bc7\uff1a<a href=\"https:\/\/www.nmking.io\/index.php\/2022\/11\/08\/esp32_ch2-3\/\" target=\"_blank\" rel=\"noreferrer noopener\">ESP32-CAM(arduino)\u62cd\u7167auto take picture\u3001\u4e32\u6d41stream\u53ca\u7269\u4ef6\u8fa8\u8b58object<\/a><\/li>\n<\/ul>\n\n\n\n<figure class=\"wp-block-image size-large is-resized\"><img data-recalc-dims=\"1\" loading=\"lazy\" decoding=\"async\" src=\"https:\/\/i0.wp.com\/pic.pimg.tw\/youyouyou\/1572802897-3972276492_n.png?resize=838%2C651&#038;ssl=1\" alt=\"\" width=\"838\" height=\"651\"\/><figcaption class=\"wp-element-caption\">\u7269\u4ef6\u8fa8\u8b58\u6210\u679c<\/figcaption><\/figure>\n\n\n\n<p>\u91cd\u9ede\u4f86\u4e86\uff0c\u5982\u4f55\u4f7f\u7528python\u63a5\u53d6ESP32-CAM\u5f71\u50cf\uff0c\u4e00\u822c\u5728python\u63a5\u53d6webcam\u90fd\u662f\u7528opencv\u7684\u300ccap = cv.VideoCapture(0)\u300d\u4e00\u53e5\u5c31\u53ef\u4ee5\u641e\u5b9a\uff0c\u4e0d\u904eESP32-CAM\u7684\u5f71\u50cf\u5c6c\u65bcmjpeg\u5f71\u50cf\u4e32\u6d41\uff0c\u56e0\u6b64\u7121\u6cd5\u4f7f\u7528\u9019\u500b\u65b9\u6cd5\uff0c\u5f8c\u4f86\u767c\u73fe\u9700\u8981\u7528\u5230jpg\u7684\u683c\u5f0f\u300c FF D8 \u300d\u70ba\u958b\u982d\u8207\u300c FF D9\u300d\u70ba\u7d50\u5c3e\u7684\u65b9\u5f0f\uff0c\u5728\u4e32\u6d41\u4e2d\u627e\u51fa\u4e00\u5f35\u5b8c\u6574\u7684jpg\uff0c\u518d\u653e\u5165yolo\u4e2d\u9032\u884c\u5206\u6790\uff0c\u4ee5\u4e0b\u5206\u6210\u5169\u500b\u4e3b\u984c\u8aaa\u660e<\/p>\n\n\n\n<h2 class=\"wp-block-heading\" id=\"htoc-python-esp32-cam\">Python \u63a5\u53d6esp32-cam\u5f71\u50cf<\/h2>\n\n\n\n<p>\u672c\u90e8\u4efd\u50c5\u6709\u5229\u7528python\u63a5\u53d6ESP32-CAM\u5f71\u50cf\u7684\u7a0b\u5f0f\uff0c\u7121\u4efb\u4f55\u5176\u4ed6\u8655\u7406\uff0c\u53c3\u8003\u7bc4\u4f8b\u7a0b\u5f0f\uff1a<\/p>\n\n\n\n<pre class=\"wp-block-code\"><code>import cv2 as 
cv\nimport numpy as np\nfrom urllib.request import urlopen\nimport os\nimport datetime\nimport time\nimport sys\n\n#change to your ESP32-CAM ip\nurl=\"http:\/\/192.168.1.149:9601\/stream\"\nCAMERA_BUFFRER_SIZE=4096\nstream=urlopen(url)\nbts=b''\ni=0\nwhile True:    \n    try:\n        bts+=stream.read(CAMERA_BUFFRER_SIZE)\n        jpghead=bts.find(b'\\xff\\xd8')\n        jpgend=bts.find(b'\\xff\\xd9')\n        if jpghead&gt;-1 and jpgend&gt;-1:\n            jpg=bts&#91;jpghead:jpgend+2]\n            bts=bts&#91;jpgend+2:]\n            img=cv.imdecode(np.frombuffer(jpg,dtype=np.uint8),cv.IMREAD_UNCHANGED)\n            #img=cv.flip(img,0) #&gt;0:\u5782\u76f4\u7ffb\u8f49, 0:\u6c34\u5e73\u7ffb\u8f49, &lt;0:\u5782\u76f4\u6c34\u5e73\u7ffb\u8f49            \n            #h,w=img.shape&#91;:2]\n            #print('\u5f71\u50cf\u5927\u5c0f \u9ad8:' + str(h) + '\u5bec\uff1a' + str(w))\n            img=cv.resize(img,(640,480))\n            cv.imshow(\"a\",img)\n        k=cv.waitKey(1)\n    except Exception as e:\n        print(\"Error:\" + str(e))\n        bts=b''\n        stream=urlopen(url)\n        continue\n    \n    k=cv.waitKey(1)\n    # \u6309a\u62cd\u7167\u5b58\u6a94\n    if k &amp; 0xFF == ord('a'):\n        cv.imwrite(str(i) + \".jpg\", img)\n        i=i+1\n    # \u6309q\u96e2\u958b\n    if k &amp; 0xFF == ord('q'):\n        break\ncv.destroyAllWindows()<\/code><\/pre>\n\n\n\n<figure class=\"wp-block-image size-large is-resized\"><img data-recalc-dims=\"1\" loading=\"lazy\" decoding=\"async\" src=\"https:\/\/i0.wp.com\/pic.pimg.tw\/youyouyou\/1572804259-2410858612_n.png?resize=839%2C572&#038;ssl=1\" alt=\"\" width=\"839\" height=\"572\"\/><figcaption class=\"wp-element-caption\">Python\u7a0b\u5f0f\u57f7\u884c\u7d50\u679c<\/figcaption><\/figure>\n\n\n\n<h2 class=\"wp-block-heading\" 
id=\"htoc-yolo\">\u5229\u7528yolo\u7269\u4ef6\u8fa8\u8b58<\/h2>\n\n\n\n<p>\u672c\u90e8\u4efd\u5c07\u4e0a\u8ff0ESP32CAM\u5f71\u50cf\u5229\u7528python\u77e5\u540d\u7684yolo\u6a21\u578b\u9032\u884c\u7269\u4ef6\u8fa8\u8b58\uff0c\u53c3\u8003\u7bc4\u4f8b\u7a0b\u5f0f\uff1a<\/p>\n\n\n\n<pre class=\"wp-block-code\"><code># This code is written at BigVision LLC. It is based on the OpenCV project. It is subject to the license terms in the LICENSE file found in this distribution and at http:\/\/opencv.org\/license.html\n\n# Usage example:  python3 object_detection_yolo.py --video=run.mp4\n#                 python3 object_detection_yolo.py --image=bird.jpg\nimport cv2 as cv\nimport argparse\nimport sys\nimport numpy as np\nfrom urllib.request import urlopen\nimport os\nimport datetime\nimport time\n\n# Initialize the parameters\nconfThreshold = 0.5  #Confidence threshold\nnmsThreshold = 0.4   #Non-maximum suppression threshold\u7f6e\u4fe1\u5ea6\u9608\u503c\ninpWidth = 320       #Width of network's input image\uff0c\u6539\u4e3a320*320\u66f4\u5feb\ninpHeight = 320      #Height of network's input image\uff0c\u6539\u4e3a608*608\u66f4\u51c6\n\nparser = argparse.ArgumentParser(description='Object Detection using YOLO in OPENCV')\nparser.add_argument('--image', help='Path to image file.')\nparser.add_argument('--video', help='Path to video file.')\nargs = parser.parse_args()\n\n# Load names of classes\nclassesFile = \"YOLO\\\\coco.names\"\nclasses = None\nwith open(classesFile, 'rt') as f:\n    classes = f.read().rstrip('\\n').split('\\n')\n\n# Give the configuration and weight files for the model and load the network using them.\nmodelConfiguration = \"YOLO\\\\yolov3.cfg\";\nmodelWeights = \"YOLO\\\\yolov3.weights\";\n\nnet = cv.dnn.readNetFromDarknet(modelConfiguration, modelWeights)\nnet.setPreferableBackend(cv.dnn.DNN_BACKEND_OPENCV)\nnet.setPreferableTarget(cv.dnn.DNN_TARGET_CPU) #\u53ef\u5207\u6362\u5230GPU,cv.dnn.DNN_TARGET_OPENCL\uff0c\n# 
\u53ea\u652f\u6301Intel\u7684GPU,\u6ca1\u6709\u5219\u81ea\u52a8\u5207\u6362\u5230cpu\n\n# Get the names of the output layers\ndef getOutputsNames(net):\n    # Get the names of all the layers in the network\n    layersNames = net.getLayerNames()\n    # Get the names of the output layers, i.e. the layers with unconnected outputs\n    return &#91;layersNames&#91;i&#91;0] - 1] for i in net.getUnconnectedOutLayers()]\n\n# Draw the predicted bounding box\ndef drawPred(classId, conf, left, top, right, bottom):\n    # Draw a bounding box.\n    cv.rectangle(frame, (left, top), (right, bottom), (255, 178, 50), 3)\n\n    label = '%.2f' % conf\n\n    # Get the label for the class name and its confidence\n    if classes:\n        assert(classId &lt; len(classes))\n        label = '%s:%s' % (classes&#91;classId], label)\n\n    #Display the label at the top of the bounding box\n    labelSize, baseLine = cv.getTextSize(label, cv.FONT_HERSHEY_SIMPLEX, 0.5, 1)\n    top = max(top, labelSize&#91;1])\n    cv.rectangle(frame, (left, top - round(1.5*labelSize&#91;1])), (left + round(1.5*labelSize&#91;0]), top + baseLine), (255, 255, 255), cv.FILLED)\n    cv.putText(frame, label, (left, top), cv.FONT_HERSHEY_SIMPLEX, 0.75, (0,0,0), 1)\n\n# Remove the bounding boxes with low confidence using non-maxima suppression\ndef postprocess(frame, outs):\n    frameHeight = frame.shape&#91;0]\n    frameWidth = frame.shape&#91;1]\n\n    classIds = &#91;]\n    confidences = &#91;]\n    boxes = &#91;]\n    # Scan through all the bounding boxes output from the network and keep only the\n    # ones with high confidence scores. 
Assign the box's class label as the class with the highest score.\n    classIds = &#91;]\n    confidences = &#91;]\n    boxes = &#91;]\n    for out in outs:\n        for detection in out:\n            scores = detection&#91;5:]\n            classId = np.argmax(scores)\n            confidence = scores&#91;classId]\n            if confidence &gt; confThreshold:\n                center_x = int(detection&#91;0] * frameWidth)\n                center_y = int(detection&#91;1] * frameHeight)\n                width = int(detection&#91;2] * frameWidth)\n                height = int(detection&#91;3] * frameHeight)\n                left = int(center_x - width \/ 2)\n                top = int(center_y - height \/ 2)\n                classIds.append(classId)\n                confidences.append(float(confidence))\n                boxes.append(&#91;left, top, width, height])\n\n    # Perform non maximum suppression to eliminate redundant overlapping boxes with\n    # lower confidences.\n    indices = cv.dnn.NMSBoxes(boxes, confidences, confThreshold, nmsThreshold)\n    for i in indices:\n        i = i&#91;0]\n        box = boxes&#91;i]\n        left = box&#91;0]\n        top = box&#91;1]\n        width = box&#91;2]\n        height = box&#91;3]\n        drawPred(classIds&#91;i], confidences&#91;i], left, top, left + width, top + height)\n\n# Process inputs\nwinName = 'Deep learning object detection in OpenCV'\ncv.namedWindow(winName, cv.WINDOW_NORMAL)\n\noutputFile = \"yolo_out_py.avi\"\n# Webcam input\nurl=\"http:\/\/192.168.1.149:9601\/stream\"\nCAMERA_BUFFRER_SIZE=4096\nstream=urlopen(url)\nbts=b''\n\n# Get the video writer initialized to save the output video\n#if (not args.image):\n#   vid_writer = cv.VideoWriter(outputFile, cv.VideoWriter_fourcc('M','J','P','G'), 30, (round(cap.get(cv.CAP_PROP_FRAME_WIDTH)),round(cap.get(cv.CAP_PROP_FRAME_HEIGHT))))\n\nwhile cv.waitKey(1) &lt; 0:\n    bts+=stream.read(CAMERA_BUFFRER_SIZE)\n    jpghead=bts.find(b'\\xff\\xd8')\n    
jpgend=bts.find(b'\\xff\\xd9')\n    if jpghead&gt;-1 and jpgend&gt;-1:\n        jpg=bts&#91;jpghead:jpgend+2]\n        bts=bts&#91;jpgend+2:]\n        img=cv.imdecode(np.frombuffer(jpg,dtype=np.uint8),cv.IMREAD_UNCHANGED)\n        v=cv.flip(img,0)\n        h=cv.flip(img,1)\n        p=cv.flip(img,-1)        \n        frame=p\n        h,w=frame.shape&#91;:2]\n        frame=cv.resize(frame,(1024,768))\n        blob = cv.dnn.blobFromImage(frame, 1\/255, (inpWidth, inpHeight), &#91;0,0,0], 1, crop=False)\n        net.setInput(blob)\n        # Runs the forward pass to get output of the output layers\n        outs = net.forward(getOutputsNames(net))\n        # Remove the bounding boxes with low confidence\n        postprocess(frame, outs)\n        # Put efficiency information. The function getPerfProfile returns the overall time for inference(t) and the timings for each of the layers(in layersTimes)\n        t, _ = net.getPerfProfile()\n        label = 'Inference time: %.2f ms' % (t * 1000.0 \/ cv.getTickFrequency())\n        cv.putText(frame, label, (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255))\n        cv.imshow(winName, frame)<\/code><\/pre>\n\n\n\n<figure class=\"wp-block-image size-large is-resized\"><img data-recalc-dims=\"1\" loading=\"lazy\" decoding=\"async\" src=\"https:\/\/i0.wp.com\/pic.pimg.tw\/youyouyou\/1572802897-3972276492_n.png?resize=840%2C652&#038;ssl=1\" alt=\"\" width=\"840\" height=\"652\"\/><figcaption class=\"wp-element-caption\">\u5b8c\u6210\u7269\u4ef6\u8fa8\u8b58\u6548\u679c<\/figcaption><\/figure>\n\n\n\n<p class=\"has-black-color has-text-color 
has-small-font-size\">\u53c3\u8003\u7db2\u5740\uff1a<br>1.http:\/\/hk.voidcc.com\/question\/p-nagarvzd-b.html<br>2.http:\/\/t.ly\/pvLyM<\/p>\n","protected":false},"excerpt":{"rendered":"<p>\u5229\u7528ESP32-CAM\u4f86\u505a\u667a\u6167\u8fa8\u8b58\u4e00\u76f4\u662f\u6709\u8da3\u7684\u8b70\u984c\u76ee\u524d\u4f7f\u7528ESP32CAM\u4f86\u505a\u667a\u6167\u8fa8\u8b58\u53ef\u5206\u6210\u4ee5\u4e0b3\u7a2e\u65b9\u5f0f \u6211\u7684 [&hellip;]<\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":{"jetpack_post_was_ever_published":false,"_jetpack_newsletter_access":"","_jetpack_dont_email_post_to_subs":false,"_jetpack_newsletter_tier_id":0,"_jetpack_memberships_contains_paywalled_content":false,"_jetpack_memberships_contains_paid_content":false,"footnotes":"","jetpack_publicize_message":"","jetpack_publicize_feature_enabled":true,"jetpack_social_post_already_shared":false,"jetpack_social_options":{"image_generator_settings":{"template":"highway","default_image_id":0,"font":"","enabled":false},"version":2}},"categories":[1,26,5],"tags":[],"class_list":["post-422","post","type-post","status-publish","format-standard","hentry","category-esp32","category-esp32cam","category-python"],"blocksy_meta":[],"jetpack_publicize_connections":[],"jetpack_featured_media_url":"","jetpack-related-posts":[],"jetpack_sharing_enabled":true,"_links":{"self":[{"href":"https:\/\/www.nmking.io\/index.php\/wp-json\/wp\/v2\/posts\/422","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.nmking.io\/index.php\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/www.nmking.io\/index.php\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/www.nmking.io\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/www.nmking.io\/index.php\/wp-json\/wp\/v2\/comments?post=422"}],"version-history":[{"count":11,"href":"https:\/\/www.nmk
ing.io\/index.php\/wp-json\/wp\/v2\/posts\/422\/revisions"}],"predecessor-version":[{"id":498,"href":"https:\/\/www.nmking.io\/index.php\/wp-json\/wp\/v2\/posts\/422\/revisions\/498"}],"wp:attachment":[{"href":"https:\/\/www.nmking.io\/index.php\/wp-json\/wp\/v2\/media?parent=422"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/www.nmking.io\/index.php\/wp-json\/wp\/v2\/categories?post=422"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/www.nmking.io\/index.php\/wp-json\/wp\/v2\/tags?post=422"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}