{"id":53753,"date":"2025-02-16T09:18:05","date_gmt":"2025-02-16T01:18:05","guid":{"rendered":"https:\/\/fwq.ai\/blog\/53753\/"},"modified":"2025-02-16T09:18:05","modified_gmt":"2025-02-16T01:18:05","slug":"8%e4%b8%aa%e6%9c%80%e5%bc%ba%e5%a4%a7%e7%9a%84ocr%e5%b7%a5%e5%85%b7%e4%b8%8e%e6%9c%8d%e5%8a%a1","status":"publish","type":"post","link":"https:\/\/fwq.ai\/blog\/53753\/","title":{"rendered":"8\u4e2a\u6700\u5f3a\u5927\u7684OCR\u5de5\u5177\u4e0e\u670d\u52a1"},"content":{"rendered":"<p>\u5728\u5feb\u8282\u594f\u7684 IT \u4e16\u754c\u4e2d\uff0c\u5149\u5b66\u5b57\u7b26\u8bc6\u522b (OCR) \u5df2\u6210\u4e3a\u4ece\u56fe\u50cf\u4e2d\u63d0\u53d6\u6587\u672c\u7684\u4e0d\u53ef\u6216\u7f3a\u7684\u5de5\u5177\u3002\u4f46\u662f\uff0c\u5f53\u8fd9\u4e9b\u56fe\u50cf\u8d28\u91cf\u4f4e\u4e0b\u3001\u6a21\u7cca\u6216\u4e0d\u5b8c\u7f8e\u65f6\u4f1a\u53d1\u751f\u4ec0\u4e48\uff1f\u8fd9\u6b63\u662f\u6211\u5728\u5f53\u524d\u5de5\u4f5c\u4e2d\u9762\u4e34\u7684\u6311\u6218\uff0c\u5b83\u4fc3\u4f7f\u6211\u6df1\u5165\u7814\u7a76 OCR \u5de5\u5177\u548c\u670d\u52a1\u7684\u4e16\u754c\u4ee5\u5bfb\u627e\u89e3\u51b3\u65b9\u6848\u3002<\/p>\n<p>\u6211\u63a2\u7d22\u4e86\u5404\u79cd OCR \u5de5\u5177\u548c\u670d\u52a1\uff0c\u5e76\u6839\u636e\u5b83\u4eec\u7684\u7279\u70b9\uff08\u4f8b\u5982\u4ef7\u683c\u3001\u51c6\u786e\u6027\u548c\u5b9e\u65bd\u5de5\u4f5c\u91cf\uff09\u5c06\u5b83\u4eec\u5206\u4e3a\u4e09\u4e2a\u4e0d\u540c\u7684\u7c7b\u522b\uff1a<\/p>\n<ul>\n<li>\u751f\u6210\u5f0fAI\uff1a\u5728\u8fd9\u91cc\uff0c\u6211\u7814\u7a76\u4e86 Gemini \u548c OpenAI \u7b49\u5c16\u7aef\u89e3\u51b3\u65b9\u6848\uff0c\u5b83\u4eec\u5229\u7528\u751f\u6210\u6a21\u578b\u7684\u5f3a\u5927\u529f\u80fd\u6765\u89e3\u51b3\u590d\u6742\u7684 OCR \u4efb\u52a1\u3002<\/li>\n<li>\u89c6\u89c9 
AI\uff1a\u6b64\u7c7b\u522b\u4e13\u6ce8\u4e8e\u4e13\u95e8\u7684\u8ba1\u7b97\u673a\u89c6\u89c9\u670d\u52a1\uff0c\u65e8\u5728\u7cbe\u786e\u5904\u7406\u57fa\u4e8e\u56fe\u50cf\u7684\u6311\u6218\u5e76\u4ece\u56fe\u50cf\u4e2d\u63d0\u53d6\u7279\u5f81\u3002<\/li>\n<li>\u5f00\u6e90\u5e93\uff1a\u5bf9\u4e8e\u90a3\u4e9b\u5e0c\u671b\u6784\u5efa\u5185\u90e8\u89e3\u51b3\u65b9\u6848\u7684\u4eba\uff0c\u6211\u8bc4\u4f30\u4e86\u53ef\u7528\u7684\u6700\u4f73\u5f00\u6e90\u5de5\u5177\uff0c\u5e73\u8861\u4e86\u7075\u6d3b\u6027\u548c\u63a7\u5236\u529b\u3002<\/li>\n<\/ul>\n<p>\u5728\u6574\u4e2a\u7814\u7a76\u8fc7\u7a0b\u4e2d\uff0c\u6211\u9488\u5bf9\u4e00\u7ec4\u4f4e\u8d28\u91cf\u56fe\u50cf\u6d4b\u8bd5\u4e86\u8fd9\u4e9b\u5de5\u5177\uff0c\u4ee5\u8bc4\u4f30\u5b83\u4eec\u5728\u5b9e\u9645\u573a\u666f\u4e2d\u7684\u8868\u73b0\u3002\u6211\u5f3a\u70c8\u5efa\u8bae\u4f60\u521b\u5efa\u56fe\u50cf\u6570\u636e\u96c6\uff0c\u4ee5\u786e\u4fdd\u5b83\u6700\u80fd\u6ee1\u8db3\u4f60\u7684\u7279\u5b9a\u9700\u6c42\u548c\u76ee\u6807\u3002<\/p>\n<p>\u5982\u679c\u4f60\u5bf9\u8be6\u7ec6\u4fe1\u606f\u611f\u5230\u597d\u5947\uff0c\u53ef\u4ee5\u5728\u627e\u5230\u7528\u4e8e\u672c\u7814\u7a76\u7684\u5b8c\u6574\u4ee3\u7801\u3002\u6211\u7528\u6765\u6d4b\u8bd5\u89e3\u51b3\u65b9\u6848\u7684\u56fe\u50cf\u53ef\u5728\u83b7\u5f97\uff0c\u57fa\u51c6\u6d4b\u8bd5\u4ee3\u7801\u53ef\u5728\u8bbf\u95ee\u3002<\/p>\n<h2>1\u3001OpenAI &#8211; \u751f\u6210\u5f0fAI<\/h2>\n<p>\u5728\u6211\u63a2\u7d22 OCR \u5de5\u5177\u7684\u8fc7\u7a0b\u4e2d\uff0c\u4e00\u79cd\u6709\u8da3\u7684\u65b9\u6cd5\u662f\u5229\u7528 ChatGPT \u548c\u5b9a\u5236\u63d0\u793a\u4ece\u56fe\u50cf\u4e2d\u63d0\u53d6\u6587\u672c\u3002<\/p>\n<pre><code>from openai import OpenAI # pip install openai\nfrom base64 import b64encode\n\n\n# Generate APIKey https:\/\/platform.openai.com\/api-keys\nOPENAI_API_KEY = \"\" \n\nimage_path = \"image.jpg\"\n\nwith open(image_path, \"rb\") as image_file:\n   base64_image = b64encode(image_file.read()).decode(\"utf-8\")\n\n\nclient = 
OpenAI(api_key=OPENAI_API_KEY)\n\nresponse = client.chat.completions.create(\n    model=\"gpt-4o-mini\",\n    messages=[\n        {\n            \"role\": \"user\",\n            \"content\": [\n                {\n                    \"type\": \"text\",\n                    \"text\": \"OCR this image. Do not include any markdown or code formatting.\",\n                },\n                {\n                    \"type\": \"image_url\",\n                    \"image_url\": {\n                        \"url\": f\"data:image\/jpeg;base64,{base64_image}\"\n                    },\n                },\n            ],\n        }\n    ],\n)\n\nprint(response.choices[0].message.content)<\/code><\/pre>\n<p>\u73b0\u5728\u90fd\u5177\u6709\u89c6\u89c9\u529f\u80fd\uff0c\u5141\u8bb8\u5b83\u4eec\u5c06\u56fe\u50cf\u4f5c\u4e3a\u8f93\u5165\u8fdb\u884c\u5904\u7406\u5e76\u56de\u7b54\u6709\u5173\u5b83\u4eec\u7684\u95ee\u9898\u3002<\/p>\n<h2>2\u3001Gemini &#8211; \u751f\u6210\u5f0fAI<\/h2>\n<p>Gemini \u7684\u591a\u6a21\u6001\u529f\u80fd\u4f7f\u5176\u80fd\u591f\u5904\u7406\u89c6\u89c9\u6570\u636e\u4ee5\u53ca\u4e0a\u4e0b\u6587\u63d0\u793a\uff0c\u4f7f\u5176\u5bf9\u4e8e\u5177\u6709\u6311\u6218\u6027\u7684 OCR \u4efb\u52a1\u7279\u522b\u6709\u6548\u3002<\/p>\n<pre><code>from PIL import Image # pip install Pillow\nimport google.generativeai as genai # pip install google.generativeai\n\n# Generate APIkey - https:\/\/aistudio.google.com\/apikey\nGEMINI_API_KEY = \"\" \n\nimage_path = \"image.jpg\" # Put here your image path\nmodel_name=\"gemini-1.5-flash\"\n\ngenai.configure(api_key=GEMINI_API_KEY)\n\nimage_file = Image.open(image_path)\nmodel = genai.GenerativeModel(model_name=model_name)\n\nprompt = \"OCR this image. 
Do not include any markdown or code formatting.\"\n\nresponse = model.generate_content([prompt, image_file])\n\nprint(response.text)<\/code><\/pre>\n<p>\u901a\u8fc7\u5236\u4f5c\u6709\u9488\u5bf9\u6027\u7684\u63d0\u793a\uff0c\u4f8b\u5982\u6307\u5b9a\u8bed\u8a00\u3001\u5e03\u5c40\uff0c\u751a\u81f3\u662f\u5185\u5bb9\u7684\u9884\u671f\u7ed3\u6784\u3002<\/p>\n<h2>3\u3001Google Cloud &#8211; Vision AI<\/h2>\n<p>\u662f\u4e00\u6b3e\u5f3a\u5927\u7684 OCR \u4efb\u52a1\u5de5\u5177\uff0c\u5c24\u5176\u662f\u5728\u5904\u7406\u4f4e\u8d28\u91cf\u56fe\u50cf\u65f6\u3002\u5176\u6587\u672c\u68c0\u6d4b\u529f\u80fd\u65e8\u5728\u5e94\u5bf9\u5404\u79cd\u6311\u6218\uff0c\u4ece\u503e\u659c\u89d2\u5ea6\u5230\u4e0d\u540c\u7684\u5149\u7167\u6761\u4ef6\u3002<\/p>\n<pre><code>import os\nfrom google.cloud import vision # pip install google-cloud-vision\n\n\n# Put here your credentials json file\n# https:\/\/developers.google.com\/workspace\/guides\/create-credentials?hl=en\nos.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = \"\"\n\nclient = vision.ImageAnnotatorClient()\n\nwith open(\"image.jpg\", \"rb\") as image_file:\n    content = image_file.read()\n    image = vision.Image(content=content)\n\nresponse = client.text_detection(image=image)\n\nfor annotation in response.text_annotations:\n    print(\"Detected Text:\", annotation.description)<\/code><\/pre>\n<p>\u51ed\u501f\u5176\u5148\u8fdb\u7684\u673a\u5668\u5b66\u4e60\u6a21\u578b\uff0cVision AI \u53ef\u4ee5\u51c6\u786e\u5730\u4ece\u7167\u7247\u3001\u626b\u63cf\u6587\u6863\u751a\u81f3\u6742\u4e71\u7684\u4ea7\u54c1\u5305\u88c5\u4e2d\u8bc6\u522b\u548c\u63d0\u53d6\u6587\u672c\u3002<\/p>\n<h2>4\u3001Azure &#8211; Vision AI<\/h2>\n<p>\u662f\u4e00\u6b3e\u529f\u80fd\u5f3a\u5927\u7684\u5de5\u5177\uff0c\u53ef\u7528\u4e8e\u4ece\u56fe\u50cf\u4e2d\u63d0\u53d6\u6587\u672c\uff0c\u63d0\u4f9b\u5f3a\u5927\u7684 OCR \u529f\u80fd\uff0c\u53ef\u5904\u7406\u5404\u79cd\u573a\u666f\u3002<\/p>\n<pre><code># pip install azure-ai-vision-imageanalysis\nfrom 
azure.ai.vision.imageanalysis import ImageAnalysisClient\nfrom azure.ai.vision.imageanalysis.models import VisualFeatures\nfrom azure.core.credentials import AzureKeyCredential\n\n\n# Generate APIkey - https:\/\/azure.microsoft.com\/en-us\/products\/ai-services\/ai-vision\nAZURE_VISION_API_KEY = \"\"\n\nimage_path = \"image.jpg\"\n\nwith open(image_path, \"rb\") as f:\n    image_data = f.read()\n\n# You need to create an Azure Computer Vision AI services \n# https:\/\/portal.azure.com\/\nregion = \"eastus\"\nendpoint = \"https:\/\/&lt;instance name&gt;.cognitiveservices.azure.com\/\"\n\nclient = ImageAnalysisClient(\n    endpoint=endpoint,\n    credential=AzureKeyCredential(AZURE_VISION_API_KEY),\n    region=region,\n)\n\nresponse = client.analyze(\n    image_data,\n    visual_features=[VisualFeatures.READ],\n)\n\nif response.read is not None:\n    for line in response.read.blocks[0].lines:\n        print(\"Detected Text:\", line.text)<\/code><\/pre>\n<p>\u5176\u5148\u8fdb\u7684\u7b97\u6cd5\u53ef\u4ee5\u5904\u7406\u4e0d\u540c\u8d28\u91cf\u6c34\u5e73\u7684\u56fe\u50cf\uff0c\u4ece\u6e05\u6670\u3001\u5149\u7ebf\u5145\u8db3\u7684\u7167\u7247\u5230\u4f4e\u8d28\u91cf\u3001\u5608\u6742\u7684\u8f93\u5165\u3002<\/p>\n<h2>5\u3001Tesseract &#8211; \u5f00\u6e90\u5e93<\/h2>\n<p> \u662f\u4e00\u4e2a\u5f3a\u5927\u4e14\u591a\u529f\u80fd\u7684\u9009\u9879\uff0c\u5b83\u662f\u514d\u8d39\u7684\u5f00\u6e90\u5e93\uff0c\u53ef\u4f9b\u4e2a\u4eba\u548c\u5546\u4e1a\u4f7f\u7528\u3002\u83b7\u5f97\u66f4\u597d\u7ed3\u679c\u7684\u5173\u952e\u662f\u9884\u5904\u7406\uff0c\u4f8b\u5982\u4e8c\u503c\u5316\uff08\u8f6c\u6362\u4e3a\u9ed1\u767d\uff09\u3001\u964d\u566a\u548c\u5bf9\u6bd4\u5ea6\u8c03\u6574\u53ef\u4ee5\u663e\u8457\u6539\u5584 OCR \u7ed3\u679c\u3002<\/p>\n<pre><code>import pytesseract # pip install pytesseract\nimport cv2 # pip install opencv-contrib-python\n\n\nimage = cv2.imread('image.jpg')\ngray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)\n\n# Apply dinarization converting the image to 
black-and-white.\n_, binary = cv2.threshold(gray, 150, 255, cv2.THRESH_BINARY)\n\n# Tesseract allows you to specify the language of the text and configure settings \n# like page segmentation mode (PSM). For low-quality images, \n# using --psm 6 (assume a single uniform block of text) \n# or --psm 11 (sparse text) can yield better results.\nconfig = \"-l por --oem 1 --psm 11\"\ntext = pytesseract.image_to_string(image, config=config)\nprint(text)<\/code><\/pre>\n<p>\u867d\u7136 Tesseract \u529f\u80fd\u5f3a\u5927\uff0c\u4f46\u5e76\u4e0d\u5b8c\u7f8e\u3002\u5b83\u53ef\u80fd\u96be\u4ee5\u5904\u7406\u9ad8\u5ea6\u626d\u66f2\u7684\u6587\u672c\u6216\u590d\u6742\u7684\u5e03\u5c40\u3002\u5728\u8fd9\u79cd\u60c5\u51b5\u4e0b\uff0c\u5c06 Tesseract \u4e0e\u5176\u4ed6\u9884\u5904\u7406\u6280\u672f\u751a\u81f3\u81ea\u5b9a\u4e49\u8bad\u7ec3\u76f8\u7ed3\u5408\u53ef\u4ee5\u5e2e\u52a9\u5f25\u8865\u5dee\u8ddd\u3002<\/p>\n<h2>6\u3001EasyOCR &#8211; &nbsp;\u5f00\u6e90\u5e93<\/h2>\n<p>\u5728\u4ece\u4f4e\u8d28\u91cf\u56fe\u50cf\u4e2d\u63d0\u53d6\u6587\u672c\u65b9\u9762\uff0cEasyOCR \u662f\u4e00\u6b3e\u529f\u80fd\u5f3a\u5927\u3001\u65e2\u6613\u4e8e\u8bbf\u95ee\u53c8\u6709\u6548\u7684\u5f00\u6e90\u5de5\u5177\u3002EasyOCR \u57fa\u4e8e PyTorch \u6784\u5efa\uff0c\u652f\u6301 80 \u591a\u79cd\u8bed\u8a00\uff0c\u65e8\u5728\u5904\u7406\u5177\u6709\u6311\u6218\u6027\u7684\u73b0\u5b9e\u573a\u666f\uff0c\u5305\u62ec\u6a21\u7cca\u3001\u503e\u659c\u6216\u5608\u6742\u7684\u56fe\u50cf\u3002<\/p>\n<pre><code>import easyocr # !pip install easyocr\n\n\nreader = easyocr.Reader(['pt'])\nresults = reader.readtext('image.jpg')\n\nfor (bbox, text, confidence) in results:\n    print(f\"Detected text: {text} (Confidence: {confidence:.2f})\")<\/code><\/pre>\n<p>EasyOCR \u5e26\u6709\u9884\u5148\u8bad\u7ec3\u7684\u6a21\u578b\uff0c\u53ef\u7acb\u5373\u4f7f\u7528\uff0c\u975e\u5e38\u9002\u5408\u5feb\u901f\u90e8\u7f72\u4e14\u51c6\u786e\u5ea6\u9ad8\u3002<\/p>\n<h2>7\u3001Surya &#8211; 
\u5f00\u6e90\u5e93<\/h2>\n<p>\u5f97\u76ca\u4e8e\u5176\u5148\u8fdb\u7684\u9884\u5904\u7406\u6280\u672f\uff0cSurya \u5728\u5904\u7406\u5608\u6742\u3001\u626d\u66f2\u6216\u4f4e\u5206\u8fa8\u7387\u56fe\u50cf\u65b9\u9762\u8868\u73b0\u51fa\u8272\u3002<\/p>\n<pre><code>from PIL import Image # pip install Pillow\n\n# pip install surya-ocr\nfrom surya.recognition import RecognitionPredictor\nfrom surya.detection import DetectionPredictor\n\n\nimage_path = \"image.jpg\"\n\nimage = Image.open(image_path)\n\nlangs = [\"pt\"]\nrecognition_predictor = RecognitionPredictor()\ndetection_predictor = DetectionPredictor()\n\npredictions = recognition_predictor([image], [langs], detection_predictor)\nfor prediction in predictions:\n  for line in prediction.text_lines:\n      print(line.text)<\/code><\/pre>\n<p>\u5b83\u652f\u6301\u591a\u79cd\u8bed\u8a00\u548c\u811a\u672c\uff0c\u4f7f\u5176\u9002\u7528\u4e8e\u5404\u79cd\u7528\u4f8b\u3002Surya \u5229\u7528\u5148\u8fdb\u7684\u673a\u5668\u5b66\u4e60\u6a21\u578b\u51c6\u786e\u8bc6\u522b\u548c\u63d0\u53d6\u6587\u672c\uff0c\u5373\u4f7f\u5728\u6b21\u4f18\u6761\u4ef6\u4e0b\u4e5f\u662f\u5982\u6b64\u3002<\/p>\n<h2>8\u3001DocTR &#8211; \u5f00\u6e90\u5e93<\/h2>\n<p>\u5728\u4ece\u4f4e\u8d28\u91cf\u56fe\u50cf\u4e2d\u63d0\u53d6\u6587\u672c\u65b9\u9762\uff0cDocTR\uff08\u6587\u6863\u6587\u672c\u8bc6\u522b\uff09\u8131\u9896\u800c\u51fa\uff0c\u6210\u4e3a\u5f3a\u5927\u7684\u5f00\u6e90\u89e3\u51b3\u65b9\u6848\u3002DocTR \u5efa\u7acb\u5728 TensorFlow \u548c PyTorch \u7b49\u6df1\u5ea6\u5b66\u4e60\u6846\u67b6\u4e4b\u4e0a\uff0c\u5728\u6587\u6863\u7406\u89e3\u4efb\u52a1\u4e2d\u63d0\u4f9b\u6700\u5148\u8fdb\u7684\u6027\u80fd\u3002\u5b83\u914d\u5907\u4e86\u9884\u8bad\u7ec3\u6a21\u578b\uff0c\u80fd\u591f\u8bc6\u522b\u591a\u79cd\u8bed\u8a00\u548c\u4e0d\u540c\u56fe\u50cf\u6761\u4ef6\u4e0b\u7684\u6587\u672c\u3002<\/p>\n<pre><code>from doctr.io import DocumentFile # !pip install \"python-doctr[torch]\"\nfrom doctr.models import ocr_predictor\n\n\nimages_path = \"image.jpg\"\ndoc = 
DocumentFile.from_images(images_path)\n\nmodel = ocr_predictor(det_arch=\"db_resnet50\", reco_arch=\"crnn_vgg16_bn\", pretrained=True)\n\nresult = model(doc)\n\nfor page in result.pages:\n  for block in page.blocks:\n      for line in block.lines:\n          texts = [word.value for word in line.words]\n          print(texts)<\/code><\/pre>\n<p>DocTR \u7684\u72ec\u7279\u4e4b\u5904\u5728\u4e8e\u5176\u4e24\u6b65\u6d41\u7a0b\uff1a<\/p>\n<ul>\n<li>\u6587\u672c\u68c0\u6d4b\uff1a\u5b83\u9996\u5148\u8bc6\u522b\u56fe\u50cf\u4e2d\u7684\u6587\u672c\u533a\u57df\uff0c\u5373\u4f7f\u5728\u590d\u6742\u7684\u5e03\u5c40\u4e2d\u4e5f\u662f\u5982\u6b64\u3002<\/li>\n<li>\u6587\u672c\u8bc6\u522b\uff1a\u7136\u540e\u5229\u7528\u5728\u4e0d\u540c\u6570\u636e\u96c6\u4e0a\u8bad\u7ec3\u7684\u6df1\u5ea6\u5b66\u4e60\u6a21\u578b\uff0c\u89e3\u8bfb\u8fd9\u4e9b\u533a\u57df\u5185\u7684\u6587\u672c\u3002<\/li>\n<\/ul>\n<p>\u8fd9\u79cd\u53cc\u91cd\u65b9\u6cd5\u786e\u4fdd\u4e86\u5f3a\u5927\u7684\u6027\u80fd\uff0c\u7279\u522b\u662f\u5bf9\u4e8e\u4f4e\u8d28\u91cf\u56fe\u50cf\uff0c\u4f20\u7edf\u7684 OCR \u5de5\u5177\u7ecf\u5e38\u4f1a\u9047\u5230\u56f0\u96be\u3002<\/p>\n<h2>9\u3001\u7ed3\u675f\u8bed<\/h2>\n<p>\u6211\u4f7f\u7528\u5404\u79cd OCR \u5de5\u5177\u548c\u670d\u52a1\u7684\u7ecf\u5386\u65e2\u6709\u542f\u53d1\u6027\u53c8\u5f88\u5b9e\u7528\uff0c\u8ba9\u6211\u6df1\u5165\u4e86\u89e3\u4e86\u4e0d\u540c\u65b9\u6cd5\u7684\u4f18\u52bf\u548c\u5c40\u9650\u6027\u3002<\/p>\n<p>\u6211\u7684\u6570\u636e\u96c6\u5305\u62ec\u4ea7\u54c1\u5305\u88c5\u7684\u7167\u7247\uff0c\u5177\u6709\u591a\u4e2a\u6587\u672c\u65b9\u5411\u3001\u4e0d\u540c\u7684\u5b57\u4f53\u548c\u989c\u8272\uff0c\u8fd9\u5e26\u6765\u4e86\u6311\u6218\u3002<\/p>\n<p>Google Cloud \u548c Azure \u7684 Vision AI \u670d\u52a1\u5728\u4ece\u4f4e\u8d28\u91cf\u56fe\u50cf\u4e2d\u63d0\u53d6\u6587\u672c\u65b9\u9762\u8868\u73b0\u51fa\u8272\u3002\u8fd9\u4e9b\u662f\u89e3\u51b3\u8fd9\u4e2a\u95ee\u9898\u7684\u6700\u4f73\u5de5\u5177\u3002<\/p>\n<p>Gemini 
\u5728\u5904\u7406\u590d\u6742\u7684 OCR \u4efb\u52a1\u65b9\u9762\u8868\u73b0\u51fa\u8272\u3002\u8fd9\u4e9b\u5de5\u5177\u4e0e\u7cbe\u5fc3\u5236\u4f5c\u7684\u63d0\u793a\u642d\u914d\u4f7f\u7528\u65f6\u7279\u522b\u6709\u6548\u3002<\/p>\n<p>\u5728\u5f00\u6e90\u65b9\u9762\uff0cDocTR \u63d0\u4f9b\u4e86\u6700\u597d\u7684\u7ed3\u679c\uff0c\u5bf9\u4e8e\u90a3\u4e9b\u613f\u610f\u6295\u5165\u65f6\u95f4\u7684\u4eba\u6765\u8bf4\uff0c\u8fd9\u7c7b\u5de5\u5177\u53ef\u80fd\u662f\u4e00\u79cd\u7ecf\u6d4e\u9ad8\u6548\u4e14\u7528\u9014\u5e7f\u6cdb\u7684\u66ff\u4ee3\u65b9\u6848\u3002<\/p>\n<p>\u8fd9\u4e9b\u7ed3\u679c\u57fa\u4e8e\u6211\u7684\u6570\u636e\u96c6\uff1b\u4f46\u662f\uff0c\u4e0d\u540c\u7684\u6570\u636e\u96c6\u53ef\u80fd\u4f1a\u4ea7\u751f\u4e0d\u540c\u7684\u7ed3\u679c\uff0c\u5c24\u5176\u662f\u5728\u5f00\u6e90\u5de5\u5177\u65b9\u9762\u3002<\/p>\n<p>\u5bf9\u4e8e\u90a3\u4e9b\u6709\u5174\u8da3\u8fdb\u4e00\u6b65\u63a2\u7d22\u7684\u4eba\uff0c\u5b8c\u6574\u7684\u4ee3\u7801\u3001\u4e00\u4e9b\u6d4b\u8bd5\u56fe\u50cf\u548c\u57fa\u51c6\u6d4b\u8bd5\u7ec6\u8282\u53ef\u4ee5\u5728\u8fd9\u4e2a \u4e2d\u67e5\u770b\u3002<\/p>\n<hr>\n","protected":false},"excerpt":{"rendered":"<p>\u5728\u5feb\u8282\u594f\u7684 IT \u4e16\u754c\u4e2d\uff0c\u5149\u5b66\u5b57\u7b26\u8bc6\u522b (OCR) \u5df2\u6210\u4e3a\u4ece\u56fe\u50cf\u4e2d\u63d0\u53d6\u6587\u672c\u7684\u4e0d\u53ef\u6216\u7f3a\u7684\u5de5\u5177\u3002\u4f46\u662f\uff0c\u5f53\u8fd9\u4e9b\u56fe\u50cf\u8d28\u91cf\u4f4e\u4e0b\u3001\u6a21\u7cca\u6216\u4e0d\u5b8c\u7f8e\u65f6\u4f1a\u53d1\u751f\u4ec0\u4e48\uff1f\u8fd9\u6b63\u662f\u6211\u5728\u5f53\u524d\u5de5\u4f5c\u4e2d\u9762\u4e34\u7684\u6311\u6218\uff0c\u5b83\u4fc3\u4f7f\u6211\u6df1\u5165\u7814\u7a76 OCR \u5de5\u5177\u548c\u670d\u52a1\u7684\u4e16\u754c\u4ee5\u5bfb\u627e\u89e3\u51b3\u65b9\u6848\u3002 \u6211\u63a2\u7d22\u4e86\u5404\u79cd OCR 
\u5de5\u5177\u548c\u670d\u52a1\uff0c\u5e76\u6839\u636e\u5b83\u4eec\u7684\u7279\u70b9\uff08\u4f8b\u5982\u4ef7\u683c\u3001\u51c6\u786e\u6027\u548c\u5b9e\u65bd\u5de5\u4f5c\u91cf\uff09\u5c06\u5b83\u4eec\u5206\u4e3a\u4e09\u4e2a\u4e0d\u540c\u7684\u7c7b\u522b\uff1a \u751f\u6210\u5f0fAI\uff1a\u5728\u8fd9\u91cc\uff0c\u6211\u7814\u7a76\u4e86 Gemini \u548c OpenAI \u7b49\u5c16\u7aef\u89e3\u51b3\u65b9\u6848\uff0c\u5b83\u4eec\u5229\u7528\u751f\u6210\u6a21\u578b\u7684\u5f3a\u5927\u529f\u80fd\u6765\u89e3\u51b3\u590d\u6742\u7684 OCR \u4efb\u52a1\u3002 \u89c6\u89c9 AI\uff1a\u6b64\u7c7b\u522b\u4e13\u6ce8\u4e8e\u4e13\u95e8\u7684\u8ba1\u7b97\u673a\u89c6\u89c9\u670d\u52a1\uff0c\u65e8\u5728\u7cbe\u786e\u5904\u7406\u57fa\u4e8e\u56fe\u50cf\u7684\u6311\u6218\u5e76\u4ece\u56fe\u50cf\u4e2d\u63d0\u53d6\u7279\u5f81\u3002 \u5f00\u6e90\u5e93\uff1a\u5bf9\u4e8e\u90a3\u4e9b\u5e0c\u671b\u6784\u5efa\u5185\u90e8\u89e3\u51b3\u65b9\u6848\u7684\u4eba\uff0c\u6211\u8bc4\u4f30\u4e86\u53ef\u7528\u7684\u6700\u4f73\u5f00\u6e90\u5de5\u5177\uff0c\u5e73\u8861\u4e86\u7075\u6d3b\u6027\u548c\u63a7\u5236\u529b\u3002 \u5728\u6574\u4e2a\u7814\u7a76\u8fc7\u7a0b\u4e2d\uff0c\u6211\u9488\u5bf9\u4e00\u7ec4\u4f4e\u8d28\u91cf\u56fe\u50cf\u6d4b\u8bd5\u4e86\u8fd9\u4e9b\u5de5\u5177\uff0c\u4ee5\u8bc4\u4f30\u5b83\u4eec\u5728\u5b9e\u9645\u573a\u666f\u4e2d\u7684\u8868\u73b0\u3002\u6211\u5f3a\u70c8\u5efa\u8bae\u4f60\u521b\u5efa\u56fe\u50cf\u6570\u636e\u96c6\uff0c\u4ee5\u786e\u4fdd\u5b83\u6700\u80fd\u6ee1\u8db3\u4f60\u7684\u7279\u5b9a\u9700\u6c42\u548c\u76ee\u6807\u3002 \u5982\u679c\u4f60\u5bf9\u8be6\u7ec6\u4fe1\u606f\u611f\u5230\u597d\u5947\uff0c\u53ef\u4ee5\u5728\u627e\u5230\u7528\u4e8e\u672c\u7814\u7a76\u7684\u5b8c\u6574\u4ee3\u7801\u3002\u6211\u7528\u6765\u6d4b\u8bd5\u89e3\u51b3\u65b9\u6848\u7684\u56fe\u50cf\u53ef\u5728\u83b7\u5f97\uff0c\u57fa\u51c6\u6d4b\u8bd5\u4ee3\u7801\u53ef\u5728\u8bbf\u95ee\u3002 1\u3001OpenAI &#8211; \u751f\u6210\u5f0fAI \u5728\u6211\u63a2\u7d22 OCR 
\u5de5\u5177\u7684\u8fc7\u7a0b\u4e2d\uff0c\u4e00\u79cd\u6709\u8da3\u7684\u65b9\u6cd5\u662f\u5229\u7528 ChatGPT \u548c\u5b9a\u5236\u63d0\u793a\u4ece\u56fe\u50cf\u4e2d\u63d0\u53d6\u6587\u672c\u3002 from openai import OpenAI # pip install openai from base64 import b64encode # Generate APIKey https:\/\/platform.openai.com\/api-keys OPENAI_API_KEY = &#8220;&#8221; image_path = &#8220;image.jpg&#8221; with open(image_path, &#8220;rb&#8221;) [&hellip;]<\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"closed","ping_status":"","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[13],"tags":[],"class_list":["post-53753","post","type-post","status-publish","format-standard","hentry","category-ai"],"_links":{"self":[{"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/posts\/53753","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/comments?post=53753"}],"version-history":[{"count":0,"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/posts\/53753\/revisions"}],"wp:attachment":[{"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/media?parent=53753"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/categories?post=53753"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/tags?post=53753"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}