{"id":53781,"date":"2025-02-16T13:03:29","date_gmt":"2025-02-16T05:03:29","guid":{"rendered":"https:\/\/fwq.ai\/blog\/53781\/"},"modified":"2025-02-16T13:03:29","modified_gmt":"2025-02-16T05:03:29","slug":"%e8%92%b8%e9%a6%8fdeepseek-r1%e5%88%b0%e8%87%aa%e5%b7%b1%e7%9a%84%e6%a8%a1%e5%9e%8b","status":"publish","type":"post","link":"https:\/\/fwq.ai\/blog\/53781\/","title":{"rendered":"\u84b8\u998fDeepSeek-R1\u5230\u81ea\u5df1\u7684\u6a21\u578b"},"content":{"rendered":"<p>\u6df1\u5ea6\u5b66\u4e60\u6a21\u578b\u5f7b\u5e95\u6539\u53d8\u4e86\u4eba\u5de5\u667a\u80fd\u9886\u57df\uff0c\u4f46\u5176\u5e9e\u5927\u7684\u89c4\u6a21\u548c\u8ba1\u7b97\u9700\u6c42\u53ef\u80fd\u4f1a\u6210\u4e3a\u5b9e\u9645\u5e94\u7528\u7684\u74f6\u9888\u3002\u6a21\u578b\u84b8\u998f\u662f\u4e00\u79cd\u5f3a\u5927\u7684\u6280\u672f\uff0c\u5b83\u901a\u8fc7\u5c06\u77e5\u8bc6\u4ece\u5927\u578b\u590d\u6742\u6a21\u578b\uff08\u6559\u5e08\uff09\u8f6c\u79fb\u5230\u8f83\u5c0f\u3001\u66f4\u9ad8\u6548\u7684\u6a21\u578b\uff08\u5b66\u751f\uff09\u6765\u89e3\u51b3\u8fd9\u4e00\u6311\u6218\u3002<\/p>\n<p>\u5728\u672c\u535a\u5ba2\u4e2d\uff0c\u6211\u4eec\u5c06\u4ecb\u7ecd\u5982\u4f55\u4f7f\u7528 LoRA\uff08\u4f4e\u79e9\u81ea\u9002\u5e94\uff09\u7b49\u4e13\u95e8\u6280\u672f\u5c06 DeepSeek-R1 \u7684\u63a8\u7406\u80fd\u529b\u84b8\u998f\u5230\u8f83\u5c0f\u7684\u6a21\u578b\uff08\u5982 Microsoft \u7684 Phi-3-Mini\uff09\u4e2d\u3002<\/p>\n<h2>1\u3001\u4ec0\u4e48\u662f\u84b8\u998f\uff1f<\/h2>\n<p>\u84b8\u998f\u662f\u4e00\u79cd\u673a\u5668\u5b66\u4e60\u6280\u672f\uff0c\u5176\u4e2d\u8f83\u5c0f\u7684\u6a21\u578b\uff08\u201c\u5b66\u751f\u201d\uff09\u7ecf\u8fc7\u8bad\u7ec3\u4ee5\u6a21\u4eff\u8f83\u5927\u7684\u9884\u8bad\u7ec3\u6a21\u578b\uff08\u201c\u8001\u5e08\u201d\uff09\u7684\u884c\u4e3a\u3002\u76ee\u6807\u662f\u4fdd\u7559\u8001\u5e08\u7684\u5927\u90e8\u5206\u8868\u73b0\uff0c\u540c\u65f6\u663e\u7740\u964d\u4f4e\u8ba1\u7b97\u6210\u672c\u548c\u5185\u5b58\u5360\u7528\u3002<\/p>\n<p>\u8fd9\u4e2a\u60f3\u6cd5\u6700\u65e9\u662f\u5728 
Geoffrey Hinton \u5173\u4e8e\u77e5\u8bc6\u84b8\u998f\u7684\u5f00\u521b\u6027\u8bba\u6587\u4e2d\u63d0\u51fa\u7684\u3002\u5b83\u4e0d\u662f\u76f4\u63a5\u5728\u539f\u59cb\u6570\u636e\u4e0a\u8bad\u7ec3\u5b66\u751f\u6a21\u578b\uff0c\u800c\u662f\u4ece\u8001\u5e08\u6a21\u578b\u7684\u8f93\u51fa\u6216\u4e2d\u95f4\u8868\u793a\u4e2d\u5b66\u4e60\u3002\u8fd9\u5b9e\u9645\u4e0a\u662f\u53d7\u5230\u4eba\u7c7b\u6559\u80b2\u7684\u542f\u53d1\u3002<\/p>\n<p>\u4e3a\u4ec0\u4e48\u5b83\u5f88\u91cd\u8981\uff1a<\/p>\n<ul>\n<li>\u6210\u672c\u6548\u7387\uff1a\u8f83\u5c0f\u7684\u6a21\u578b\u9700\u8981\u66f4\u5c11\u7684\u8ba1\u7b97\u8d44\u6e90\u3002<\/li>\n<li>\u901f\u5ea6\uff1a\u975e\u5e38\u9002\u5408\u5ef6\u8fdf\u654f\u611f\u7684\u5e94\u7528\u7a0b\u5e8f\uff08\u4f8b\u5982 API\u3001\u8fb9\u7f18\u8bbe\u5907\uff09\u3002<\/li>\n<li>\u4e13\u4e1a\u5316\uff1a\u65e0\u9700\u91cd\u65b0\u8bad\u7ec3\u5de8\u578b\u6a21\u578b\u5373\u53ef\u9488\u5bf9\u7279\u5b9a\u9886\u57df\u5b9a\u5236\u6a21\u578b\u3002<\/li>\n<\/ul>\n<h2>2\u3001\u84b8\u998f\u7c7b\u578b<\/h2>\n<p>\u6a21\u578b\u84b8\u998f\u6709\u51e0\u79cd\u65b9\u6cd5\uff0c\u6bcf\u79cd\u65b9\u6cd5\u90fd\u6709\u5404\u81ea\u7684\u4f18\u70b9\uff1a<\/p>\n<p>\u6570\u636e\u84b8\u998f\uff1a<\/p>\n<ul>\n<li>\u5728\u6570\u636e\u84b8\u998f\u4e2d\uff0c\u6559\u5e08\u6a21\u578b\u751f\u6210\u5408\u6210\u6570\u636e\u6216\u4f2a\u6807\u7b7e\uff0c\u7136\u540e\u7528\u4e8e\u8bad\u7ec3\u5b66\u751f\u6a21\u578b\u3002<\/li>\n<li>\u8fd9\u79cd\u65b9\u6cd5\u53ef\u4ee5\u5e94\u7528\u4e8e\u5e7f\u6cdb\u7684\u4efb\u52a1\uff0c\u5373\u4f7f\u662f\u90a3\u4e9b logits \u4fe1\u606f\u91cf\u8f83\u5c11\u7684\u4efb\u52a1\uff08\u4f8b\u5982\u5f00\u653e\u5f0f\u63a8\u7406\u4efb\u52a1\uff09\u3002<\/li>\n<\/ul>\n<p>Logits\u84b8\u998f\uff1a<\/p>\n<ul>\n<li>Logits \u662f\u5e94\u7528 softmax \u51fd\u6570\u4e4b\u524d\u795e\u7ecf\u7f51\u7edc\u7684\u539f\u59cb\u8f93\u51fa\u5206\u6570\u3002<\/li>\n<li>\u5728 
logits\u84b8\u998f\u4e2d\uff0c\u5b66\u751f\u6a21\u578b\u7ecf\u8fc7\u8bad\u7ec3\u4ee5\u5339\u914d\u6559\u5e08\u7684 logits\uff0c\u800c\u4e0d\u4ec5\u4ec5\u662f\u6700\u7ec8\u9884\u6d4b\u3002<\/li>\n<li>\u8fd9\u79cd\u65b9\u6cd5\u4fdd\u7559\u4e86\u66f4\u591a\u5173\u4e8e\u6559\u5e08\u4fe1\u5fc3\u6c34\u5e73\u548c\u51b3\u7b56\u8fc7\u7a0b\u7684\u4fe1\u606f\u3002<\/li>\n<\/ul>\n<p>\u7279\u5f81\u84b8\u998f\uff1a<\/p>\n<ul>\n<li>\u7279\u5f81\u63d0\u70bc\u6d89\u53ca\u5c06\u77e5\u8bc6\u4ece\u6559\u5e08\u6a21\u578b\u7684\u4e2d\u95f4\u5c42\u8f6c\u79fb\u5230\u5b66\u751f\u3002<\/li>\n<li>\u901a\u8fc7\u5bf9\u9f50\u4e24\u4e2a\u6a21\u578b\u7684\u9690\u85cf\u8868\u793a\uff0c\u5b66\u751f\u53ef\u4ee5\u5b66\u4e60\u66f4\u4e30\u5bcc\u3001\u66f4\u62bd\u8c61\u7684\u7279\u5f81\u3002<\/li>\n<\/ul>\n<h2>3\u3001Deepseek \u7684\u84b8\u998f\u6a21\u578b<\/h2>\n<p>\u4e3a\u4e86\u4f7f\u8bbf\u95ee\u66f4\u52a0\u6c11\u4e3b\u5316\uff0cDeepSeek AI \u53d1\u5e03\u4e86\u57fa\u4e8e Qwen\uff08Qwen\uff0c2024b\uff09\u548c Llama\uff08AI@Meta\uff0c2024\uff09\u7b49\u6d41\u884c\u67b6\u6784\u7684\u516d\u4e2a\u84b8\u998f\u53d8\u4f53\u3002\u4ed6\u4eec\u4f7f\u7528 DeepSeek-R1 \u7b56\u5212\u7684 800k \u4e2a\u6837\u672c\u76f4\u63a5\u5fae\u8c03\u5f00\u6e90\u6a21\u578b\u3002<\/p>\n<p>\u5c3d\u7ba1\u6bd4 DeepSeek-R1 \u5c0f\u5f97\u591a\uff0c\u4f46\u84b8\u998f\u6a21\u578b\u5728\u5404\u79cd\u57fa\u51c6\u6d4b\u8bd5\u4e2d\u90fd\u8868\u73b0\u51fa\u8272\uff0c\u901a\u5e38\u53ef\u4ee5\u5339\u654c\u751a\u81f3\u8d85\u8d8a\u66f4\u5927\u6a21\u578b\u7684\u80fd\u529b\u3002\u5982\u4e0b\u56fe\u6240\u793a<\/p>\n<p>  Deepseek \u63d0\u70bc\u6a21\u578b\u57fa\u51c6\u6d4b\u8bd5\uff08 
<\/p>\n<h2>4\u3001\u4e3a\u4ec0\u4e48\u8981\u84b8\u998f\u81ea\u5df1\u7684\u6a21\u578b\uff1f<\/h2>\n<ul>\n<li>\u7279\u5b9a\u4efb\u52a1\u4f18\u5316<\/li>\n<\/ul>\n<p>\u9884\u84b8\u998f\u6a21\u578b\u5728\u5e7f\u6cdb\u7684\u6570\u636e\u96c6\u4e0a\u8fdb\u884c\u8bad\u7ec3\uff0c\u4ee5\u5728\u5404\u79cd\u4efb\u52a1\u4e2d\u8868\u73b0\u826f\u597d\u3002\u7136\u800c\uff0c\u73b0\u5b9e\u4e16\u754c\u7684\u5e94\u7528\u7a0b\u5e8f\u901a\u5e38\u9700\u8981\u4e13\u4e1a\u5316\u3002<\/p>\n<p>\u793a\u4f8b\u573a\u666f\uff1a\u4f60\u6b63\u5728\u6784\u5efa\u4e00\u4e2a\u91d1\u878d\u9884\u6d4b\u804a\u5929\u673a\u5668\u4eba\u3002\u5728\u8fd9\u79cd\u60c5\u51b5\u4e0b\uff0c\u4f7f\u7528 DeepSeek-R1 \u4e3a\u91d1\u878d\u6570\u636e\u96c6\u751f\u6210\u63a8\u7406\u8f68\u8ff9\uff08\u4f8b\u5982\uff0c\u80a1\u7968\u4ef7\u683c\u9884\u6d4b\u3001\u98ce\u9669\u5206\u6790\uff09\uff0c\u5e76\u5c06\u8fd9\u4e9b\u77e5\u8bc6\u84b8\u998f\u6210\u4e00\u4e2a\u5df2\u7ecf\u4e86\u89e3\u91d1\u878d\u7ec6\u5fae\u5dee\u522b\u7684\u8f83\u5c0f\u6a21\u578b\uff08\u4f8b\u5982\uff1afinance-LLM\uff09\u3002<\/p>\n<ul>\n<li>\u5927\u89c4\u6a21\u6210\u672c\u6548\u7387<\/li>\n<\/ul>\n<p>\u867d\u7136\u9884\u84b8\u998f\u6a21\u578b\u6548\u7387\u5f88\u9ad8\uff0c\u4f46\u5b83\u4eec\u53ef\u80fd\u4ecd\u7136\u4e0d\u9002\u5408\u4f60\u7684\u7279\u5b9a\u5de5\u4f5c\u91cf\u3002\u84b8\u998f\u4f60\u81ea\u5df1\u7684\u6a21\u578b\u53ef\u4ee5\u8ba9\u4f60\u9488\u5bf9\u786e\u5207\u7684\u8d44\u6e90\u9650\u5236\u8fdb\u884c\u4f18\u5316\u3002<\/p>\n<ul>\n<li>\u57fa\u51c6\u6027\u80fd \u2260 
\u771f\u5b9e\u4e16\u754c\u6027\u80fd<\/li>\n<\/ul>\n<p>\u9884\u84b8\u998f\u6a21\u578b\u5728\u57fa\u51c6\u6d4b\u8bd5\u4e2d\u8868\u73b0\u51fa\u8272\uff0c\u4f46\u57fa\u51c6\u6d4b\u8bd5\u901a\u5e38\u4e0d\u80fd\u4ee3\u8868\u771f\u5b9e\u4e16\u754c\u7684\u4efb\u52a1\u3002\u56e0\u6b64\uff0c\u4f60\u901a\u5e38\u9700\u8981\u4e00\u4e2a\u5728\u771f\u5b9e\u4e16\u754c\u573a\u666f\u4e2d\u8868\u73b0\u6bd4\u4efb\u4f55\u9884\u84b8\u998f\u6a21\u578b\u90fd\u66f4\u597d\u7684\u6a21\u578b\u3002<\/p>\n<ul>\n<li>\u8fed\u4ee3\u6539\u8fdb<\/li>\n<\/ul>\n<p>\u9884\u84b8\u998f\u6a21\u578b\u662f\u9759\u6001\u7684\u2014\u2014\u5b83\u4eec\u4e0d\u4f1a\u968f\u7740\u65f6\u95f4\u7684\u63a8\u79fb\u800c\u6539\u8fdb\u3002\u901a\u8fc7\u84b8\u998f\u81ea\u5df1\u7684\u6a21\u578b\uff0c\u4f60\u53ef\u4ee5\u5728\u65b0\u6570\u636e\u53ef\u7528\u65f6\u4e0d\u65ad\u5b8c\u5584\u5b83\u3002<\/p>\n<h2>5\u3001\u5c06 DeepSeek-R1 \u77e5\u8bc6\u84b8\u998f\u6210\u81ea\u5b9a\u4e49\u5c0f\u6a21\u578b<\/h2>\n<p>\u9996\u5148\u5b89\u88c5\u5e93\uff1a<\/p>\n<pre><code>pip install -q torch transformers datasets accelerate bitsandbytes flash-attn --no-build-isolation<\/code><\/pre>\n<h3>5.1 \u751f\u6210\u548c\u683c\u5f0f\u5316\u6570\u636e\u96c6<\/h3>\n<p>\u4f60\u53ef\u4ee5\u901a\u8fc7\u5728\u4f60\u7684\u73af\u5883\u4e2d\u4f7f\u7528 ollama \u6216\u4efb\u4f55\u5176\u4ed6\u90e8\u7f72\u6846\u67b6\u90e8\u7f72 deepseek-r1 \u6765\u751f\u6210\u81ea\u5b9a\u4e49\u57df\u76f8\u5173\u6570\u636e\u96c6\u3002\u4f46\u662f\uff0c\u5bf9\u4e8e\u672c\u6559\u7a0b\uff0c\u6211\u4eec\u5c06\u4f7f\u7528 Magpie-Reasoning-V2 \u6570\u636e\u96c6\uff0c\u5176\u4e2d\u5305\u542b DeepSeek-R1 \u751f\u6210\u7684 250K \u601d\u8def\u94fe (CoT) \u63a8\u7406\u6837\u672c\uff0c\u8fd9\u4e9b\u793a\u4f8b\u6db5\u76d6\u4e86\u6570\u5b66\u63a8\u7406\u3001\u7f16\u7801\u548c\u4e00\u822c\u95ee\u9898\u89e3\u51b3\u7b49\u5404\u79cd\u4efb\u52a1\u3002<\/p>\n<blockquote><p>\n  
\u6570\u636e\u96c6\u7ed3\u6784\n<\/p><\/blockquote>\n<p>\u6bcf\u4e2a\u793a\u4f8b\u5305\u62ec\uff1a<\/p>\n<ul>\n<li>\u6307\u4ee4\uff1a\u4efb\u52a1\u63cf\u8ff0\uff08\u4f8b\u5982\uff0c\u201c\u89e3\u51b3\u8fd9\u4e2a\u6570\u5b66\u95ee\u9898\u201d\uff09\u3002<\/li>\n<li>\u54cd\u5e94\uff1aDeepSeek-R1 \u7684\u5206\u6b65\u63a8\u7406 (CoT)\u3002<\/li>\n<\/ul>\n<p>\u793a\u4f8b\uff1a<\/p>\n<pre><code>{\n  \"instruction\": \"Solve for x: 2x + 5 = 15\",\n  \"response\": \"&lt;think&gt;First, subtract 5 from both sides: 2x = 10. Then, divide by 2: x = 5.&lt;\/think&gt;\"\n}<\/code><\/pre>\n<pre><code>from datasets import load_dataset\n\n# Load the dataset\ndataset = load_dataset(\"Magpie-Align\/Magpie-Reasoning-V2-250K-CoT-Deepseek-R1-Llama-70B\", token=\"YOUR_HF_TOKEN\")\ndataset = dataset[\"train\"]\n\n# Format the dataset\ndef format_instruction(example):\n    return {\n        \"text\": (\n            \"&lt;|user|&gt;\\n\"\n            f\"{example['instruction']}\\n\"\n            \"&lt;|end|&gt;\\n\"\n            \"&lt;|assistant|&gt;\\n\"\n            f\"{example['response']}\\n\"\n            \"&lt;|end|&gt;\"\n        )\n    }\n\nformatted_dataset = dataset.map(format_instruction, batched=False, remove_columns=dataset.column_names)\nformatted_dataset = formatted_dataset.train_test_split(test_size=0.1)  # 90-10 train-test split<\/code><\/pre>\n<p>\u5c06\u6570\u636e\u96c6\u6784\u9020\u4e3a Phi-3 \u7684\u804a\u5929\u6a21\u677f\u683c\u5f0f\uff1a<\/p>\n<ul>\n<li><code>&lt;|user|&gt;<\/code>\uff1a\u6807\u8bb0\u7528\u6237\u67e5\u8be2\u7684\u5f00\u59cb\u3002<\/li>\n<li><code>&lt;|assistant|&gt;<\/code>\uff1a\u6807\u8bb0\u6a21\u578b\u54cd\u5e94\u7684\u5f00\u59cb\u3002<\/li>\n<li><code>&lt;|end|&gt;<\/code>\uff1a\u6807\u8bb0\u56de\u5408\u7684\u7ed3\u675f\u3002<\/li>\n<\/ul>\n<p>\u6bcf\u4e2a LLM 
\u90fd\u4f7f\u7528\u7279\u5b9a\u683c\u5f0f\u6765\u6267\u884c\u6307\u4ee4\u8ddf\u8e2a\u4efb\u52a1\u3002\u5c06\u6570\u636e\u96c6\u4e0e\u6b64\u7ed3\u6784\u5bf9\u9f50\u53ef\u786e\u4fdd\u6a21\u578b\u5b66\u4e60\u6b63\u786e\u7684\u5bf9\u8bdd\u6a21\u5f0f\u3002\u56e0\u6b64\uff0c\u8bf7\u786e\u4fdd\u6839\u636e\u8981\u63d0\u53d6\u7684\u6a21\u578b\u683c\u5f0f\u5316\u6570\u636e\u3002<\/p>\n<h3>5.2 \u52a0\u8f7d\u6a21\u578b\u548c\u6807\u8bb0\u5668<\/h3>\n<p>\u5411\u6807\u8bb0\u5668\u6dfb\u52a0\u7279\u6b8a\u6807\u8bb0 <code>&lt;think&gt;<\/code> \u548c <code>&lt;\/think&gt;<\/code> \u3002<\/p>\n<p>\u4e3a\u4e86\u589e\u5f3a\u6a21\u578b\u7684\u63a8\u7406\u80fd\u529b\uff0c\u6211\u4eec\u5f15\u5165\u4e86\u8fd9\u4e9b\u6807\u8bb0\u3002<\/p>\n<ul>\n<li><code>&lt;think&gt;<\/code>\uff1a\u6807\u8bb0\u63a8\u7406\u7684\u5f00\u59cb\u3002<\/li>\n<li><code>&lt;\/think&gt;<\/code>\uff1a\u6807\u8bb0\u63a8\u7406\u7684\u7ed3\u675f\u3002<\/li>\n<\/ul>\n<p>\u8fd9\u4e9b\u6807\u8bb0\u5e2e\u52a9\u6a21\u578b\u5b66\u4e60\u751f\u6210\u7ed3\u6784\u5316\u7684\u3001\u5206\u6b65\u7684\u89e3\u51b3\u65b9\u6848\u3002<\/p>\n<pre><code>import torch\nfrom transformers import AutoTokenizer, AutoModelForCausalLM\n\nmodel_id = \"microsoft\/phi-3-mini-4k-instruct\"\ntokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)\n\n# Add custom tokens\nCUSTOM_TOKENS = [\"&lt;think&gt;\", \"&lt;\/think&gt;\"]\ntokenizer.add_special_tokens({\"additional_special_tokens\": CUSTOM_TOKENS})\ntokenizer.pad_token = tokenizer.eos_token\n\n# Load model with flash attention\nmodel = AutoModelForCausalLM.from_pretrained(\n    model_id,\n    trust_remote_code=True,\n    device_map=\"auto\",\n    torch_dtype=torch.float16,\n    attn_implementation=\"flash_attention_2\"\n)\nmodel.resize_token_embeddings(len(tokenizer))  # Resize for custom tokens<\/code><\/pre>\n<h3>5.3 \u914d\u7f6e LoRA \u4ee5\u5b9e\u73b0\u9ad8\u6548\u5fae\u8c03<\/h3>\n<p>LoRA 
\u901a\u8fc7\u51bb\u7ed3\u57fa\u7840\u6a21\u578b\u5e76\u4ec5\u8bad\u7ec3\u5c0f\u578b\u9002\u914d\u5668\u5c42\u6765\u51cf\u5c11\u5185\u5b58\u4f7f\u7528\u91cf\u3002<\/p>\n<pre><code>from peft import LoraConfig\n\npeft_config = LoraConfig(\n    r=8,  # Rank of the low-rank matrices\n    lora_alpha=16,  # Scaling factor\n    lora_dropout=0.2,  # Dropout rate\n    target_modules=[\"q_proj\", \"k_proj\", \"v_proj\", \"o_proj\"],  # Target attention layers\n    bias=\"none\",  # No bias terms\n    task_type=\"CAUSAL_LM\"  # Task type\n)<\/code><\/pre>\n<h3>5.4 \u8bbe\u7f6e\u8bad\u7ec3\u53c2\u6570<\/h3>\n<pre><code>from transformers import TrainingArguments\n\ntraining_args = TrainingArguments(\n    output_dir=\".\/phi-3-deepseek-finetuned\",\n    num_train_epochs=3,\n    per_device_train_batch_size=2,\n    per_device_eval_batch_size=2,\n    gradient_accumulation_steps=4,\n    eval_strategy=\"epoch\",\n    save_strategy=\"epoch\",\n    logging_strategy=\"steps\",\n    logging_steps=50,\n    learning_rate=2e-5,\n    fp16=True,\n    optim=\"paged_adamw_32bit\",\n    max_grad_norm=0.3,\n    warmup_ratio=0.03,\n    lr_scheduler_type=\"cosine\"\n)<\/code><\/pre>\n<h3>5.5 \u8bad\u7ec3\u6a21\u578b<\/h3>\n<p><code>SFTTrainer<\/code> \u7b80\u5316\u4e86\u6307\u4ee4\u8ddf\u968f\u6a21\u578b\u7684\u76d1\u7763\u5fae\u8c03\u3002 <code>data_collator<\/code> \u6279\u91cf\u5904\u7406\u793a\u4f8b\uff0c <code>peft_config<\/code> \u652f\u6301\u57fa\u4e8e LoRA \u7684\u8bad\u7ec3\u3002<\/p>\n<pre><code>from trl import SFTTrainer\nfrom transformers import DataCollatorForLanguageModeling\n\n# Data collator\ndata_collator = DataCollatorForLanguageModeling(tokenizer=tokenizer, mlm=False)\n\n# Trainer\ntrainer = SFTTrainer(\n    model=model,\n    args=training_args,\n    train_dataset=formatted_dataset[\"train\"],\n    eval_dataset=formatted_dataset[\"test\"],\n    data_collator=data_collator,\n    peft_config=peft_config\n)\n\n# Start 
training\ntrainer.train()\ntrainer.save_model(\".\/phi-3-deepseek-finetuned\")\ntokenizer.save_pretrained(\".\/phi-3-deepseek-finetuned\")<\/code><\/pre>\n<h3>5.6 \u5408\u5e76\u4fdd\u5b58\u6700\u7ec8\u6a21\u578b<\/h3>\n<p>\u8bad\u7ec3\u540e\uff0c\u5fc5\u987b\u5c06 LoRA \u9002\u914d\u5668\u4e0e\u57fa\u7840\u6a21\u578b\u5408\u5e76\u4ee5\u8fdb\u884c\u63a8\u7406\u3002\u6b64\u6b65\u9aa4\u786e\u4fdd\u6a21\u578b\u53ef\u4ee5\u5728\u6ca1\u6709 PEFT \u7684\u60c5\u51b5\u4e0b\u72ec\u7acb\u4f7f\u7528\u3002<\/p>\n<pre><code>final_model = trainer.model.merge_and_unload()\nfinal_model.save_pretrained(\".\/phi-3-deepseek-finetuned-final\")\ntokenizer.save_pretrained(\".\/phi-3-deepseek-finetuned-final\")<\/code><\/pre>\n<h3>5.7 \u63a8\u7406<\/h3>\n<pre><code>from transformers import pipeline\n\n# Load fine-tuned model\nmodel = AutoModelForCausalLM.from_pretrained(\n    \".\/phi-3-deepseek-finetuned-final\",\n    device_map=\"auto\",\n    torch_dtype=torch.float16\n)\n\ntokenizer = AutoTokenizer.from_pretrained(\".\/phi-3-deepseek-finetuned-final\")\nmodel.resize_token_embeddings(len(tokenizer))\n\n# Create chat pipeline\nchat_pipeline = pipeline(\n    \"text-generation\",\n    model=model,\n    tokenizer=tokenizer,\n    device_map=\"auto\"\n)\n\n# Generate response\nprompt = \"\"\"&lt;|user|&gt;\nWhat's the probability of rolling a 7 with two dice?\n&lt;|end|&gt;\n&lt;|assistant|&gt;\n\"\"\"\n\noutput = chat_pipeline(\n    prompt,\n    max_new_tokens=5000,\n    temperature=0.7,\n    do_sample=True,\n    eos_token_id=tokenizer.eos_token_id\n)\n\nprint(output[0]['generated_text'])<\/code><\/pre>\n<p>\u4e0b\u9762\u4f60\u53ef\u4ee5\u770b\u5230 phi \u6a21\u578b\u5728\u84b8\u998f\u524d\u540e\u7684\u54cd\u5e94\u3002<\/p>\n<blockquote><p>\n  \u95ee\u9898\uff1a\u7528\u4e24\u4e2a\u9ab0\u5b50\u63b7\u51fa 7 
\u7684\u6982\u7387\u662f\u591a\u5c11\uff1f\n<\/p><\/blockquote>\n<ul>\n<li>\u84b8\u998f\u524d\u7684\u63a8\u7406<\/li>\n<\/ul>\n<p>\u54cd\u5e94\u7b80\u5355\u660e\u4e86\u3002\u5b83\u76f4\u63a5\u63d0\u4f9b\u4e86\u8ba1\u7b97\u7b54\u6848\u7684\u6b65\u9aa4\u3002<\/p>\n<p>  \u84b8\u998f\u524d\u7684 Phi \u63a8\u7406 <\/p>\n<ul>\n<li>\u84b8\u998f\u540e\u7684\u63a8\u7406<\/li>\n<\/ul>\n<p>\u84b8\u998f\u540e\u7684\u54cd\u5e94\u5f15\u5165\u4e86\u4e00\u79cd\u66f4\u8be6\u7ec6\u548c\u7ed3\u6784\u5316\u7684\u65b9\u6cd5\uff0c\u5305\u62ec\u4e00\u4e2a\u660e\u786e\u7684\u201c\u601d\u8003\u201d\u90e8\u5206\uff0c\u6982\u8ff0\u4e86\u601d\u7ef4\u8fc7\u7a0b\u548c\u63a8\u7406\uff0c\u8fd9\u5bf9\u4e8e\u4e3a\u590d\u6742\u95ee\u9898\u751f\u6210\u51c6\u786e\u7684\u54cd\u5e94\u975e\u5e38\u6709\u5e2e\u52a9\u3002<\/p>\n<p>  \u84b8\u998f\u540e\u7684 Phi \u63a8\u7406 <\/p>\n<p>\u6700\u540e\uff0c\u5c06\u84b8\u998f\u540e\u7684\u6a21\u578b\u6743\u91cd\u63a8\u9001\u5230 \uff08repo_id\uff1a <code>GPD1\/DeepSeek-R1-Distill-phi-3-mini-4k-lorar8-alpha16\u201350000samples<\/code>\uff09\u3002<\/p>\n<hr>\n","protected":false},"excerpt":{"rendered":"<p>\u6df1\u5ea6\u5b66\u4e60\u6a21\u578b\u5f7b\u5e95\u6539\u53d8\u4e86\u4eba\u5de5\u667a\u80fd\u9886\u57df\uff0c\u4f46\u5176\u5e9e\u5927\u7684\u89c4\u6a21\u548c\u8ba1\u7b97\u9700\u6c42\u53ef\u80fd\u4f1a\u6210\u4e3a\u5b9e\u9645\u5e94\u7528\u7684\u74f6\u9888\u3002\u6a21\u578b\u84b8\u998f\u662f\u4e00\u79cd\u5f3a\u5927\u7684\u6280\u672f\uff0c\u5b83\u901a\u8fc7\u5c06\u77e5\u8bc6\u4ece\u5927\u578b\u590d\u6742\u6a21\u578b\uff08\u6559\u5e08\uff09\u8f6c\u79fb\u5230\u8f83\u5c0f\u3001\u66f4\u9ad8\u6548\u7684\u6a21\u578b\uff08\u5b66\u751f\uff09\u6765\u89e3\u51b3\u8fd9\u4e00\u6311\u6218\u3002 \u5728\u672c\u535a\u5ba2\u4e2d\uff0c\u6211\u4eec\u5c06\u4ecb\u7ecd\u5982\u4f55\u4f7f\u7528 LoRA\uff08\u4f4e\u79e9\u81ea\u9002\u5e94\uff09\u7b49\u4e13\u95e8\u6280\u672f\u5c06 DeepSeek-R1 \u7684\u63a8\u7406\u80fd\u529b\u84b8\u998f\u5230\u8f83\u5c0f\u7684\u6a21\u578b\uff08\u5982 Microsoft \u7684 
Phi-3-Mini\uff09\u4e2d\u3002 1\u3001\u4ec0\u4e48\u662f\u84b8\u998f\uff1f \u84b8\u998f\u662f\u4e00\u79cd\u673a\u5668\u5b66\u4e60\u6280\u672f\uff0c\u5176\u4e2d\u8f83\u5c0f\u7684\u6a21\u578b\uff08\u201c\u5b66\u751f\u201d\uff09\u7ecf\u8fc7\u8bad\u7ec3\u4ee5\u6a21\u4eff\u8f83\u5927\u7684\u9884\u8bad\u7ec3\u6a21\u578b\uff08\u201c\u8001\u5e08\u201d\uff09\u7684\u884c\u4e3a\u3002\u76ee\u6807\u662f\u4fdd\u7559\u8001\u5e08\u7684\u5927\u90e8\u5206\u8868\u73b0\uff0c\u540c\u65f6\u663e\u7740\u964d\u4f4e\u8ba1\u7b97\u6210\u672c\u548c\u5185\u5b58\u5360\u7528\u3002 \u8fd9\u4e2a\u60f3\u6cd5\u6700\u65e9\u662f\u5728 Geoffrey Hinton \u5173\u4e8e\u77e5\u8bc6\u84b8\u998f\u7684\u5f00\u521b\u6027\u8bba\u6587\u4e2d\u63d0\u51fa\u7684\u3002\u5b83\u4e0d\u662f\u76f4\u63a5\u5728\u539f\u59cb\u6570\u636e\u4e0a\u8bad\u7ec3\u5b66\u751f\u6a21\u578b\uff0c\u800c\u662f\u4ece\u8001\u5e08\u6a21\u578b\u7684\u8f93\u51fa\u6216\u4e2d\u95f4\u8868\u793a\u4e2d\u5b66\u4e60\u3002\u8fd9\u5b9e\u9645\u4e0a\u662f\u53d7\u5230\u4eba\u7c7b\u6559\u80b2\u7684\u542f\u53d1\u3002 \u4e3a\u4ec0\u4e48\u5b83\u5f88\u91cd\u8981\uff1a \u6210\u672c\u6548\u7387\uff1a\u8f83\u5c0f\u7684\u6a21\u578b\u9700\u8981\u66f4\u5c11\u7684\u8ba1\u7b97\u8d44\u6e90\u3002 \u901f\u5ea6\uff1a\u975e\u5e38\u9002\u5408\u5ef6\u8fdf\u654f\u611f\u7684\u5e94\u7528\u7a0b\u5e8f\uff08\u4f8b\u5982 API\u3001\u8fb9\u7f18\u8bbe\u5907\uff09\u3002 \u4e13\u4e1a\u5316\uff1a\u65e0\u9700\u91cd\u65b0\u8bad\u7ec3\u5de8\u578b\u6a21\u578b\u5373\u53ef\u9488\u5bf9\u7279\u5b9a\u9886\u57df\u5b9a\u5236\u6a21\u578b\u3002 2\u3001\u84b8\u998f\u7c7b\u578b \u6a21\u578b\u84b8\u998f\u6709\u51e0\u79cd\u65b9\u6cd5\uff0c\u6bcf\u79cd\u65b9\u6cd5\u90fd\u6709\u5404\u81ea\u7684\u4f18\u70b9\uff1a \u6570\u636e\u84b8\u998f\uff1a \u5728\u6570\u636e\u84b8\u998f\u4e2d\uff0c\u6559\u5e08\u6a21\u578b\u751f\u6210\u5408\u6210\u6570\u636e\u6216\u4f2a\u6807\u7b7e\uff0c\u7136\u540e\u7528\u4e8e\u8bad\u7ec3\u5b66\u751f\u6a21\u578b\u3002 
\u8fd9\u79cd\u65b9\u6cd5\u53ef\u4ee5\u5e94\u7528\u4e8e\u5e7f\u6cdb\u7684\u4efb\u52a1\uff0c\u5373\u4f7f\u662f\u90a3\u4e9b logits \u4fe1\u606f\u91cf\u8f83\u5c11\u7684\u4efb\u52a1\uff08\u4f8b\u5982\u5f00\u653e\u5f0f\u63a8\u7406\u4efb\u52a1\uff09\u3002 Logits\u84b8\u998f\uff1a Logits \u662f\u5e94\u7528 softmax \u51fd\u6570\u4e4b\u524d\u795e\u7ecf\u7f51\u7edc\u7684\u539f\u59cb\u8f93\u51fa\u5206\u6570\u3002 \u5728 logits\u84b8\u998f\u4e2d\uff0c\u5b66\u751f\u6a21\u578b\u7ecf\u8fc7\u8bad\u7ec3\u4ee5\u5339\u914d\u6559\u5e08\u7684 logits\uff0c\u800c\u4e0d\u4ec5\u4ec5\u662f\u6700\u7ec8\u9884\u6d4b\u3002 \u8fd9\u79cd\u65b9\u6cd5\u4fdd\u7559\u4e86\u66f4\u591a\u5173\u4e8e\u6559\u5e08\u4fe1\u5fc3\u6c34\u5e73\u548c\u51b3\u7b56\u8fc7\u7a0b\u7684\u4fe1\u606f\u3002 \u7279\u5f81\u84b8\u998f\uff1a \u7279\u5f81\u63d0\u70bc\u6d89\u53ca\u5c06\u77e5\u8bc6\u4ece\u6559\u5e08\u6a21\u578b\u7684\u4e2d\u95f4\u5c42\u8f6c\u79fb\u5230\u5b66\u751f\u3002 \u901a\u8fc7\u5bf9\u9f50\u4e24\u4e2a\u6a21\u578b\u7684\u9690\u85cf\u8868\u793a\uff0c\u5b66\u751f\u53ef\u4ee5\u5b66\u4e60\u66f4\u4e30\u5bcc\u3001\u66f4\u62bd\u8c61\u7684\u7279\u5f81\u3002 3\u3001Deepseek \u7684\u84b8\u998f\u6a21\u578b \u4e3a\u4e86\u4f7f\u8bbf\u95ee\u66f4\u52a0\u6c11\u4e3b\u5316\uff0cDeepSeek AI \u53d1\u5e03\u4e86\u57fa\u4e8e Qwen\uff08Qwen\uff0c2024b\uff09\u548c Llama\uff08AI@Meta\uff0c2024\uff09\u7b49\u6d41\u884c\u67b6\u6784\u7684\u516d\u4e2a\u84b8\u998f\u53d8\u4f53\u3002\u4ed6\u4eec\u4f7f\u7528 DeepSeek-R1 \u7b56\u5212\u7684 800k \u4e2a\u6837\u672c\u76f4\u63a5\u5fae\u8c03\u5f00\u6e90\u6a21\u578b\u3002 \u5c3d\u7ba1\u6bd4 DeepSeek-R1 \u5c0f\u5f97\u591a\uff0c\u4f46\u84b8\u998f\u6a21\u578b\u5728\u5404\u79cd\u57fa\u51c6\u6d4b\u8bd5\u4e2d\u90fd\u8868\u73b0\u51fa\u8272\uff0c\u901a\u5e38\u53ef\u4ee5\u5339\u654c\u751a\u81f3\u8d85\u8d8a\u66f4\u5927\u6a21\u578b\u7684\u80fd\u529b\u3002\u5982\u4e0b\u56fe\u6240\u793a Deepseek \u63d0\u70bc\u6a21\u578b\u57fa\u51c6\u6d4b\u8bd5\uff08 
4\u3001\u4e3a\u4ec0\u4e48\u8981\u84b8\u998f\u81ea\u5df1\u7684\u6a21\u578b\uff1f [&hellip;]<\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"closed","ping_status":"","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[13],"tags":[],"class_list":["post-53781","post","type-post","status-publish","format-standard","hentry","category-ai"],"_links":{"self":[{"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/posts\/53781","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/comments?post=53781"}],"version-history":[{"count":0,"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/posts\/53781\/revisions"}],"wp:attachment":[{"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/media?parent=53781"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/categories?post=53781"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/fwq.ai\/blog\/wp-json\/wp\/v2\/tags?post=53781"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}