{"id":20703,"date":"2026-05-04T12:47:23","date_gmt":"2026-05-04T04:47:23","guid":{"rendered":"https:\/\/92it.top\/?p=20703"},"modified":"2026-05-04T12:47:23","modified_gmt":"2026-05-04T04:47:23","slug":"qwen3-5-0-8b-2b-4b-9b-%e5%b0%8f%e6%a8%a1%e5%9e%8b%e6%9c%ac%e5%9c%b0%e9%83%a8%e7%bd%b2%e6%8c%87%e5%8d%97%ef%bc%8c%e5%be%ae%e8%b0%83%e6%95%99%e7%a8%8b","status":"publish","type":"post","link":"https:\/\/92it.top\/?p=20703","title":{"rendered":"Qwen3.5 0.8B\/2B\/4B\/9B \u5c0f\u6a21\u578b\u672c\u5730\u90e8\u7f72\u6307\u5357\uff0c\u5fae\u8c03\u6559\u7a0b"},"content":{"rendered":"\n<p>\u8f6c\u8f7d\uff1a<a href=\"https:\/\/cloud.tencent.com\/developer\/article\/2634241\">Qwen3.5 0.8B\/2B\/4B\/9B \u5c0f\u6a21\u578b\u672c\u5730\u90e8\u7f72\u6307\u5357\uff0c\u5fae\u8c03\u6559\u7a0b<\/a><\/p>\n\n\n\n<p><strong>\u524d\u8a00 \ud83d\udd16<\/strong><\/p>\n\n\n\n<hr class=\"wp-block-separator has-alpha-channel-opacity\"\/>\n\n\n\n<p>\u4eca\u5929\u8fd9\u7bc7\u662f <strong>Qwen3.5 \u5c0f\u578b\u6a21\u578b\u7cfb\u5217\uff080.8B \/ 2B \/ 4B \/ 9B\uff09\u7684\u4e13\u5c5e\u672c\u5730\u90e8\u7f72\u6559\u7a0b<\/strong>\uff0c\u91cd\u70b9\u8bb2 Unsloth \u56e2\u961f\u7b2c\u4e00\u65f6\u95f4\u653e\u51fa\u7684 GGUF \u91cf\u5316\u7248\u672c\u600e\u4e48\u73a9\uff0c\u5982\u4f55\u514d\u8d39\u5fae\u8c03\u81ea\u5df1\u7684Qwen3.5<\/p>\n\n\n\n<p>\u624b\u628a\u624b\u6559\u4f60\u4ece\u4e0b\u8f7d\u5230\u8fd0\u884c\uff0cMac\u3001PC\u3001Linux \u5168\u8986\u76d6\u3002<\/p>\n\n\n\n<p><strong>\u4e3a\u4ec0\u4e48\u8981\u5173\u6ce8 Unsloth \u7684 GGUF\uff1f<\/strong><\/p>\n\n\n\n<p>\u5148\u8bf4\u4e00\u4e2a\u80cc\u666f\uff1aQwen \u5b98\u65b9\u53d1\u5e03\u7684\u662f HuggingFace \u683c\u5f0f\u7684\u6743\u91cd\uff08safetensors\uff09\uff0c\u8fd9\u79cd\u683c\u5f0f\u4e3b\u8981\u9762\u5411 <a href=\"https:\/\/cloud.tencent.com\/product\/gpu?from_column=20065&amp;from=20065\">GPU<\/a> \u63a8\u7406\uff08vLLM\u3001SGLang\u3001Transformers \u7b49\u6846\u67b6\uff09\u3002<strong>\u5bf9\u4e8e\u6ca1\u6709\u9ad8\u7aef GPU 
\u7684\u666e\u901a\u73a9\u5bb6\u6765\u8bf4\uff0cGGUF \u683c\u5f0f\u624d\u662f\u672c\u5730\u90e8\u7f72\u7684\u771f\u6b63\u5165\u53e3\u3002<\/strong><\/p>\n\n\n\n<p><strong>\u800c Unsloth \u5c31\u662f\u76ee\u524d\u5f00\u6e90\u793e\u533a\u505a GGUF \u91cf\u5316\u505a\u5f97\u6700\u597d\u7684\u56e2\u961f\u4e4b\u4e00\uff0c\u4ed6\u4eec\u6709\u4e00\u5957\u53eb Dynamic 2.0 \u7684\u91cf\u5316\u65b9\u6848<\/strong>\u2014\u2014\u6838\u5fc3\u601d\u8def\u662f\u628a\u6a21\u578b\u4e2d\u91cd\u8981\u7684\u5c42\uff08\u6bd4\u5982\u6ce8\u610f\u529b\u5c42\u7684\u5173\u952e\u6743\u91cd\uff09\u4fdd\u7559\u66f4\u9ad8\u7cbe\u5ea6\uff088-bit \u751a\u81f3 16-bit\uff09\uff0c\u4e0d\u91cd\u8981\u7684\u5c42\u5927\u80c6\u538b\u7f29\u3002\u8fd9\u6837\u505a\u7684\u597d\u5904\u662f\uff1a<strong>4-bit \u91cf\u5316\u4e0b\u7684\u8868\u73b0\uff0c\u51e0\u4e4e\u903c\u8fd1 FP16 \u539f\u59cb\u7cbe\u5ea6\u3002<\/strong><\/p>\n\n\n\n<p>\u8fd9\u6b21 Qwen3.5 \u5c0f\u6a21\u578b\u7cfb\u5217\u4e00\u53d1\u5e03\uff0cUnsloth \u5c31\u540c\u6b65\u653e\u51fa\u4e86\u5168\u7cfb GGUF\uff0c\u6548\u7387\u62c9\u6ee1\u3002<\/p>\n\n\n\n<p><strong>Unsloth GGUF \u4e0b\u8f7d\u5730\u5740<\/strong><\/p>\n\n\n\n<p>\u6bcf\u4e2a\u6a21\u578b\u90fd\u63d0\u4f9b\u4e86\u4ece 2-bit \u5230 8-bit \u7684\u591a\u79cd\u91cf\u5316\u7248\u672c\uff0c\u4f60\u53ef\u4ee5\u6839\u636e\u81ea\u5df1\u7684\u8bbe\u5907\u5185\u5b58\u6765\u9009\u3002<\/p>\n\n\n<div class=\"wp-block-image\">\n<figure class=\"aligncenter size-large is-resized\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"816\" src=\"https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-10-1024x816.png\" alt=\"\" class=\"wp-image-20704\" style=\"width:422px;height:auto\" srcset=\"https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-10-1024x816.png 1024w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-10-300x239.png 300w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-10-768x612.png 768w, 
https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-10-1536x1224.png 1536w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-10-830x662.png 830w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-10-230x183.png 230w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-10-350x279.png 350w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-10-480x383.png 480w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-10.png 1558w\" sizes=\"(max-width: 1024px) 100vw, 1024px\" \/><\/figure><\/div>\n\n\n<p><strong>\u5185\u5b58\u9700\u6c42\u901f\u67e5\u8868<\/strong><\/p>\n\n\n\n<p>\u8fd9\u662f Unsloth \u5b98\u65b9\u7ed9\u51fa\u7684\u786c\u4ef6\u9700\u6c42\u53c2\u8003\uff08\u603b\u5185\u5b58 = RAM + VRAM \u6216\u7edf\u4e00\u5185\u5b58\uff09\uff1a<\/p>\n\n\n<div class=\"wp-block-image\">\n<figure class=\"aligncenter size-large is-resized\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"436\" src=\"https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-11-1024x436.png\" alt=\"\" class=\"wp-image-20705\" style=\"width:536px;height:auto\" srcset=\"https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-11-1024x436.png 1024w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-11-300x128.png 300w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-11-768x327.png 768w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-11-1536x654.png 1536w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-11-830x353.png 830w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-11-230x98.png 230w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-11-350x149.png 350w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-11-480x204.png 480w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-11.png 1574w\" sizes=\"(max-width: 1024px) 100vw, 1024px\" 
\/><\/figure><\/div>\n\n\n<p><strong>\u7b80\u5355\u6765\u8bf4\uff1a<\/strong><\/p>\n\n\n\n<ul>\n<li><strong>0.8B \/ 2B<\/strong>\uff1a\u51e0\u4e4e\u4efb\u4f55\u8bbe\u5907\u90fd\u80fd\u8dd1\uff0c3GB \u5185\u5b58\u5c31\u591f<\/li>\n\n\n\n<li><strong>4B\uff08Q4 \u91cf\u5316\uff09<\/strong>\uff1a7GB \u5185\u5b58\uff0cMacBook Air M1 8GB \u7248\u5c31\u80fd\u73a9<\/li>\n\n\n\n<li><strong>9B\uff08Q4 \u91cf\u5316\uff09<\/strong>\uff1a9GB \u5185\u5b58\uff0cMacBook Pro 16GB \u6216 12GB+ \u663e\u5b58 GPU \u8f7b\u677e\u641e\u5b9a<\/li>\n<\/ul>\n\n\n\n<p>\u5bf9\u6bd4\u4e00\u4e0b 9B \u6a21\u578b Q4 \u91cf\u5316\u53ea\u9700\u8981 9GB \u5185\u5b58\u2014\u2014\u4f60\u7684\u65e7\u6b3e MacBook Pro 16GB \u5c31\u80fd\u6ee1\u8840\u8fd0\u884c\u4e00\u4e2a\u5728\u591a\u9879 benchmark \u4e0a\u540a\u6253 80B \u5927\u6a21\u578b\u7684&#8221;\u5c0f\u94a2\u70ae&#8221;\uff0c\u8fd9\u6ce2\u6027\u4ef7\u6bd4\u7b80\u76f4\u4e86\u3002<\/p>\n\n\n\n<p>\u3000\u3000<\/p>\n\n\n\n<p><strong>\u5982\u4f55\u8fd0\u884cGGUF\u6a21\u578b\uff1f\ud83d\udd16<\/strong><\/p>\n\n\n\n<hr class=\"wp-block-separator has-alpha-channel-opacity\"\/>\n\n\n\n<p><strong>\ud83d\udd39\u65b9\u6cd5\u4e00\uff1allama.cpp \u76f4\u63a5\u8dd1\uff08\u6700\u63a8\u8350\uff09<\/strong><\/p>\n\n\n\n<p><strong>1. 
\u7f16\u8bd1 llama.cpp<\/strong><\/p>\n\n\n\n<p>\u9996\u5148\u4f60\u9700\u8981\u6700\u65b0\u7248 llama.cpp\u3002\u5982\u679c\u4f60\u8fd8\u6ca1\u88c5\u8fc7\uff1a<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\"># \u514b\u9686\u6700\u65b0\u4ee3\u7801\ngit clone https:\/\/github.com\/ggml-org\/llama.cpp.git\ncd llama.cpp\n\n# macOS \/ CPU \u7f16\u8bd1\ncmake -B build -DGGML_CUDA=OFF\ncmake --build build --config Release -j\n\n# \u5982\u679c\u6709 NVIDIA GPU\uff0c\u6539\u6210\uff1a\n# cmake -B build -DGGML_CUDA=ON\n# cmake --build build --config Release -j<\/pre>\n\n\n\n<p><strong>2. \u4e0b\u8f7d\u6a21\u578b<\/strong><\/p>\n\n\n\n<p>\u63a8\u8350\u7528 HuggingFace Hub \u4e0b\u8f7d\uff1a<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">pip install huggingface_hub hf_transfer\n\n# \u4e0b\u8f7d 9B \u7684 Q4_K_M \u91cf\u5316\u7248\u672c\nhuggingface-cli download unsloth\/Qwen3.5-9B-GGUF \\\n  --include \"Qwen3.5-9B-Q4_K_M.gguf\" \\\n  --local-dir .\/models\n<\/pre>\n\n\n\n<p>\u5982\u679c\u4f60\u8981\u6362\u5176\u4ed6\u578b\u53f7\uff0c\u628a <code>9B<\/code> \u6539\u6210 <code>0.8B<\/code>\u3001<code>2B<\/code> \u6216 <code>4B<\/code> \u5373\u53ef\u3002<\/p>\n\n\n\n<p><strong>3. 
\u4ea4\u4e92\u5f0f\u5bf9\u8bdd\uff08Non-Thinking \u6a21\u5f0f\uff0c\u9ed8\u8ba4\uff09<\/strong><\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">.\/build\/bin\/llama-cli \\\n  -m .\/models\/Qwen3.5-9B-Q4_K_M.gguf \\\n  --ctx-size 16384 \\\n  -cnv\n<\/pre>\n\n\n\n<p>\u5c31\u8fd9\u4e48\u7b80\u5355\uff0c\u76f4\u63a5\u5f00\u804a\u3002<\/p>\n\n\n\n<p><strong>4. \u542f\u7528 Thinking \u6a21\u5f0f<\/strong><\/p>\n\n\n\n<p>\u26a0\ufe0f <strong>\u5212\u91cd\u70b9<\/strong>\uff1aQwen3.5 \u5c0f\u6a21\u578b\u7cfb\u5217\uff080.8B &#8211; 9B\uff09<strong>\u9ed8\u8ba4\u5173\u95ed\u4e86 Thinking\uff08\u63a8\u7406\u601d\u8003\uff09\u6a21\u5f0f<\/strong>\uff01\u8fd9\u548c\u5927\u6a21\u578b\uff0827B+\uff09\u4e0d\u4e00\u6837\u3002<\/p>\n\n\n\n<p>\u5982\u679c\u4f60\u60f3\u8ba9\u5c0f\u6a21\u578b\u4e5f\u8f93\u51fa <code>&lt;think&gt;...&lt;\/think&gt;<\/code> \u63a8\u7406\u8fc7\u7a0b\uff0c\u9700\u8981\u901a\u8fc7 <code>llama-server<\/code> \u542f\u52a8\u5e76\u4f20\u5165\u989d\u5916\u53c2\u6570\uff1a<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">.\/build\/bin\/llama-server \\\n  -m .\/models\/Qwen3.5-9B-Q4_K_M.gguf \\\n  --ctx-size 16384 \\\n  --chat-template-kwargs '{\"enable_thinking\":true}'\n<\/pre>\n\n\n\n<p>\u8fd9\u6837\u4f60\u5c31\u80fd\u5728\u672c\u5730\u83b7\u5f97\u4e00\u4e2a\u62e5\u6709\u5b8c\u6574\u601d\u8003\u94fe\u8def\u7684 9B \u5c0f\u94a2\u70ae\u4e86\u3002<\/p>\n\n\n\n<p>\u3000\u3000<\/p>\n\n\n\n<p><strong>\ud83d\udd39<strong>\u65b9\u6cd5\u4e8c\uff1allama-server \u90e8\u7f72\u4e3a API 
\u670d\u52a1<\/strong><\/strong><\/p>\n\n\n\n<p>\u5982\u679c\u4f60\u60f3\u628a\u6a21\u578b\u90e8\u7f72\u6210 OpenAI \u517c\u5bb9\u7684 API \u670d\u52a1\uff08\u6bd4\u5982\u7ed9 Claude Code\u3001Cursor \u7b49\u5de5\u5177\u7528\uff09\uff0c\u63a8\u8350\u8fd9\u79cd\u65b9\u5f0f\uff1a<\/p>\n\n\n\n<p><strong>1. \u542f\u52a8 llama-server<\/strong><\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\"># Non-Thinking \u6a21\u5f0f\uff08\u9ed8\u8ba4\uff0c\u63a8\u8350\u65e5\u5e38\u4f7f\u7528\uff09\n.\/build\/bin\/llama-server \\\n  -m .\/models\/Qwen3.5-9B-Q4_K_M.gguf \\\n  --ctx-size 16384 \\\n  --port 8080 \\\n  --n-gpu-layers 35\n\n# Thinking \u6a21\u5f0f\n.\/build\/bin\/llama-server \\\n  -m .\/models\/Qwen3.5-9B-Q4_K_M.gguf \\\n  --ctx-size 16384 \\\n  --port 8080 \\\n  --n-gpu-layers 35 \\\n  --chat-template-kwargs '{\"enable_thinking\":true}'\n<\/pre>\n\n\n\n<p><strong>2. 
\u7528 Python \u8c03\u7528<\/strong><\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">from openai import OpenAI\n\nclient = OpenAI(\n    base_url=\"http:\/\/localhost:8080\/v1\",\n    api_key=\"EMPTY\"\n)\n\nresponse = client.chat.completions.create(\n    model=\"Qwen3.5-9B\",\n    messages=[\n        {\"role\": \"user\", \"content\": \"\u7528 Python \u5199\u4e00\u4e2a\u5feb\u901f\u6392\u5e8f\"}\n    ],\n    temperature=0.7,\n    top_p=0.8,\n    max_tokens=4096\n)\n\nprint(response.choices[0].message.content)\n<\/pre>\n\n\n\n<p>API \u5c31\u662f\u6807\u51c6\u7684 OpenAI \u683c\u5f0f\uff0c\u4efb\u4f55\u652f\u6301 OpenAI SDK \u7684\u5ba2\u6237\u7aef\u90fd\u80fd\u76f4\u63a5\u5bf9\u63a5\u3002<\/p>\n\n\n\n<p>\u3000\u3000<\/p>\n\n\n\n<p><strong>\ud83d\udd39\u65b9\u6cd5\u4e09\uff1aGPU \u73a9\u5bb6\u7684\u9009\u62e9\uff08vLLM \/ SGLang\uff09<\/strong><\/p>\n\n\n\n<p>\u5982\u679c\u4f60\u6709\u72ec\u7acb GPU\uff08\u54ea\u6015\u662f\u4e00\u5f20 3060 12GB\uff09\uff0c\u53ef\u4ee5\u76f4\u63a5\u7528 vLLM \u6216 SGLang \u8dd1\u539f\u59cb\u7cbe\u5ea6\u6743\u91cd\uff0c\u4e0d\u9700\u8981 GGUF \u91cf\u5316\uff1a<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\"># vLLM \u90e8\u7f72\nvllm serve Qwen\/Qwen3.5-9B \\\n  --port 8000 \\\n  --tensor-parallel-size 1 \\\n  --max-model-len 32768 \\\n  --reasoning-parser qwen3\n\n# SGLang \u90e8\u7f72\npython -m sglang.launch_server \\\n  --model-path Qwen\/Qwen3.5-9B \\\n  --port 8000 \\\n  --tp-size 1 \\\n  --mem-fraction-static 0.8 \\\n  --context-length 32768 \\\n  --reasoning-parser qwen3\n<\/pre>\n\n\n\n<p>\u76f8\u6bd4 GGUF\uff0cvLLM\/SGLang 
\u7684\u4f18\u52bf\u662f\uff1a<\/p>\n\n\n\n<ul>\n<li>\u96f6\u7cbe\u5ea6\u635f\u5931<\/li>\n\n\n\n<li>\u63a8\u7406\u901f\u5ea6\u66f4\u5feb\uff08GPU \u52a0\u901f\uff09<\/li>\n\n\n\n<li>\u652f\u6301\u66f4\u9ad8\u5e76\u53d1<\/li>\n\n\n\n<li>\u652f\u6301\u591a GPU \u5f20\u91cf\u5e76\u884c<\/li>\n<\/ul>\n\n\n\n<p>\u4f46\u524d\u63d0\u662f\u4f60\u5f97\u6709\u663e\u5361\u3002<\/p>\n\n\n\n<p>\u3000\u3000<\/p>\n\n\n\n<p><strong>\ud83d\udd39\u63a8\u8350\u91c7\u6837\u53c2\u6570<\/strong><\/p>\n\n\n\n<p>Unsloth \u548c Qwen \u5b98\u65b9\u90fd\u7ed9\u4e86\u63a8\u8350\u53c2\u6570<\/p>\n\n\n<div class=\"wp-block-image\">\n<figure class=\"aligncenter size-large is-resized\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"619\" src=\"https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-12-1024x619.png\" alt=\"\" class=\"wp-image-20707\" style=\"width:472px;height:auto\" srcset=\"https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-12-1024x619.png 1024w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-12-300x181.png 300w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-12-768x464.png 768w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-12-1536x928.png 1536w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-12-830x502.png 830w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-12-230x139.png 230w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-12-350x212.png 350w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-12-480x290.png 480w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-12.png 1572w\" sizes=\"(max-width: 1024px) 100vw, 1024px\" \/><\/figure><\/div>\n\n<div class=\"wp-block-image\">\n<figure class=\"aligncenter size-large is-resized\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"478\" src=\"https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-13-1024x478.png\" alt=\"\" 
class=\"wp-image-20708\" style=\"width:480px;height:auto\" srcset=\"https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-13-1024x478.png 1024w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-13-300x140.png 300w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-13-768x359.png 768w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-13-1536x718.png 1536w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-13-830x388.png 830w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-13-230x107.png 230w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-13-350x164.png 350w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-13-480x224.png 480w, https:\/\/92it.top\/wp-content\/uploads\/2026\/05\/\u56fe\u7247-13.png 1584w\" sizes=\"(max-width: 1024px) 100vw, 1024px\" \/><\/figure><\/div>\n\n\n<p>\u3000\u3000<\/p>\n\n\n\n<p><strong><strong>\u8fdb\u9636\uff1a\u7528 Unsloth \u514d\u8d39\u5fae\u8c03 Qwen3.5 \u5c0f\u6a21\u578b<\/strong><\/strong> <strong>\ud83d\udd16<\/strong><\/p>\n\n\n\n<hr class=\"wp-block-separator has-alpha-channel-opacity\"\/>\n\n\n\n<p>\u5149\u80fd\u8dd1\u63a8\u7406\u8fd8\u4e0d\u8fc7\u763e\uff1fUnsloth \u8fd8\u63d0\u4f9b\u4e86\u5b8c\u6574\u7684 <strong>Qwen3.5 \u5fae\u8c03\u65b9\u6848<\/strong>\uff0c\u800c\u4e14\u5c0f\u6a21\u578b\uff080.8B \/ 2B \/ 4B \/ 9B\uff09\u53ef\u4ee5\u76f4\u63a5\u5728 <strong>Google Colab \u514d\u8d39 T4 GPU<\/strong> \u4e0a\u5b8c\u6210\u5fae\u8c03\uff01<\/p>\n\n\n\n<p>\u8fd9\u610f\u5473\u7740\uff1a<strong>\u4f60\u4e0d\u9700\u8981\u4efb\u4f55\u672c\u5730 GPU\uff0c\u6253\u5f00\u6d4f\u89c8\u5668\u5c31\u80fd\u8bad\u7ec3\u81ea\u5df1\u7684\u4e13\u5c5e\u6a21\u578b\u3002<\/strong><\/p>\n\n\n\n<p><strong>\u514d\u8d39 Colab Notebook\uff08\u4e00\u952e\u8fd0\u884c\uff09<\/strong><\/p>\n\n\n\n<p>Unsloth \u4e3a\u6bcf\u4e2a\u5c0f\u6a21\u578b\u90fd\u51c6\u5907\u4e86\u73b0\u6210\u7684 Colab Notebook\uff1a<\/p>\n\n\n\n<figure 
class=\"wp-block-table is-style-stripes\"><table><thead><tr><th>\u6a21\u578b<\/th><th>Colab \u94fe\u63a5<\/th><\/tr><\/thead><tbody><tr><td>Qwen3.5-0.8B<\/td><td><a href=\"https:\/\/colab.research.google.com\/github\/unslothai\/notebooks\/blob\/main\/nb\/Qwen3_5_(0.8B).ipynb\">\u6253\u5f00 Colab<\/a><\/td><\/tr><tr><td>Qwen3.5-2B<\/td><td><a href=\"https:\/\/colab.research.google.com\/github\/unslothai\/notebooks\/blob\/main\/nb\/Qwen3_5_(2B).ipynb\">\u6253\u5f00 Colab<\/a><\/td><\/tr><tr><td>Qwen3.5-4B<\/td><td><a href=\"https:\/\/colab.research.google.com\/github\/unslothai\/notebooks\/blob\/main\/nb\/Qwen3_5_(4B).ipynb\">\u6253\u5f00 Colab<\/a><\/td><\/tr><tr><td>Qwen3.5-9B<\/td><td><a href=\"https:\/\/colab.research.google.com\/github\/unslothai\/notebooks\/blob\/main\/nb\/Qwen3_5_(9B).ipynb\">\u6253\u5f00 Colab<\/a><\/td><\/tr><\/tbody><\/table><\/figure>\n\n\n\n<p>\u3000\u3000<\/p>\n\n\n\n<p>\u70b9\u5f00\u5c31\u80fd\u8dd1\uff0c\u96f6\u914d\u7f6e\uff0c\u5b8c\u5168\u514d\u8d39\u3002<\/p>\n\n\n\n<p><strong>\ud83d\udd39\u672c\u5730\u5fae\u8c03\u4ee3\u7801\u793a\u4f8b<\/strong><\/p>\n\n\n\n<p>\u5982\u679c\u4f60\u66f4\u559c\u6b22\u5728\u81ea\u5df1\u673a\u5668\u4e0a\u8dd1\uff0c\u6216\u8005\u9700\u8981\u66f4\u5927\u7684\u6570\u636e\u96c6\u548c\u66f4\u957f\u7684\u8bad\u7ec3\u65f6\u95f4\uff0c\u4e5f\u53ef\u4ee5\u672c\u5730\u5fae\u8c03\u3002\u5148\u88c5\u597d Unsloth\uff1a<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">pip install --upgrade --force-reinstall --no-cache-dir unsloth unsloth_zoo<\/pre>\n\n\n\n<p>\u7136\u540e\u662f\u4e00\u4e2a\u6700\u7b80\u7684 SFT\uff08\u76d1\u7763\u5fae\u8c03\uff09\u811a\u672c\uff1a<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" 
data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">from unsloth import FastLanguageModel\nimport torch\nfrom datasets import load_dataset\nfrom trl import SFTTrainer, SFTConfig\n\nmax_seq_length = 2048# \u5148\u4ece\u5c0f\u7684\u5f00\u59cb\uff0c\u8dd1\u901a\u518d\u52a0\u5927\n\n# \u52a0\u8f7d\u793a\u4f8b\u6570\u636e\u96c6\uff08\u66ff\u6362\u6210\u4f60\u81ea\u5df1\u7684\uff09\nurl = \"https:\/\/huggingface.co\/datasets\/laion\/OIG\/resolve\/main\/unified_chip2.jsonl\"\ndataset = load_dataset(\"json\", data_files={\"train\": url}, split=\"train\")\n\n# \u52a0\u8f7d Qwen3.5-9B\uff08\u53ef\u4ee5\u6362\u6210 0.8B\/2B\/4B\uff09\nmodel, tokenizer = FastLanguageModel.from_pretrained(\n    model_name = \"Qwen\/Qwen3.5-9B\",\n    max_seq_length = max_seq_length,\n    load_in_4bit = True,     # 4-bit QLoRA\uff0c\u7701\u663e\u5b58\n    full_finetuning = False,\n)\n\n# \u6302\u4e0a LoRA \u9002\u914d\u5668\nmodel = FastLanguageModel.get_peft_model(\n    model,\n    r = 16,\n    target_modules = [\n        \"q_proj\", \"k_proj\", \"v_proj\", \"o_proj\",\n        \"gate_proj\", \"up_proj\", \"down_proj\",\n    ],\n    lora_alpha = 16,\n    lora_dropout = 0,\n    bias = \"none\",\n    use_gradient_checkpointing = \"unsloth\",  # \u964d\u4f4e\u663e\u5b58 + \u652f\u6301\u66f4\u957f\u4e0a\u4e0b\u6587\n    random_state = 3407,\n    max_seq_length = max_seq_length,\n)\n\n# \u5f00\u59cb\u8bad\u7ec3\ntrainer = SFTTrainer(\n    model = model,\n    train_dataset = dataset,\n    tokenizer = tokenizer,\n    args = SFTConfig(\n        max_seq_length = max_seq_length,\n        per_device_train_batch_size = 1,\n        gradient_accumulation_steps = 4,\n        warmup_steps = 10,\n        max_steps = 100,        # \u5148\u8dd1 100 \u6b65\u770b\u770b\u6548\u679c\n        logging_steps = 1,\n        output_dir = \"outputs_qwen35\",\n        optim = \"adamw_8bit\",\n        seed = 
3407,\n    ),\n)\ntrainer.train()\n<\/pre>\n\n\n\n<p><strong>\u4ee3\u7801\u770b\u7740\u957f\uff0c\u4f46\u6838\u5fc3\u903b\u8f91\u5c31\u4e09\u6b65\uff1a\u52a0\u8f7d\u6a21\u578b \u2192 \u6302 LoRA \u2192 \u8bad\u7ec3\u3002<\/strong> Unsloth \u628a\u5e95\u5c42\u590d\u6742\u7684\u4f18\u5316\u5168\u5c01\u88c5\u597d\u4e86\u3002<\/p>\n\n\n\n<p>\u3000\u3000<\/p>\n\n\n\n<p><strong>\ud83d\udd39\u663e\u5b58\u4e0d\u591f\u600e\u4e48\u529e\uff1f<\/strong><\/p>\n\n\n\n<p>Unsloth \u7ed9\u4e86\u51e0\u4e2a\u5b9e\u7528\u5efa\u8bae\uff1a<\/p>\n\n\n\n<ol>\n<li><strong>\u628a <code>per_device_train_batch_size<\/code> \u964d\u5230 1<\/strong><\/li>\n\n\n\n<li><strong>\u51cf\u5c0f <code>max_seq_length<\/code><\/strong>\uff08\u6bd4\u5982\u4ece 2048 \u964d\u5230 1024\uff09<\/li>\n\n\n\n<li><strong>\u4fdd\u6301 <code>use_gradient_checkpointing = \"unsloth\"<\/code> \u5f00\u542f<\/strong> \u2014\u2014 \u8fd9\u662f Unsloth \u7684\u72ec\u5bb6\u4f18\u5316\uff0c\u80fd\u663e\u8457\u964d\u4f4e\u663e\u5b58\u5360\u7528\uff0c\u540c\u65f6\u652f\u6301\u66f4\u957f\u7684\u4e0a\u4e0b\u6587<\/li>\n<\/ol>\n\n\n\n<p>\u5b9e\u6d4b 9B \u6a21\u578b\u7528 4-bit QLoRA\uff0c\u5728\u4e00\u5f20 12GB \u663e\u5361\uff08\u6bd4\u5982 3060\/4060\uff09\u4e0a\u5c31\u80fd\u8dd1\u8d77\u6765\u3002<\/p>\n\n\n\n<p>\u3000\u3000<\/p>\n\n\n\n<p><strong>\ud83d\udd39\u89c6\u89c9\u5fae\u8c03\u4e5f\u652f\u6301\uff01<\/strong><\/p>\n\n\n\n<p>\u8fd8\u8bb0\u5f97 Qwen3.5 \u662f\u539f\u751f\u591a\u6a21\u6001\u6a21\u578b\u5417\uff1fUnsloth \u540c\u6837\u652f\u6301<strong>\u89c6\u89c9\u5fae\u8c03<\/strong>\uff0c\u4f60\u53ef\u4ee5\u7528\u56fe\u6587\u5bf9\u6570\u636e\u6765\u8bad\u7ec3\u6a21\u578b\u7684\u89c6\u89c9\u7406\u89e3\u80fd\u529b\uff1a<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">from unsloth import FastVisionModel\n\nmodel = 
FastVisionModel.get_peft_model(\n    model,\n    finetune_vision_layers  = True,   # \u5fae\u8c03\u89c6\u89c9\u5c42\n    finetune_language_layers = True,  # \u5fae\u8c03\u8bed\u8a00\u5c42\n    finetune_attention_modules = True, # \u5fae\u8c03\u6ce8\u610f\u529b\u5c42\n    finetune_mlp_modules = True,       # \u5fae\u8c03 MLP \u5c42\n    r = 16,\n    lora_alpha = 16,\n    lora_dropout = 0,\n    bias = \"none\",\n    random_state = 3407,\n    target_modules = \"all-linear\",\n)\n<\/pre>\n\n\n\n<p>\u4f60\u53ef\u4ee5\u7075\u6d3b\u63a7\u5236\u53ea\u5fae\u8c03\u89c6\u89c9\u5c42\u3001\u53ea\u5fae\u8c03\u8bed\u8a00\u5c42\u3001\u6216\u8005\u5168\u90e8\u4e00\u8d77\u5fae\u8c03\uff0c\u975e\u5e38\u7075\u6d3b\u3002<\/p>\n\n\n\n<p>\u3000\u3000<\/p>\n\n\n\n<p><strong>\ud83d\udd39\u5fae\u8c03\u5b8c\u600e\u4e48\u5bfc\u51fa\uff1f<\/strong><\/p>\n\n\n\n<p>\u8bad\u7ec3\u5b8c\u7684\u6a21\u578b\u53ef\u4ee5\u5bfc\u51fa\u4e3a\u591a\u79cd\u683c\u5f0f\uff0c\u76f4\u63a5\u7528\u4e8e\u672c\u5730\u90e8\u7f72\uff1a<\/p>\n\n\n\n<p><strong>\u5bfc\u51fa\u4e3a GGUF\uff08\u7ed9 llama.cpp \/ Ollama \/ LM Studio \u7528\uff09\uff1a<\/strong><\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\"># \u5bfc\u51fa\u4e3a Q4_K_M \u91cf\u5316\u7684 GGUF\nmodel.save_pretrained_gguf(\"my_model\", tokenizer, quantization_method=\"q4_k_m\")\n\n# \u6216\u8005\u5bfc\u51fa\u4e3a Q8 \u91cf\u5316\nmodel.save_pretrained_gguf(\"my_model\", tokenizer, quantization_method=\"q8_0\")\n\n# \u60f3\u4e0a\u4f20\u5230 HuggingFace\uff1f\nmodel.push_to_hub_gguf(\"\u4f60\u7684\u7528\u6237\u540d\/my_model\", tokenizer, quantization_method=\"q4_k_m\")\n<\/pre>\n\n\n\n<p><strong>\u5bfc\u51fa\u4e3a 16-bit\uff08\u7ed9 vLLM \u7528\uff09\uff1a<\/strong><\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" 
data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">model.save_pretrained_merged(\"finetuned_model\", tokenizer, save_method=\"merged_16bit\")\n\n# \u6216\u8005\u4e0a\u4f20\u5230 HuggingFace\nmodel.push_to_hub_merged(\"\u4f60\u7684\u7528\u6237\u540d\/model\", tokenizer, save_method=\"merged_16bit\", token=\"\")\n<\/pre>\n\n\n\n<p><strong>\u53ea\u4fdd\u5b58 LoRA \u9002\u914d\u5668\uff08\u4f53\u79ef\u5c0f\uff0c\u65b9\u4fbf\u5206\u4eab\uff09\uff1a<\/strong><\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">model.save_pretrained(\"finetuned_lora\")\ntokenizer.save_pretrained(\"finetuned_lora\")\n<\/pre>\n\n\n\n<p>\u6574\u4e2a\u5de5\u4f5c\u6d41\uff1a<strong>Colab \u514d\u8d39\u8bad\u7ec3 \u2192 \u5bfc\u51fa GGUF \u2192 \u672c\u5730 llama.cpp \u8dd1\u8d77\u6765<\/strong>\uff0c\u4e00\u5206\u94b1\u4e0d\u82b1\uff0c\u5b8c\u5168\u514d\u8d39\u3002<\/p>\n\n\n\n<p>\u3000\u3000<\/p>\n\n\n\n<p><strong>\ud83d\udd39\u5fae\u8c03\u7684\u5173\u952e\u6ce8\u610f\u4e8b\u9879<\/strong><\/p>\n\n\n\n<ol>\n<li><strong>\u60f3\u4fdd\u7559\u63a8\u7406\u80fd\u529b\uff1f<\/strong> \u8bad\u7ec3\u6570\u636e\u4e2d\u81f3\u5c11\u4fdd\u7559 75% \u7684\u5e26 thinking\uff08\u63a8\u7406\u601d\u8003\uff09\u7684\u6837\u672c\uff0c\u5176\u4f59\u53ef\u4ee5\u662f\u76f4\u63a5\u56de\u7b54<\/li>\n\n\n\n<li><strong>\u5bfc\u51fa\u540e\u6548\u679c\u53d8\u5dee\uff1f<\/strong> \u6700\u5e38\u89c1\u7684\u539f\u56e0\u662f\u63a8\u7406\u65f6\u7528\u7684 chat template \/ EOS token \u548c\u8bad\u7ec3\u65f6\u4e0d\u4e00\u81f4\u3002Unsloth \u4f1a\u81ea\u52a8\u63d0\u9192\u4f60<\/li>\n\n\n\n<li><strong>vLLM \u7248\u672c\u6ce8\u610f<\/strong>\uff1a\u622a\u81f3\u76ee\u524d vLLM 0.16.0 \u5c1a\u4e0d\u652f\u6301 
Qwen3.5\uff0c\u9700\u8981\u7b49 0.17.0 \u6216\u4f7f\u7528 Nightly \u7248\u672c<\/li>\n<\/ol>\n\n\n\n<p>\u3000\u3000<\/p>\n\n\n\n<p><strong>\ud83d\udd39\u8fdb\u9636\uff1a\u642d\u914d Claude Code \/ OpenAI Codex \u4f7f\u7528<\/strong><\/p>\n\n\n\n<p>Unsloth \u5b98\u65b9\u6587\u6863\u7279\u522b\u63d0\u5230\uff0c\u4f60\u53ef\u4ee5\u7528 llama-server \u642d\u5efa\u672c\u5730\u6a21\u578b\u670d\u52a1\u540e\uff0c\u76f4\u63a5\u5bf9\u63a5 <strong>Claude Code<\/strong> \u6216 <strong>OpenAI Codex<\/strong>\uff0c\u5b9e\u73b0\u514d\u8d39\u7684\u672c\u5730 AI \u7f16\u7a0b\u52a9\u624b\u3002<\/p>\n\n\n\n<p>\u64cd\u4f5c\u601d\u8def\uff1a<\/p>\n\n\n\n<ol>\n<li>\u7528\u4e0a\u9762\u7684\u65b9\u6cd5\u542f\u52a8 llama-server<\/li>\n\n\n\n<li>\u8bbe\u7f6e <code>OPENAI_BASE_URL=http:\/\/localhost:8080\/v1<\/code><\/li>\n\n\n\n<li>\u5728 Claude Code \u6216 Codex \u4e2d\u914d\u7f6e\u4f7f\u7528\u672c\u5730\u7aef\u70b9<\/li>\n<\/ol>\n\n\n\n<p>\u4e00\u4e2a 9B \u6a21\u578b\u5c31\u80fd\u9a71\u52a8\u4f60\u7684\u672c\u5730 Coding Agent\uff0c\u4e0d\u82b1\u4e00\u5206\u94b1 API \u8d39\u7528\u3002<\/p>\n\n\n\n<p>\u3000\u3000<\/p>\n\n\n\n<p><strong>\ud83d\udd39\u8fdb\u9636\uff1a\u8d85\u957f\u6587\u672c\u5904\u7406\uff08YaRN \u6269\u5c55\u5230 100\u4e07 tokens\uff09<\/strong><\/p>\n\n\n\n<p>Qwen3.5-9B \u539f\u751f\u652f\u6301 262,144 tokens \u4e0a\u4e0b\u6587\uff0c\u4f46\u5982\u679c\u4f60\u9700\u8981\u5904\u7406\u66f4\u957f\u7684\u6587\u672c\uff08\u6bd4\u5982\u6574\u672c\u4e66\uff09\uff0c\u53ef\u4ee5\u901a\u8fc7 YaRN \u6280\u672f\u6269\u5c55\u5230 <strong>1,010,000 tokens<\/strong>\u3002<\/p>\n\n\n\n<p>\u5728 vLLM \u4e2d\u542f\u7528\uff1a<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">VLLM_ALLOW_LONG_MAX_MODEL_LEN=1 vllm serve Qwen\/Qwen3.5-9B \\\n  --hf-overrides '{\"text_config\": {\"rope_parameters\": 
{\"mrope_interleaved\": true, \"mrope_section\": [11, 11, 10], \"rope_type\": \"yarn\", \"rope_theta\": 10000000, \"partial_rotary_factor\": 0.25, \"factor\": 4.0, \"original_max_position_embeddings\": 262144}}}' \\\n  --max-model-len 1010000\n<\/pre>\n\n\n\n<p>\u4e00\u4e2a 9B \u6a21\u578b\u5904\u7406\u767e\u4e07 token \u4e0a\u4e0b\u6587\uff0c\u60f3\u60f3\u5c31\u89c9\u5f97\u79bb\u8c31\u3002<\/p>\n\n\n\n<p><strong>\u6211\u7684\u5efa\u8bae\uff1a\u56db\u4e2a\u578b\u53f7\u600e\u4e48\u9009<\/strong><\/p>\n\n\n\n<figure class=\"wp-block-table is-style-stripes\"><table><thead><tr><th>\u4f60\u7684\u573a\u666f<\/th><th>\u63a8\u8350\u578b\u53f7<\/th><th>\u63a8\u8350\u91cf\u5316<\/th><th>\u9700\u8981\u5185\u5b58<\/th><\/tr><\/thead><tbody><tr><td>\u6811\u8393\u6d3e \/ IoT \u5d4c\u5165\u5f0f<\/td><td>0.8B<\/td><td>Q4_K_M<\/td><td>5 GB<\/td><\/tr><tr><td>\u624b\u673a\u7aef \/ \u8f7b\u8584\u672c<\/td><td>2B<\/td><td>Q4_K_M<\/td><td>5 GB<\/td><\/tr><tr><td>MacBook Air 8GB<\/td><td>4B<\/td><td>UD-Q4_K_XL<\/td><td>7 GB<\/td><\/tr><tr><td>MacBook Pro 16GB \/ 12GB GPU<\/td><td>9B<\/td><td>UD-Q4_K_XL<\/td><td>9 GB<\/td><\/tr><tr><td>\u8ffd\u6c42\u6781\u81f4\u8f7b\u91cf<\/td><td>0.8B<\/td><td>UD-Q2_K_XL<\/td><td>3 GB<\/td><\/tr><\/tbody><\/table><\/figure>\n\n\n\n<p><strong>\u6211\u4e2a\u4eba\u6700\u63a8\u8350 9B \u7684 Q4 \u91cf\u5316\u7248\u672c\u3002<\/strong> \u5728 GPQA Diamond \u4e0a\u62ff\u5230 81.7 \u7684 9B \u6a21\u578b\uff0c\u80fd\u88c5\u8fdb\u4e00\u53f0\u666e\u901a\u7b14\u8bb0\u672c\uff0c\u8fd8\u8981\u4ec0\u4e48\u81ea\u884c\u8f66\uff1f<\/p>\n\n\n\n<p>\u3000\u3000<\/p>\n\n\n\n<p><strong>\u603b\u7ed3\ud83d\udd16<\/strong><\/p>\n\n\n\n<hr class=\"wp-block-separator has-alpha-channel-opacity\"\/>\n\n\n\n<p>Unsloth \u8fd9\u6b21\u56f4\u7ed5 Qwen3.5 \u5c0f\u6a21\u578b\u7684\u652f\u6301\u53ef\u4ee5\u8bf4\u662f<strong>\u5168\u94fe\u8def\u8986\u76d6<\/strong>\uff1a\u4ece GGUF \u91cf\u5316\u63a8\u7406\u5230 LoRA 
\u5fae\u8c03\u518d\u5230\u6a21\u578b\u5bfc\u51fa\uff0c\u4e00\u7ad9\u5f0f\u641e\u5b9a\u3002\u5bf9\u4e8e\u6211\u4eec\u8fd9\u4e9b\u672c\u5730\u90e8\u7f72\u73a9\u5bb6\u6765\u8bf4\uff0c\u57fa\u672c\u4e0a\u6253\u901a\u4e86\u6700\u540e\u4e00\u516c\u91cc\uff1a<\/p>\n\n\n\n<ul>\n<li><strong>\u95e8\u69db\u6781\u4f4e<\/strong>\uff1a3GB \u5185\u5b58\u5c31\u80fd\u8dd1 0.8B\uff0c9GB \u5185\u5b58\u5c31\u80fd\u8dd1 9B<\/li>\n\n\n\n<li><strong>\u7cbe\u5ea6\u9760\u8c31<\/strong>\uff1aDynamic 2.0 \u65b9\u6848\u4e0b\u7684 Q4 \u91cf\u5316\u51e0\u4e4e\u65e0\u635f<\/li>\n\n\n\n<li><strong>\u5de5\u5177\u94fe\u9f50\u5168<\/strong>\uff1allama.cpp\u3001vLLM\u3001SGLang \u5168\u7ebf\u652f\u6301<\/li>\n\n\n\n<li><strong>\u573a\u666f\u4e30\u5bcc<\/strong>\uff1a\u4ece\u5bf9\u8bdd\u5230 Agent \u5230\u4ee3\u7801\u751f\u6210\u5230\u767e\u4e07 token \u957f\u6587\u6863\u5904\u7406<\/li>\n\n\n\n<li><strong>\u514d\u8d39\u5fae\u8c03<\/strong>\uff1aGoogle Colab T4 GPU \u5c31\u80fd\u8bad\u7ec3\u4f60\u81ea\u5df1\u7684\u4e13\u5c5e\u6a21\u578b<\/li>\n\n\n\n<li><strong>\u95ed\u73af\u5bfc\u51fa<\/strong>\uff1a\u5fae\u8c03\u5b8c\u76f4\u63a5\u5bfc\u51fa GGUF\uff0c\u672c\u5730\u8dd1\u8d77\u6765<\/li>\n<\/ul>\n\n\n\n<p>\u8fd8\u7b49\u4ec0\u4e48\uff1f\u8d76\u7d27\u628a\u4f60\u7684 MacBook \u6b66\u88c5\u8d77\u6765\u5427\u3002<\/p>\n\n\n\n<p><strong>\u76f8\u5173\u94fe\u63a5\uff1a<\/strong><\/p>\n\n\n\n<ul>\n<li>Unsloth \u672c\u5730\u90e8\u7f72\u6307\u5357\uff1ahttps:\/\/unsloth.ai\/docs\/models\/qwen3.5<\/li>\n\n\n\n<li>Unsloth \u5fae\u8c03\u6307\u5357\uff1ahttps:\/\/unsloth.ai\/docs\/models\/qwen3.5\/fine-tune<\/li>\n\n\n\n<li>Unsloth GGUF \u5408\u96c6\uff1ahttps:\/\/huggingface.co\/collections\/unsloth\/qwen35<\/li>\n\n\n\n<li>Qwen3.5-9B \u6a21\u578b\u5361\uff1ahttps:\/\/huggingface.co\/Qwen\/Qwen3.5-9B<\/li>\n\n\n\n<li>llama.cpp \u9879\u76ee\u5730\u5740\uff1ahttps:\/\/github.com\/ggml-org\/llama.cpp<\/li>\n<\/ul>\n\n\n\n<p>#Qwen3.5 #Unsloth #GGUF #\u672c\u5730\u90e8\u7f72 #\u5fae\u8c03 #llama.cpp 
#\u5f00\u6e90<\/p>\n","protected":false},"excerpt":{"rendered":"<p>\u8f6c\u8f7d\uff1aQwen3.5 0.8B\/2B\/4B\/9B \u5c0f\u6a21\u578b\u672c\u5730\u90e8\u7f72\u6307\u5357\uff0c\u5fae\u8c03\u6559\u7a0b \u524d\u8a00 \ud83d\udd16 \u4eca\u5929\u8fd9\u7bc7\u662f Qwen [&hellip;]<\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"closed","ping_status":"","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[48],"tags":[],"_links":{"self":[{"href":"https:\/\/92it.top\/index.php?rest_route=\/wp\/v2\/posts\/20703"}],"collection":[{"href":"https:\/\/92it.top\/index.php?rest_route=\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/92it.top\/index.php?rest_route=\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/92it.top\/index.php?rest_route=\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/92it.top\/index.php?rest_route=%2Fwp%2Fv2%2Fcomments&post=20703"}],"version-history":[{"count":2,"href":"https:\/\/92it.top\/index.php?rest_route=\/wp\/v2\/posts\/20703\/revisions"}],"predecessor-version":[{"id":20709,"href":"https:\/\/92it.top\/index.php?rest_route=\/wp\/v2\/posts\/20703\/revisions\/20709"}],"wp:attachment":[{"href":"https:\/\/92it.top\/index.php?rest_route=%2Fwp%2Fv2%2Fmedia&parent=20703"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/92it.top\/index.php?rest_route=%2Fwp%2Fv2%2Fcategories&post=20703"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/92it.top\/index.php?rest_route=%2Fwp%2Fv2%2Ftags&post=20703"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}