{"id":24120,"date":"2025-01-21T09:51:35","date_gmt":"2025-01-21T01:51:35","guid":{"rendered":"http:\/\/139.9.1.231\/?p=24120"},"modified":"2025-01-21T09:51:36","modified_gmt":"2025-01-21T01:51:36","slug":"qwen2-lora","status":"publish","type":"post","link":"http:\/\/139.9.1.231\/index.php\/2025\/01\/21\/qwen2-lora\/","title":{"rendered":"Qwen2 Lora LLM\u5fae\u8c03\u8bad\u7ec3\u6559\u7a0b"},"content":{"rendered":"\n<p class=\"has-light-gray-background-color has-background\"><strong>Github\uff1a<\/strong><a href=\"https:\/\/github.com\/datawhalechina\/self-llm\/tree\/master\"><strong><em>https:\/\/github.com\/datawhalechina\/self-llm\/tree\/master<\/em><\/strong><\/a><\/p>\n\n\n\n\n\n<figure class=\"wp-block-image size-full\"><img loading=\"lazy\" width=\"895\" height=\"220\" src=\"http:\/\/139.9.1.231\/wp-content\/uploads\/2025\/01\/image-84.png\" alt=\"\" class=\"wp-image-24128\" srcset=\"http:\/\/139.9.1.231\/wp-content\/uploads\/2025\/01\/image-84.png 895w, http:\/\/139.9.1.231\/wp-content\/uploads\/2025\/01\/image-84-300x74.png 300w, http:\/\/139.9.1.231\/wp-content\/uploads\/2025\/01\/image-84-768x189.png 768w\" sizes=\"(max-width: 895px) 100vw, 895px\" \/><\/figure>\n\n\n\n<p>\u2003\u672c\u9879\u76ee\u662f\u4e00\u4e2a\u56f4\u7ed5\u5f00\u6e90\u5927\u6a21\u578b\u3001\u9488\u5bf9\u56fd\u5185\u521d\u5b66\u8005\u3001\u57fa\u4e8e Linux \u5e73\u53f0\u7684\u4e2d\u56fd\u5b9d\u5b9d\u4e13\u5c5e\u5927\u6a21\u578b\u6559\u7a0b\uff0c\u9488\u5bf9\u5404\u7c7b\u5f00\u6e90\u5927\u6a21\u578b\u63d0\u4f9b\u5305\u62ec\u73af\u5883\u914d\u7f6e\u3001\u672c\u5730\u90e8\u7f72\u3001\u9ad8\u6548\u5fae\u8c03\u7b49\u6280\u80fd\u5728\u5185\u7684\u5168\u6d41\u7a0b\u6307\u5bfc\uff0c\u7b80\u5316\u5f00\u6e90\u5927\u6a21\u578b\u7684\u90e8\u7f72\u3001\u4f7f\u7528\u548c\u5e94\u7528\u6d41\u7a0b\uff0c\u8ba9\u66f4\u591a\u7684\u666e\u901a\u5b66\u751f\u3001\u7814\u7a76\u8005\u66f4\u597d\u5730\u4f7f\u7528\u5f00\u6e90\u5927\u6a21\u578b\uff0c\u5e2e\u52a9\u5f00\u6e90\u3001\u81ea\u7531\u7684\u5927\u6a21\u578b\u66f4\u5feb\u878d\u5165\u5230\u666e\u901a\u5b66\u4e60\u8005\u7684\u751f\u6d3b\u4e2d\u3002<\/p>\n\n\n\n<p>\u2003\u2003\u672c\u9879\u76ee\u7684\u4e3b\u8981\u5185\u5bb9\u5305\u62ec\uff1a<\/p>\n\n\n\n<ol><li>\u57fa\u4e8e Linux \u5e73\u53f0\u7684\u5f00\u6e90 LLM \u73af\u5883\u914d\u7f6e\u6307\u5357\uff0c\u9488\u5bf9\u4e0d\u540c\u6a21\u578b\u8981\u6c42\u63d0\u4f9b\u4e0d\u540c\u7684\u8be6\u7ec6\u73af\u5883\u914d\u7f6e\u6b65\u9aa4\uff1b<\/li><li>\u9488\u5bf9\u56fd\u5185\u5916\u4e3b\u6d41\u5f00\u6e90 LLM \u7684\u90e8\u7f72\u4f7f\u7528\u6559\u7a0b\uff0c\u5305\u62ec LLaMA\u3001ChatGLM\u3001InternLM \u7b49\uff1b<\/li><li>\u5f00\u6e90 LLM \u7684\u90e8\u7f72\u5e94\u7528\u6307\u5bfc\uff0c\u5305\u62ec\u547d\u4ee4\u884c\u8c03\u7528\u3001\u5728\u7ebf Demo \u90e8\u7f72\u3001LangChain \u6846\u67b6\u96c6\u6210\u7b49\uff1b<\/li><li>\u5f00\u6e90 LLM \u7684\u5168\u91cf\u5fae\u8c03\u3001\u9ad8\u6548\u5fae\u8c03\u65b9\u6cd5\uff0c\u5305\u62ec\u5206\u5e03\u5f0f\u5168\u91cf\u5fae\u8c03\u3001LoRA\u3001ptuning \u7b49\u3002<\/li><\/ol>\n\n\n\n<h2>\u73af\u5883\u914d\u7f6e<a href=\"https:\/\/github.com\/datawhalechina\/self-llm\/blob\/master\/models\/Qwen2.5\/05-Qwen2.5-7B-Instruct%20Lora%20%E5%BE%AE%E8%B0%83.md#%E7%8E%AF%E5%A2%83%E9%85%8D%E7%BD%AE\"><\/a><\/h2>\n\n\n\n<p>\u5728\u5b8c\u6210\u57fa\u672c\u73af\u5883\u914d\u7f6e\u548c\u672c\u5730\u6a21\u578b\u90e8\u7f72\u7684\u60c5\u51b5\u4e0b\uff0c\u4f60\u8fd8\u9700\u8981\u5b89\u88c5\u4e00\u4e9b\u7b2c\u4e09\u65b9\u5e93\uff0c\u53ef\u4ee5\u4f7f\u7528\u4ee5\u4e0b\u547d\u4ee4\uff1a<\/p>\n\n\n\n<pre 
class=\"wp-block-preformatted\">python -m pip install --upgrade pip\n# \u66f4\u6362 pypi \u6e90\u52a0\u901f\u5e93\u7684\u5b89\u88c5\npip config set global.index-url https:\/\/pypi.tuna.tsinghua.edu.cn\/simple\n\npip install modelscope==1.18.0\npip install transformers==4.44.2\npip install streamlit==1.24.0\npip install sentencepiece==0.2.0\npip install accelerate==0.34.2\npip install datasets==2.20.0\npip install peft==0.11.1<\/pre>\n\n\n\n<h2>\u6a21\u578b\u4e0b\u8f7d<a href=\"https:\/\/github.com\/datawhalechina\/self-llm\/blob\/master\/models\/Qwen2.5\/05-Qwen2.5-7B-Instruct%20Lora%20%E5%BE%AE%E8%B0%83.md#%E6%A8%A1%E5%9E%8B%E4%B8%8B%E8%BD%BD\"><\/a><\/h2>\n\n\n\n<p>\u4f7f\u7528 modelscope \u4e2d\u7684 snapshot_download \u51fd\u6570\u4e0b\u8f7d\u6a21\u578b\uff0c\u7b2c\u4e00\u4e2a\u53c2\u6570\u4e3a\u6a21\u578b\u540d\u79f0\uff0c\u53c2\u6570 cache_dir \u4e3a\u6a21\u578b\u7684\u4e0b\u8f7d\u8def\u5f84\u3002<\/p>\n\n\n\n<p>\u5728 \/root\/autodl-tmp \u8def\u5f84\u4e0b\u65b0\u5efa model_download.py \u6587\u4ef6\u5e76\u5728\u5176\u4e2d\u8f93\u5165\u4ee5\u4e0b\u5185\u5bb9\uff0c\u7c98\u8d34\u4ee3\u7801\u540e\u8bf7\u53ca\u65f6\u4fdd\u5b58\u6587\u4ef6\uff0c\u5982\u4e0b\u56fe\u6240\u793a\u3002\u5e76\u8fd0\u884c&nbsp;<code>python \/root\/autodl-tmp\/model_download.py<\/code>&nbsp;\u6267\u884c\u4e0b\u8f7d\uff0c\u6a21\u578b\u5927\u5c0f\u4e3a 15GB\uff0c\u4e0b\u8f7d\u6a21\u578b\u5927\u6982\u9700\u8981 5 \u5206\u949f\u3002<\/p>\n\n\n\n<pre class=\"wp-block-preformatted\">import torch\nfrom modelscope import snapshot_download, AutoModel, AutoTokenizer\nimport os\nmodel_dir = snapshot_download('qwen\/Qwen2.5-7B-Instruct', cache_dir='\/root\/autodl-tmp', revision='master')<\/pre>\n\n\n\n<h2>\u6307\u4ee4\u96c6\u6784\u5efa<a href=\"https:\/\/github.com\/datawhalechina\/self-llm\/blob\/master\/models\/Qwen2.5\/05-Qwen2.5-7B-Instruct%20Lora%20%E5%BE%AE%E8%B0%83.md#%E6%8C%87%E4%BB%A4%E9%9B%86%E6%9E%84%E5%BB%BA\"><\/a><\/h2>\n\n\n\n<p>LLM \u7684\u5fae\u8c03\u4e00\u822c\u6307\u6307\u4ee4\u5fae\u8c03\u8fc7\u7a0b\u3002\u6240\u8c13\u6307\u4ee4\u5fae\u8c03\uff0c\u662f\u8bf4\u6211\u4eec\u4f7f\u7528\u7684\u5fae\u8c03\u6570\u636e\u5f62\u5982\uff1a<\/p>\n\n\n\n<pre class=\"wp-block-preformatted\">{\n  \"instruction\": \"\u56de\u7b54\u4ee5\u4e0b\u7528\u6237\u95ee\u9898\uff0c\u4ec5\u8f93\u51fa\u7b54\u6848\u3002\",\n  \"input\": \"1+1\u7b49\u4e8e\u51e0?\",\n  \"output\": \"2\"\n}<\/pre>\n\n\n\n<p>\u5176\u4e2d\uff0c<code>instruction<\/code>&nbsp;\u662f\u7528\u6237\u6307\u4ee4\uff0c\u544a\u77e5\u6a21\u578b\u5176\u9700\u8981\u5b8c\u6210\u7684\u4efb\u52a1\uff1b<code>input<\/code>&nbsp;\u662f\u7528\u6237\u8f93\u5165\uff0c\u662f\u5b8c\u6210\u7528\u6237\u6307\u4ee4\u6240\u5fc5\u987b\u7684\u8f93\u5165\u5185\u5bb9\uff1b<code>output<\/code>&nbsp;\u662f\u6a21\u578b\u5e94\u8be5\u7ed9\u51fa\u7684\u8f93\u51fa\u3002<\/p>\n\n\n\n<p>\u5373\u6211\u4eec\u7684\u6838\u5fc3\u8bad\u7ec3\u76ee\u6807\u662f\u8ba9\u6a21\u578b\u5177\u6709\u7406\u89e3\u5e76\u9075\u5faa\u7528\u6237\u6307\u4ee4\u7684\u80fd\u529b\u3002\u56e0\u6b64\uff0c\u5728\u6307\u4ee4\u96c6\u6784\u5efa\u65f6\uff0c\u6211\u4eec\u5e94\u9488\u5bf9\u6211\u4eec\u7684\u76ee\u6807\u4efb\u52a1\uff0c\u9488\u5bf9\u6027\u6784\u5efa\u4efb\u52a1\u6307\u4ee4\u96c6\u3002\u4f8b\u5982\uff0c\u5728\u672c\u8282\u6211\u4eec\u4f7f\u7528\u7531\u7b14\u8005\u5408\u4f5c\u5f00\u6e90\u7684&nbsp;<a 
href=\"https:\/\/github.com\/KMnO4-zx\/huanhuan-chat\">Chat-\u7504\u5b1b<\/a>&nbsp;\u9879\u76ee\u4f5c\u4e3a\u793a\u4f8b\uff0c\u6211\u4eec\u7684\u76ee\u6807\u662f\u6784\u5efa\u4e00\u4e2a\u80fd\u591f\u6a21\u62df\u7504\u5b1b\u5bf9\u8bdd\u98ce\u683c\u7684\u4e2a\u6027\u5316 LLM\uff0c\u56e0\u6b64\u6211\u4eec\u6784\u9020\u7684\u6307\u4ee4\u5f62\u5982\uff1a<\/p>\n\n\n\n<pre class=\"wp-block-preformatted\">{\n  \"instruction\": \"\u4f60\u662f\u8c01\uff1f\",\n  \"input\": \"\",\n  \"output\": \"\u5bb6\u7236\u662f\u5927\u7406\u5bfa\u5c11\u537f\u7504\u8fdc\u9053\u3002\"\n}<\/pre>\n\n\n\n<p>\u6211\u4eec\u6240\u6784\u9020\u7684\u5168\u90e8\u6307\u4ee4\u6570\u636e\u96c6\u5728\u6839\u76ee\u5f55\u4e0b\u3002<\/p>\n\n\n\n<h2>\u6570\u636e\u683c\u5f0f\u5316<\/h2>\n\n\n\n<p><a href=\"https:\/\/github.com\/datawhalechina\/self-llm\/blob\/master\/models\/Qwen2.5\/05-Qwen2.5-7B-Instruct%20Lora%20%E5%BE%AE%E8%B0%83.md#%E6%95%B0%E6%8D%AE%E6%A0%BC%E5%BC%8F%E5%8C%96\"><\/a><\/p>\n\n\n\n<p><code>Lora<\/code>&nbsp;\u8bad\u7ec3\u7684\u6570\u636e\u662f\u9700\u8981\u7ecf\u8fc7\u683c\u5f0f\u5316\u3001\u7f16\u7801\u4e4b\u540e\u518d\u8f93\u5165\u7ed9\u6a21\u578b\u8fdb\u884c\u8bad\u7ec3\u7684\uff0c\u5982\u679c\u662f\u719f\u6089&nbsp;<code>Pytorch<\/code>&nbsp;\u6a21\u578b\u8bad\u7ec3\u6d41\u7a0b\u7684\u540c\u5b66\u4f1a\u77e5\u9053\uff0c\u6211\u4eec\u4e00\u822c\u9700\u8981\u5c06\u8f93\u5165\u6587\u672c\u7f16\u7801\u4e3a input_ids\uff0c\u5c06\u8f93\u51fa\u6587\u672c\u7f16\u7801\u4e3a&nbsp;<code>labels<\/code>\uff0c\u7f16\u7801\u4e4b\u540e\u7684\u7ed3\u679c\u90fd\u662f\u591a\u7ef4\u7684\u5411\u91cf\u3002\u6211\u4eec\u9996\u5148\u5b9a\u4e49\u4e00\u4e2a\u9884\u5904\u7406\u51fd\u6570\uff0c\u8fd9\u4e2a\u51fd\u6570\u7528\u4e8e\u5bf9\u6bcf\u4e00\u4e2a\u6837\u672c\uff0c\u7f16\u7801\u5176\u8f93\u5165\u3001\u8f93\u51fa\u6587\u672c\u5e76\u8fd4\u56de\u4e00\u4e2a\u7f16\u7801\u540e\u7684\u5b57\u5178\uff1a<\/p>\n\n\n\n<pre class=\"wp-block-preformatted\">def process_func(example):\n    MAX_LENGTH = 384    # Llama\u5206\u8bcd\u5668\u4f1a\u5c06\u4e00\u4e2a\u4e2d\u6587\u5b57\u5207\u5206\u4e3a\u591a\u4e2atoken\uff0c\u56e0\u6b64\u9700\u8981\u653e\u5f00\u4e00\u4e9b\u6700\u5927\u957f\u5ea6\uff0c\u4fdd\u8bc1\u6570\u636e\u7684\u5b8c\u6574\u6027\n    input_ids, attention_mask, labels = [], [], []\n    instruction = tokenizer(f\"&lt;|im_start|&gt;system\\n\u73b0\u5728\u4f60\u8981\u626e\u6f14\u7687\u5e1d\u8eab\u8fb9\u7684\u5973\u4eba--\u7504\u5b1b&lt;|im_end|&gt;\\n&lt;|im_start|&gt;user\\n{example['instruction'] + example['input']}&lt;|im_end|&gt;\\n&lt;|im_start|&gt;assistant\\n\", add_special_tokens=False)  # add_special_tokens \u4e0d\u5728\u5f00\u5934\u52a0 special_tokens\n    response = tokenizer(f\"{example['output']}\", add_special_tokens=False)\n    input_ids = instruction[\"input_ids\"] + response[\"input_ids\"] + [tokenizer.pad_token_id]\n    attention_mask = instruction[\"attention_mask\"] + response[\"attention_mask\"] + [1]  # \u56e0\u4e3aeos token\u54b1\u4eec\u4e5f\u662f\u8981\u5173\u6ce8\u7684\u6240\u4ee5 \u8865\u5145\u4e3a1\n    labels = [-100] * len(instruction[\"input_ids\"]) + response[\"input_ids\"] + [tokenizer.pad_token_id]\n    if len(input_ids) &gt; MAX_LENGTH:  # \u505a\u4e00\u4e2a\u622a\u65ad\n        input_ids = input_ids[:MAX_LENGTH]\n        attention_mask = attention_mask[:MAX_LENGTH]\n        labels = labels[:MAX_LENGTH]\n    return {\n        \"input_ids\": input_ids,\n        \"attention_mask\": attention_mask,\n        \"labels\": labels\n    }<\/pre>\n\n\n\n<p><code>Qwen2<\/code>&nbsp;\u91c7\u7528\u7684&nbsp;<code>Prompt 
The prompt template used by `Qwen2` looks like this:

```
<|im_start|>system
You are a helpful assistant.<|im_end|>
<|im_start|>user
你是谁？<|im_end|>
<|im_start|>assistant
我是一个有用的助手。<|im_end|>
```

## Loading the Tokenizer and the Half-Precision Model

The model is loaded in half precision; if your GPU is fairly recent, you can load it as `torch.bfloat16`. For custom models, always set the `trust_remote_code` parameter to `True`.

```python
tokenizer = AutoTokenizer.from_pretrained('/root/autodl-tmp/qwen/Qwen2.5-7B-Instruct/', use_fast=False, trust_remote_code=True)

model = AutoModelForCausalLM.from_pretrained('/root/autodl-tmp/qwen/Qwen2.5-7B-Instruct/', device_map="auto", torch_dtype=torch.bfloat16)
```

## Defining the LoraConfig

The `LoraConfig` class exposes many parameters, but only a few matter most of the time. Here is a brief rundown; interested readers can go straight to the source code.

- `task_type`: the task/model type
- `target_modules`: the names of the model layers to train, mainly the layers in the `attention` block; different models use different layer names, and you can pass an array, a string, or a regular expression.
- `r`: the LoRA rank; see the LoRA paper for details
- `lora_alpha`: the LoRA alpha; see the LoRA paper for what it does

What is the LoRA scaling factor, then? It is not `r` (the rank); the scaling is `lora_alpha / r`, so in this `LoraConfig` the scaling is 4.

```python
config = LoraConfig(
    task_type=TaskType.CAUSAL_LM,
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj", "gate_proj", "up_proj", "down_proj"],
    inference_mode=False,  # training mode
    r=8,  # LoRA rank
    lora_alpha=32,  # LoRA alpha; see the LoRA paper for details
    lora_dropout=0.1,  # dropout probability
)
```
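The `Trainer` below trains the LoRA-wrapped model, but the post does not show the step that attaches the adapters. A minimal sketch using standard `peft` usage (its exact placement here is my assumption):

```python
# gradient_checkpointing=True in the TrainingArguments below requires the
# input embeddings to track gradients:
model.enable_input_require_grads()

# Attach the LoRA adapters described by `config` to the base model
model = get_peft_model(model, config)
model.print_trainable_parameters()  # reports trainable vs. total parameter counts
```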
href=\"https:\/\/github.com\/datawhalechina\/self-llm\/blob\/master\/models\/Qwen2.5\/05-Qwen2.5-7B-Instruct%20Lora%20%E5%BE%AE%E8%B0%83.md#%E8%87%AA%E5%AE%9A%E4%B9%89-trainingarguments-%E5%8F%82%E6%95%B0\"><\/a><\/h2>\n\n\n\n<p><code>TrainingArguments<\/code>\u8fd9\u4e2a\u7c7b\u7684\u6e90\u7801\u4e5f\u4ecb\u7ecd\u4e86\u6bcf\u4e2a\u53c2\u6570\u7684\u5177\u4f53\u4f5c\u7528\uff0c\u5f53\u7136\u5927\u5bb6\u53ef\u4ee5\u6765\u81ea\u884c\u63a2\u7d22\uff0c\u8fd9\u91cc\u5c31\u7b80\u5355\u8bf4\u51e0\u4e2a\u5e38\u7528\u7684\u3002<\/p>\n\n\n\n<ul><li><code>output_dir<\/code>\uff1a\u6a21\u578b\u7684\u8f93\u51fa\u8def\u5f84<\/li><li><code>per_device_train_batch_size<\/code>\uff1a\u987e\u540d\u601d\u4e49&nbsp;<code>batch_size<\/code><\/li><li><code>gradient_accumulation_steps<\/code>: \u68af\u5ea6\u7d2f\u52a0\uff0c\u5982\u679c\u4f60\u7684\u663e\u5b58\u6bd4\u8f83\u5c0f\uff0c\u90a3\u53ef\u4ee5\u628a&nbsp;<code>batch_size<\/code>&nbsp;\u8bbe\u7f6e\u5c0f\u4e00\u70b9\uff0c\u68af\u5ea6\u7d2f\u52a0\u589e\u5927\u4e00\u4e9b\u3002<\/li><li><code>logging_steps<\/code>\uff1a\u591a\u5c11\u6b65\uff0c\u8f93\u51fa\u4e00\u6b21&nbsp;<code>log<\/code><\/li><li><code>num_train_epochs<\/code>\uff1a\u987e\u540d\u601d\u4e49&nbsp;<code>epoch<\/code><\/li><li><code>gradient_checkpointing<\/code>\uff1a\u68af\u5ea6\u68c0\u67e5\uff0c\u8fd9\u4e2a\u4e00\u65e6\u5f00\u542f\uff0c\u6a21\u578b\u5c31\u5fc5\u987b\u6267\u884c&nbsp;<code>model.enable_input_require_grads()<\/code>\uff0c\u8fd9\u4e2a\u539f\u7406\u5927\u5bb6\u53ef\u4ee5\u81ea\u884c\u63a2\u7d22\uff0c\u8fd9\u91cc\u5c31\u4e0d\u7ec6\u8bf4\u4e86\u3002<\/li><\/ul>\n\n\n\n<pre class=\"wp-block-preformatted\">args = TrainingArguments(\n    output_dir=\".\/output\/Qwen2.5_instruct_lora\",\n    per_device_train_batch_size=4,\n    gradient_accumulation_steps=4,\n    logging_steps=10,\n    num_train_epochs=3,\n    save_steps=100,\n    learning_rate=1e-4,\n    save_on_each_node=True,\n    gradient_checkpointing=True\n)<\/pre>\n\n\n\n<h2>\u4f7f\u7528 Trainer \u8bad\u7ec3<a href=\"https:\/\/github.com\/datawhalechina\/self-llm\/blob\/master\/models\/Qwen2.5\/05-Qwen2.5-7B-Instruct%20Lora%20%E5%BE%AE%E8%B0%83.md#%E4%BD%BF%E7%94%A8-trainer-%E8%AE%AD%E7%BB%83\"><\/a><\/h2>\n\n\n\n<pre class=\"wp-block-preformatted\">trainer = Trainer(\n    model=model,\n    args=args,\n    train_dataset=tokenized_id,\n    data_collator=DataCollatorForSeq2Seq(tokenizer=tokenizer, padding=True),\n)\ntrainer.train()<\/pre>\n\n\n\n<h2>\u52a0\u8f7d lora \u6743\u91cd\u63a8\u7406<a href=\"https:\/\/github.com\/datawhalechina\/self-llm\/blob\/master\/models\/Qwen2.5\/05-Qwen2.5-7B-Instruct%20Lora%20%E5%BE%AE%E8%B0%83.md#%E5%8A%A0%E8%BD%BD-lora-%E6%9D%83%E9%87%8D%E6%8E%A8%E7%90%86\"><\/a><\/h2>\n\n\n\n<p>\u8bad\u7ec3\u597d\u4e86\u4e4b\u540e\u53ef\u4ee5\u4f7f\u7528\u5982\u4e0b\u65b9\u5f0f\u52a0\u8f7d&nbsp;<code>lora<\/code>\u6743\u91cd\u8fdb\u884c\u63a8\u7406\uff1a<\/p>\n\n\n\n<pre class=\"wp-block-preformatted\">from transformers import AutoModelForCausalLM, AutoTokenizer\nimport torch\nfrom peft import PeftModel\n\nmodel_path = '\/root\/autodl-tmp\/qwen\/Qwen2.5-7B-Instruct\/'\nlora_path = 'lora_path'\n\n# \u52a0\u8f7dtokenizer\ntokenizer = AutoTokenizer.from_pretrained(model_path)\n\n# \u52a0\u8f7d\u6a21\u578b\nmodel = AutoModelForCausalLM.from_pretrained(model_path, device_map=\"auto\",torch_dtype=torch.bfloat16)\n\n# \u52a0\u8f7dlora\u6743\u91cd\nmodel = PeftModel.from_pretrained(model, model_id=lora_path, config=config)\n\nprompt = \"\u4f60\u662f\u8c01\uff1f\"\nmessages = [\n    {\"role\": \"system\", \"content\": 
\"\u73b0\u5728\u4f60\u8981\u626e\u6f14\u7687\u5e1d\u8eab\u8fb9\u7684\u5973\u4eba--\u7504\u5b1b\"},\n    {\"role\": \"user\", \"content\": prompt}\n]\n\ntext = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)\n\nmodel_inputs = tokenizer([text], return_tensors=\"pt\").to('cuda')\n\ngenerated_ids = model.generate(\n    model_inputs.input_ids,\n    max_new_tokens=512\n)\ngenerated_ids = [\n    output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)\n]\n\nresponse = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]\n\nprint(response)<\/pre>\n","protected":false},"excerpt":{"rendered":"<p>Github\uff1ahttps:\/\/github.com\/datawhalechina\/self-llm\/tree\/ &hellip; <a href=\"http:\/\/139.9.1.231\/index.php\/2025\/01\/21\/qwen2-lora\/\" class=\"more-link\">\u7ee7\u7eed\u9605\u8bfb<span class=\"screen-reader-text\">Qwen2 Lora LLM\u5fae\u8c03\u8bad\u7ec3\u6559\u7a0b<\/span><\/a><\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":[],"categories":[21,4,38],"tags":[],"_links":{"self":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts\/24120"}],"collection":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/comments?post=24120"}],"version-history":[{"count":18,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts\/24120\/revisions"}],"predecessor-version":[{"id":24139,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts\/24120\/revisions\/24139"}],"wp:attachment":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/media?parent=24120"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/categories?post=24120"},{"taxonomy":"post_tag","embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/tags?post=24120"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}