WeClone icon indicating copy to clipboard operation
WeClone copied to clipboard

怎么配置8位量化训练14B模型的settings.jsonc

Open XxiaoZzz opened this issue 7 months ago • 1 comments

{
"version": "0.2.2",
"common_args": {
"model_name_or_path": "./Qwen2.5-14B-Instruct",
"adapter_name_or_path": "./model_output",
"template": "qwen",
"default_system": "请你扮演一名人类,不要说自己是人工智能",
"finetuning_type": "lora",
"trust_remote_code": true,
"load_in_8bit": true,
"quantization_config": {
"load_in_8bit": true,
"llm_int8_threshold": 6.0
}
},
"make_dataset_args": {
"include_type": [
"文本"
],
"blocked_words": [
"例如 姓名",
"例如 密码",
"//....."
],
"single_combine_strategy": "time_window",
"qa_match_strategy": "time_window",
"single_combine_time_window": 2,
"qa_match_time_window": 5,
"combine_msg_max_length": 256,
"prompt_with_history": false,
"clean_dataset": {
"enable_clean": true,
"clean_strategy": "llm",
"llm": {
"accept_score": 2,
"max_prompt_length": 3000
}
}
},
"train_pt_args": {
"stage": "pt",
"dataset": "wechat-pt",
"dataset_dir": "./dataset/res_csv/pt",
"lora_target": "q_proj,k_proj,v_proj,o_proj,gate_proj,up_proj,down_proj",
"lora_rank": 2,
"lora_dropout": 0.1,
"output_dir": "model_output",
"overwrite_cache": true,
"per_device_train_batch_size": 1,
"gradient_accumulation_steps": 8,
"lr_scheduler_type": "cosine",
"logging_steps": 10,
"save_steps": 1000,
"learning_rate": 0.001,
"num_train_epochs": 30,
"plot_loss": true,
"fp16": true
},
"train_sft_args": {
"stage": "sft",
"dataset": "wechat-sft",
"dataset_dir": "./dataset/res_csv/sft",
"use_fast_tokenizer": true,
"lora_target": "q_proj,k_proj,v_proj,o_proj,gate_proj,up_proj,down_proj",
"lora_rank": 4,
"lora_dropout": 0.4,
"weight_decay": 0.1,
"overwrite_cache": true,
"per_device_train_batch_size": 1,
"gradient_accumulation_steps": 8,
"lr_scheduler_type": "cosine",
"cutoff_len": 256,
"logging_steps": 10,
"save_steps": 100,
"learning_rate": 1e-4,
"warmup_ratio": 0.1,
"num_train_epochs": 3,
"plot_loss": true,
"fp16": true,
"flash_attn": "fa2"
},
"infer_args": {
"repetition_penalty": 1.2,
"temperature": 0.5,
"max_length": 50,
"top_p": 0.65
}
}

这个是我的配置文件内容,我已经更新LLaMA Factory和依赖版本。尝试了很多方法,一直出现: raise ValueError(f"Some keys are not used by the HfArgumentParser: {sorted(unused_keys)}") ValueError: Some keys are not used by the HfArgumentParser: ['load_in_8bit', 'quantization_config'],求大神教教如何解决

XxiaoZzz avatar May 18 '25 15:05 XxiaoZzz

参考这篇文章: https://blog.051088.xyz/2025/05/14/WeClone-%E7%94%A8%E5%BE%AE%E4%BF%A1%E8%81%8A%E5%A4%A9%E8%AE%B0%E5%BD%95%E6%89%93%E9%80%A0%E8%87%AA%E5%B7%B1%E7%9A%84AI%E6%95%B0%E5%AD%97%E5%88%86%E8%BA%AB/

xming521 avatar May 19 '25 00:05 xming521