Mirror of https://github.com/OpenBMB/MiniCPM-V.git, synced 2026-02-04 17:59:18 +08:00.
Update finetune_lora.sh
This commit is contained in:
@@ -37,7 +37,7 @@ torchrun $DISTRIBUTED_ARGS finetune.py \
     --tune_vision true \
     --tune_llm false \
     --use_lora true \
-    --lora_target_modules "llm\..*layers\.\d+\.self_attn\.(q_proj|k_proj)" \
+    --lora_target_modules "llm\..*layers\.\d+\.self_attn\.(q_proj|k_proj|v_proj|o_proj)" \
     --model_max_length 2048 \
    --max_slice_nums 9 \
     --max_steps 10000 \
Reference in New Issue
Block a user