Mirror of https://github.com/OpenBMB/MiniCPM-V.git
update lora finetune inference bug (#224)
@@ -40,7 +40,6 @@ torchrun $DISTRIBUTED_ARGS finetune.py \
    --lora_target_modules "llm\..*layers\.\d+\.self_attn\.(q_proj|k_proj)" \
    --model_max_length 2048 \
    --max_slice_nums 9 \
    --scale_resolution 448 \
    --max_steps 10000 \
    --eval_steps 1000 \
    --output_dir output/output_minicpmv2_lora \
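For context on the hunk above: --lora_target_modules is a regular expression over module names, so this value restricts LoRA to the q_proj and k_proj projections inside the LLM backbone's self-attention blocks. A minimal sketch of that matching behavior, assuming the script feeds the flag into a PEFT-style regex full-match; the candidate module names below are illustrative, not taken from the commit:

    import re

    pattern = r"llm\..*layers\.\d+\.self_attn\.(q_proj|k_proj)"

    # Hypothetical module names in the style of MiniCPM-V's LLM backbone.
    candidates = [
        "llm.model.layers.0.self_attn.q_proj",    # matched: LoRA applied
        "llm.model.layers.0.self_attn.k_proj",    # matched: LoRA applied
        "llm.model.layers.0.self_attn.v_proj",    # not matched: v_proj excluded
        "vpm.encoder.layers.0.self_attn.q_proj",  # not matched: vision tower lacks "llm." prefix
    ]
    for name in candidates:
        print(f"{name}: {'match' if re.fullmatch(pattern, name) else 'no match'}")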