mirror of
https://github.com/OpenBMB/MiniCPM-V.git
synced 2026-02-05 10:19:18 +08:00
Update finetune_lora.sh
This commit is contained in:
@@ -47,7 +47,7 @@ torchrun $DISTRIBUTED_ARGS finetune.py \
     --logging_strategy "steps" \
     --per_device_train_batch_size 2 \
     --per_device_eval_batch_size 1 \
-    --gradient_accumulation_steps 1 \
+    --gradient_accumulation_steps 8 \
     --evaluation_strategy "steps" \
     --save_strategy "steps" \
     --save_steps 1000 \
Reference in New Issue
Block a user