mirror of
https://github.com/OpenBMB/MiniCPM-V.git
synced 2026-02-05 18:29:18 +08:00
Update LoRA finetuning code (#154)
* update lora tuning * update lora fine-tuning code * update finetuning lora code * lora code * lora finetuning code * updating lora finetuning code * update lora finetuning code * Update Lora finetuning code * Update LoRA finetuning code * Update LoRA finetuning code
This commit is contained in:
@@ -13,14 +13,10 @@ class CPMTrainer(Trainer):
|
||||
labels = inputs.pop("labels")
|
||||
else:
|
||||
labels = None
|
||||
|
||||
vllm_embedding, vision_hidden_states = self.model.get_vllm_embedding(
|
||||
inputs)
|
||||
|
||||
outputs = self.model.llm(
|
||||
inputs_embeds=vllm_embedding,
|
||||
use_cache=False,
|
||||
)
|
||||
if not self. args.use_lora:
|
||||
outputs = self.model(data = inputs, use_cache=False)
|
||||
else:
|
||||
outputs = self.model.base_model(data = inputs, use_cache=False)
|
||||
|
||||
if labels is not None:
|
||||
# Flatten the tokens
|
||||
|
||||
Reference in New Issue
Block a user