Mirror of https://github.com/OpenBMB/MiniCPM-V.git, synced 2026-02-05 18:29:18 +08:00
Merge pull request #206 from ByeongkiJeong/patch-1
Update inference_on_multiple_gpus.md
@@ -43,7 +43,7 @@ gpu_device_ids = [0, 1] # Define which gpu to use (now we have two GPUs, each ha
 no_split_module_classes = ["LlamaDecoderLayer"]

 max_memory = {
-    device_id: memory for device_id in gpu_device_ids
+    device_id: max_memory_each_gpu for device_id in gpu_device_ids
 }

 config = AutoConfig.from_pretrained(
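For context, the change replaces the undefined name `memory` with `max_memory_each_gpu`, so every listed GPU gets the same memory cap in the dictionary handed to accelerate. The sketch below shows how such a comprehension typically feeds accelerate's infer_auto_device_map and a device_map-aware from_pretrained call; the model id, the "10GiB" budget, and the surrounding calls are illustrative assumptions, not the exact contents of inference_on_multiple_gpus.md.

# Minimal sketch under assumed values (model id, per-GPU budget); not the guide's exact text.
from accelerate import infer_auto_device_map, init_empty_weights
from transformers import AutoConfig, AutoModel

model_path = "openbmb/MiniCPM-V"      # assumed checkpoint id
max_memory_each_gpu = "10GiB"         # assumed per-GPU memory budget
gpu_device_ids = [0, 1]               # the two GPUs from the hunk header
no_split_module_classes = ["LlamaDecoderLayer"]

# The corrected comprehension: one identical memory cap per listed GPU.
max_memory = {
    device_id: max_memory_each_gpu for device_id in gpu_device_ids
}

config = AutoConfig.from_pretrained(model_path, trust_remote_code=True)

# Instantiate the architecture without allocating weights, then derive a device
# map that honors the per-GPU caps and never splits a LlamaDecoderLayer.
with init_empty_weights():
    empty_model = AutoModel.from_config(config, trust_remote_code=True)

device_map = infer_auto_device_map(
    empty_model,
    max_memory=max_memory,
    no_split_module_classes=no_split_module_classes,
)

# Load the real weights with the computed placement across both GPUs.
model = AutoModel.from_pretrained(
    model_path,
    device_map=device_map,
    trust_remote_code=True,
)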