mirror of https://github.com/OpenBMB/MiniCPM-V.git
Modify eval_mm for MiniCPM-o 2.6
@@ -1,11 +1,11 @@
 import os
-from ...api import OpenAIWrapper
 from ...smp import load_env
 
 INTERNAL = os.environ.get('INTERNAL', 0)
 
 
 def build_judge(**kwargs):
+    from ...api import OpenAIWrapper, SiliconFlowAPI
     model = kwargs.pop('model', None)
     kwargs.pop('nproc', None)
     load_env()
@@ -19,12 +19,20 @@ def build_judge(**kwargs):
             'chatgpt-1106': 'gpt-3.5-turbo-1106',
             'chatgpt-0125': 'gpt-3.5-turbo-0125',
             'gpt-4o': 'gpt-4o-2024-05-13',
             'gpt-4o-0806': 'gpt-4o-2024-08-06',
             'gpt-4o-mini': 'gpt-4o-mini-2024-07-18',
+            'qwen-7b': 'Qwen/Qwen2.5-7B-Instruct',
+            'qwen-72b': 'Qwen/Qwen2.5-72B-Instruct',
+            'deepseek': 'deepseek-ai/DeepSeek-V2.5',
         }
         model_version = model_map[model]
     else:
         model_version = LOCAL_LLM
-    model = OpenAIWrapper(model_version, **kwargs)
+
+    if model in ['qwen-7b', 'qwen-72b', 'deepseek']:
+        model = SiliconFlowAPI(model_version, **kwargs)
+    else:
+        model = OpenAIWrapper(model_version, **kwargs)
     return model
@@ -32,7 +40,7 @@ DEBUG_MESSAGE = """
 To debug the OpenAI API, you can try the following scripts in python:
 ```python
 from vlmeval.api import OpenAIWrapper
-model = OpenAIWrapper('gpt-4-1106-preview', verbose=True)
+model = OpenAIWrapper('gpt-4o', verbose=True)
 msgs = [dict(type='text', value='Hello!')]
 code, answer, resp = model.generate_inner(msgs)
 print(code, answer, resp)
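With this patch, the judge names 'qwen-7b', 'qwen-72b', and 'deepseek' resolve through SiliconFlowAPI, while the existing OpenAI judge names keep going through OpenAIWrapper. Below is a minimal sketch of how the patched build_judge would be called; the top-level import path and the verbose kwarg are assumptions for illustration (the diff only shows the relative imports used inside eval_mm), and the SiliconFlow-backed judges presumably require the corresponding API key to be set in the environment.

```python
# Sketch only: the import path below is an assumption inferred from the
# relative imports in this diff (from ...api import ...); it is not confirmed here.
from vlmeval.dataset.utils import build_judge

# 'qwen-72b' maps to 'Qwen/Qwen2.5-72B-Instruct' in model_map and is
# dispatched to SiliconFlowAPI by the new if-branch.
qwen_judge = build_judge(model='qwen-72b', verbose=True)  # verbose is illustrative

# Other mapped names, e.g. 'gpt-4o-mini', still return an OpenAIWrapper.
gpt_judge = build_judge(model='gpt-4o-mini', verbose=True)
```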