Mirror of https://github.com/OpenBMB/MiniCPM-V.git
Add eval_mm dir
eval_mm/vlmevalkit/vlmeval/evaluate/misc.py (new file, 29 lines added)
@@ -0,0 +1,29 @@
import os
from vlmeval.api import OpenAIWrapper, OpenAIWrapperInternal
from vlmeval.smp import load_env

# Use the internal API wrapper when the INTERNAL environment variable is set
INTERNAL = os.environ.get('INTERNAL', 0)


def build_judge(**kwargs):
    # Build an LLM judge: resolve the 'model' alias to a concrete model version,
    # or use the model named by the LOCAL_LLM environment variable if it is set.
    model = kwargs.pop('model', None)
    load_env()
    LOCAL_LLM = os.environ.get('LOCAL_LLM', None)
    if LOCAL_LLM is None:
        # Map short judge aliases to concrete OpenAI model versions
        model_map = {
            'gpt-4-turbo': 'gpt-4-1106-preview',
            'gpt-4-0613': 'gpt-4-0613',
            'gpt-4-0314': 'gpt-4-0314',
            'gpt-4-0125': 'gpt-4-0125-preview',
            'chatgpt-1106': 'gpt-3.5-turbo-1106',
            'chatgpt-0613': 'gpt-3.5-turbo-0613',
            'chatgpt-0125': 'gpt-3.5-turbo-0125'
        }
        model_version = model_map[model]
    else:
        model_version = LOCAL_LLM
    # Remaining kwargs are forwarded to the chosen API wrapper
    if INTERNAL:
        model = OpenAIWrapperInternal(model_version, **kwargs)
    else:
        model = OpenAIWrapper(model_version, **kwargs)
    return model
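
For context, a minimal usage sketch of the new helper (not part of the commit): the 'model' alias must be one of the keys in model_map unless LOCAL_LLM is set, and any remaining keyword arguments are forwarded to the wrapper. The specific kwargs and the generate() call below are assumptions about the vlmeval.api wrapper interface.

# Usage sketch, not part of the diff; the verbose kwarg and the
# generate() method are assumed from the vlmeval.api wrappers.
judge = build_judge(model='gpt-4-turbo', verbose=True)
print(judge.generate('Score this answer from 0 to 5: ...'))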