import importlib
import os
import sys

import hf_transfer
from loguru import logger

# hf_transfer must be importable for this flag to take effect; it enables
# accelerated downloads from the Hugging Face Hub.
os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"

# Keep import-time logging quiet; only warnings and above go to stdout.
logger.remove()
logger.add(sys.stdout, level="WARNING")

# Registry of built-in models: each key maps to the class name defined in
# lmms_eval.models.<key>.
AVAILABLE_MODELS = {
    "auroracap": "AuroraCap",
    "batch_gpt4": "BatchGPT4",
    "claude": "Claude",
    "cogvlm2": "CogVLM2",
    "from_log": "FromLog",
    "fuyu": "Fuyu",
    "gemini_api": "GeminiAPI",
    "gpt4v": "GPT4V",
    "idefics2": "Idefics2",
    "instructblip": "InstructBLIP",
    "internvl": "InternVLChat",
    "internvl2": "InternVL2",
    "llama_vid": "LLaMAVid",
    "llava": "Llava",
    "llava_hf": "LlavaHf",
    "llava_onevision": "Llava_OneVision",
    "llava_onevision_moviechat": "Llava_OneVision_MovieChat",
    "llava_sglang": "LlavaSglang",
    "llava_vid": "LlavaVid",
    "longva": "LongVA",
    "mantis": "Mantis",
    "minicpm_v": "MiniCPM_V",
    "minimonkey": "MiniMonkey",
    "moviechat": "MovieChat",
    "mplug_owl_video": "mplug_Owl",
    "phi3v": "Phi3v",
    "qwen_vl": "Qwen_VL",
    "qwen2_vl": "Qwen2_VL",
    "qwen_vl_api": "Qwen_VL_API",
    "reka": "Reka",
    "srt_api": "SRT_API",
    "tinyllava": "TinyLlava",
    "videoChatGPT": "VideoChatGPT",
    "video_llava": "VideoLLaVA",
    "vila": "VILA",
    "xcomposer2_4KHD": "XComposer2_4KHD",
    "internvideo2": "InternVideo2",
    "xcomposer2d5": "XComposer2D5",
    "oryx": "Oryx",
    "videochat2": "VideoChat2",
    "llama_vision": "LlamaVision",
}


def get_model(model_name):
    if model_name not in AVAILABLE_MODELS:
        raise ValueError(f"Model {model_name} not found in available models.")

    model_class = AVAILABLE_MODELS[model_name]
    # Built-in entries hold a bare class name; plugin entries (registered below)
    # already carry a fully qualified "package.module.Class" path.
    if "." not in model_class:
        model_class = f"lmms_eval.models.{model_name}.{model_class}"

    try:
        model_module, model_class = model_class.rsplit(".", 1)
        module = __import__(model_module, fromlist=[model_class])
        return getattr(module, model_class)
    except Exception as e:
        logger.error(f"Failed to import {model_class} from {model_name}: {e}")
        raise
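

# Usage sketch (illustrative only; the names below mirror the registry above):
#
#     from lmms_eval.models import get_model
#
#     llava_cls = get_model("llava")   # returns the Llava class, not an instance
#     get_model("not_registered")      # raises ValueError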


if os.environ.get("LMMS_EVAL_PLUGINS", None):
    # Allow specifying other packages to import models from
    for plugin in os.environ["LMMS_EVAL_PLUGINS"].split(","):
        m = importlib.import_module(f"{plugin}.models")
        for model_name, model_class in getattr(m, "AVAILABLE_MODELS").items():
            AVAILABLE_MODELS[model_name] = f"{plugin}.models.{model_name}.{model_class}"
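
# Plugin layout sketch (an assumption inferred from the loop above): a package
# listed in LMMS_EVAL_PLUGINS is expected to expose <plugin>.models with its own
# AVAILABLE_MODELS dict, and to define each class in <plugin>.models.<key>, e.g.
#
#     # my_plugin/models/__init__.py   (hypothetical package name)
#     AVAILABLE_MODELS = {"my_model": "MyModel"}
#
#     # shell
#     export LMMS_EVAL_PLUGINS=my_plugin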