feat(model): Support internlm2.5 models (#1735)

Author: Fangyin Cheng
Date:   2024-07-18 15:17:10 +08:00 (committed by GitHub)
parent 083becde08
commit d389fddc2f
2 changed files with 23 additions and 0 deletions


@@ -171,6 +171,8 @@ LLM_MODEL_CONFIG = {
     "internlm-7b": os.path.join(MODEL_PATH, "internlm-chat-7b"),
     "internlm-7b-8k": os.path.join(MODEL_PATH, "internlm-chat-7b-8k"),
     "internlm-20b": os.path.join(MODEL_PATH, "internlm-chat-20b"),
+    "internlm2_5-7b-chat": os.path.join(MODEL_PATH, "internlm2_5-7b-chat"),
+    "internlm2_5-7b-chat-1m": os.path.join(MODEL_PATH, "internlm2_5-7b-chat-1m"),
     "codellama-7b": os.path.join(MODEL_PATH, "CodeLlama-7b-Instruct-hf"),
     "codellama-7b-sql-sft": os.path.join(MODEL_PATH, "codellama-7b-sql-sft"),
     "codellama-13b": os.path.join(MODEL_PATH, "CodeLlama-13b-Instruct-hf"),


@@ -581,6 +581,26 @@ class Codegeex4Adapter(GLM4Adapter):
         return super().load(model_path, from_pretrained_kwargs)
+
+
+class Internlm2Adapter(NewHFChatModelAdapter):
+    """
+    https://huggingface.co/internlm/internlm2_5-7b-chat
+    """
+
+    def do_match(self, lower_model_name_or_path: Optional[str] = None):
+        return (
+            lower_model_name_or_path
+            and "internlm2" in lower_model_name_or_path
+            and "chat" in lower_model_name_or_path
+        )
+
+    def load(self, model_path: str, from_pretrained_kwargs: dict):
+        if not from_pretrained_kwargs:
+            from_pretrained_kwargs = {}
+        if "trust_remote_code" not in from_pretrained_kwargs:
+            from_pretrained_kwargs["trust_remote_code"] = True
+        return super().load(model_path, from_pretrained_kwargs)
 
 
 # The following code is used to register the model adapter
 # The last registered model adapter is matched first
 register_model_adapter(YiAdapter)
@@ -602,3 +622,4 @@ register_model_adapter(OpenChatAdapter)
 register_model_adapter(GLM4Adapter)
 register_model_adapter(Codegeex4Adapter)
 register_model_adapter(Qwen2Adapter)
+register_model_adapter(Internlm2Adapter)
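For illustration, here is a standalone sketch (not part of the commit; the helper name is hypothetical, the logic mirrors Internlm2Adapter.do_match): any lowercase model name or path containing both "internlm2" and "chat" is routed to the new adapter, which then defaults trust_remote_code=True in load(). Because the last registered adapter is matched first, registering Internlm2Adapter last gives it priority for these names.

from typing import Optional


def matches_internlm2_chat(lower_model_name_or_path: Optional[str] = None) -> bool:
    # Mirrors Internlm2Adapter.do_match: both substrings must be present.
    return bool(
        lower_model_name_or_path
        and "internlm2" in lower_model_name_or_path
        and "chat" in lower_model_name_or_path
    )


assert matches_internlm2_chat("internlm2_5-7b-chat")      # new internlm2.5 chat model
assert matches_internlm2_chat("internlm2_5-7b-chat-1m")   # 1M-context variant
assert matches_internlm2_chat("internlm2-chat-20b")       # earlier internlm2 chat models also match
assert not matches_internlm2_chat("internlm2_5-7b")       # base (non-chat) checkpoints are not matched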