Skip to content

Commit 1c89b77

Browse files
xiaomin-D, mickqian, and zhaochenyang20
authored and committed
Support InternVL3 (sgl-project#5350)
Co-authored-by: Mick <[email protected]>
Co-authored-by: Chayenne <[email protected]>
1 parent dec94e9 commit 1c89b77

File tree

12 files changed

+1728
-9
lines changed

12 files changed

+1728
-9
lines changed

python/sglang/lang/chat_template.py

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -270,6 +270,29 @@ def get_chat_template_by_model_path(model_path):
270270
)
271271
)
272272

273+
# Chat template for DeepSeek's Janus multimodal models: no system prompt,
# bare role markers, and an image placeholder token appended with a newline.
# Generation stops at the end-of-sentence marker.
register_chat_template(
    ChatTemplate(
        name="janus",
        default_system_prompt=None,
        role_prefix_and_suffix={
            "system": ("", ""),
            "user": ("<|User|>", ""),
            "assistant": ("<|Assistant|>", "<|end▁of▁sentence|>"),
        },
        stop_str=("<|end▁of▁sentence|>",),
        image_token="<image_placeholder>\n",
    )
)
295+
273296
# The difference between "llama-3-instruct-llava" and "llama-3-instruct" is that llava uses a different image_token.
274297
register_chat_template(
275298
ChatTemplate(
@@ -395,6 +418,20 @@ def get_chat_template_by_model_path(model_path):
395418
)
396419
)
397420

421+
# Adapted from https://huggingface.co/OpenGVLab/InternVL2-4B/blob/main/modeling_intern_vit.py
# ChatML-style template used by the InternVL 2.5 / InternVL3 model family.
register_chat_template(
    ChatTemplate(
        name="internvl-2-5",
        # Default system prompt shipped with InternVL (the model's Chinese
        # self-description); kept verbatim from the upstream model card.
        default_system_prompt="你是书生·万象,英文名是InternVL,是由上海人工智能实验室、清华大学及多家合作单位联合开发的多模态大语言模型。",
        role_prefix_and_suffix={
            "system": (
                "<|im_start|>system\n",
                "<|im_end|>\n",
            ),
            "user": (
                "<|im_start|>user\n",
                "<|im_end|>\n",
            ),
            "assistant": (
                "<|im_start|>assistant\n",
                "<|im_end|>\n",
            ),
        },
        stop_str=["<|im_end|>", "<|action_end|>"],
    )
)
434+
398435
register_chat_template(
399436
ChatTemplate(
400437
name="granite-3-instruct",
@@ -565,6 +602,13 @@ def match_gemma3_instruct(model_path: str):
565602
return get_chat_template("gemma-it")
566603

567604

605+
@register_chat_template_matching_function
def match_internvl_chat(model_path: str):
    """Select the InternVL template for any model path containing "internvl"."""
    # Case-insensitive substring match; non-InternVL paths fall through (None).
    if "internvl" not in model_path.lower():
        return None
    return get_chat_template("internvl-2-5")
610+
611+
568612
if __name__ == "__main__":
569613
messages = [
570614
{"role": "system", "content": None}, # None means default

0 commit comments

Comments
 (0)