
Commit fac17ac

add function call parser for DeepSeek V3 (#5224)
1 parent: 8b39274

File tree: 4 files changed, +119 -1 lines

- docs/references/deepseek.md
- python/sglang/srt/function_call_parser.py
- python/sglang/srt/openai_api/adapter.py
- python/sglang/srt/server_args.py

docs/references/deepseek.md

Lines changed: 29 additions & 0 deletions
@@ -163,6 +163,35 @@ When using FlashInfer MLA wrapper (`--attention-backend flashinfer`) with specul

See [Separate Reasoning](https://docs.sglang.ai/backend/separate_reasoning.html).

+
+### Function calling for DeepSeek Models
+
+Add the argument `--tool-call-parser deepseekv3` to enable this feature. For example (running on a single H20 node):
+
+```
+python3 -m sglang.launch_server --model deepseek-ai/DeepSeek-V3-0324 --tp 8 --port 30000 --host 0.0.0.0 --mem-fraction-static 0.9 --disable-cuda-graph --tool-call-parser deepseekv3
+```
+
+Sample Request:
+
+```
+curl "http://127.0.0.1:30000/v1/chat/completions" \
+  -H "Content-Type: application/json" \
+  -d '{"temperature": 0, "max_tokens": 100, "model": "deepseek-ai/DeepSeek-V3-0324", "tools": [{"type": "function", "function": {"name": "query_weather", "description": "Get weather of a city, the user should supply a city first", "parameters": {"type": "object", "properties": {"city": {"type": "string", "description": "The city, e.g. Beijing"}}, "required": ["city"]}}}], "messages": [{"role": "user", "content": "How's the weather in Qingdao today?"}]}'
+```
+
+Expected Response:
+
+```
+{"id": "62af80528930423a82c806651ec66e7c", "object": "chat.completion", "created": 1744431333, "model": "deepseek-ai/DeepSeek-V3-0324", "choices": [{"index": 0, "message": {"role": "assistant", "content": null, "reasoning_content": null, "tool_calls": [{"id": "0", "type": "function", "function": {"name": "query_weather", "arguments": "{\\"city\\": \\"Guangzhou\\"}"}}]}, "logprobs": null, "finish_reason": "tool_calls", "matched_stop": null}], "usage": {"prompt_tokens": 118, "total_tokens": 140, "completion_tokens": 22, "prompt_tokens_details": null}}
+```
+
+Important Notes:
+1. Use a lower `"temperature"` value for better results.
+2. Currently, the function calling implementation for DeepSeek is incompatible with streaming requests.
+
+

## FAQ

1. **Question**: What should I do if model loading takes too long and NCCL timeout occurs?
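For reference, the sample request added to the docs above can also be sent from Python through the same OpenAI-compatible endpoint. The snippet below is an illustrative sketch, not part of this commit: it assumes the server was launched with `--tool-call-parser deepseekv3` as shown in the new docs section and that the `openai` client package is installed.

```python
# Illustrative sketch (not part of this commit): the curl example from the new
# docs section, rewritten with the OpenAI Python client.
from openai import OpenAI

# Local sglang server started with --tool-call-parser deepseekv3 (see docs above).
client = OpenAI(base_url="http://127.0.0.1:30000/v1", api_key="EMPTY")

tools = [
    {
        "type": "function",
        "function": {
            "name": "query_weather",
            "description": "Get weather of a city, the user should supply a city first",
            "parameters": {
                "type": "object",
                "properties": {
                    "city": {"type": "string", "description": "The city, e.g. Beijing"}
                },
                "required": ["city"],
            },
        },
    }
]

response = client.chat.completions.create(
    model="deepseek-ai/DeepSeek-V3-0324",
    temperature=0,
    max_tokens=100,
    tools=tools,
    # Streaming is not yet supported with the deepseekv3 tool-call parser.
    messages=[{"role": "user", "content": "How's the weather in Qingdao today?"}],
)

# On a successful parse, finish_reason is "tool_calls" and the parsed call
# (function name plus JSON arguments) is available on message.tool_calls.
print(response.choices[0].message.tool_calls)
```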

python/sglang/srt/function_call_parser.py

Lines changed: 60 additions & 0 deletions
@@ -25,6 +25,7 @@
    "<tool_call>",
    "<|python_tag|>",
    "[TOOL_CALLS]",
+    "<|tool▁calls▁begin|>",
]


@@ -477,6 +478,64 @@ def structure_info(self) -> _GetInfoFunc:
        )


+class DeepSeekV3Detector(BaseFormatDetector):
+    """
+    Detector for DeepSeek models.
+    Assumes function call format:
+    '<|tool▁calls▁begin|><|tool▁call▁begin|>function<|tool▁sep|>get_current_weather\n```json\n{"location": "Tokyo"}\n```<|tool▁call▁end|>\n<|tool▁call▁begin|>function<|tool▁sep|>get_current_weather\n```json\n{"location": "Paris"}\n```<|tool▁call▁end|><|tool▁calls▁end|><|end▁of▁sentence|>
+    """
+
+    def __init__(self):
+        super().__init__()
+        self.bot_token = "<|tool▁calls▁begin|>"
+        self.eot_token = "<|tool▁calls▁end|>"
+        self.func_call_regex = r"<|tool▁call▁begin|>.*?<|tool▁call▁end|>"
+        self.func_detail_regex = r"<|tool▁call▁begin|>(.*)<|tool▁sep|>(.*)\n```json\n(.*)\n```<|tool▁call▁end|>"
+
+    def has_tool_call(self, text: str) -> bool:
+        """Check if the text contains a deepseek format tool call."""
+        return self.bot_token in text
+
+    def detect_and_parse(self, text: str, tools: List[Tool]) -> StreamingParseResult:
+        """
+        One-time parsing: Detects and parses tool calls in the provided text.
+
+        :param text: The complete text to parse.
+        :param tools: List of available tools.
+        :return: ParseResult indicating success or failure, consumed text, leftover text, and parsed calls.
+        """
+        idx = text.find(self.bot_token)
+        normal_text = text[:idx].strip() if idx != -1 else text
+        if self.bot_token not in text:
+            return StreamingParseResult(normal_text=normal_text, calls=[])
+        match_result_list = re.findall(self.func_call_regex, text, re.DOTALL)
+        calls = []
+        try:
+            for match_result in match_result_list:
+                # Get function name
+                func_detail = re.search(self.func_detail_regex, match_result, re.DOTALL)
+                func_name = func_detail.group(2)
+                func_args = func_detail.group(3)
+                func_args = json.loads(func_args)
+                # construct match_result for parse_base_json
+                match_result = {"name": func_name, "parameters": func_args}
+                calls.extend(self.parse_base_json(match_result, tools))
+            return StreamingParseResult(normal_text=normal_text, calls=calls)
+        except Exception as e:
+            logger.error(f"Error in detect_and_parse: {e}")
+            # return the normal text if parsing fails
+            return StreamingParseResult(normal_text=text)
+
+    def structure_info(self) -> _GetInfoFunc:
+        return lambda name: StructureInfo(
+            begin="<|tool▁calls▁begin|><|tool▁call▁begin|>function<|tool▁sep|>"
+            + name
+            + "\n```json\n",
+            end="\n```<|tool▁call▁end|><|tool▁calls▁end|>",
+            trigger="<|tool▁calls▁begin|>",
+        )
+
+
class MultiFormatParser:
    def __init__(self, detectors: List[BaseFormatDetector]):
        """
@@ -543,6 +602,7 @@ class FunctionCallParser:
        "llama3": Llama32Detector,
        "qwen25": Qwen25Detector,
        "mistral": MistralDetector,
+        "deepseekv3": DeepSeekV3Detector,
    }

    def __init__(self, tools: List[Tool], tool_call_parser: str):
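As a usage sketch (not part of this commit), the new detector can be exercised directly on a hand-written completion that follows the format given in its docstring; the import path below simply mirrors this file's location.

```python
# Sketch only: feed DeepSeekV3Detector a completion in the documented format.
from sglang.srt.function_call_parser import DeepSeekV3Detector

sample_completion = (
    "<|tool▁calls▁begin|><|tool▁call▁begin|>function<|tool▁sep|>query_weather\n"
    '```json\n{"city": "Qingdao"}\n```'
    "<|tool▁call▁end|><|tool▁calls▁end|>"
)

detector = DeepSeekV3Detector()

# has_tool_call() only checks for the presence of bot_token, so this is True.
print(detector.has_tool_call(sample_completion))

# detect_and_parse(sample_completion, tools) would then pull the function name
# ("query_weather", regex group 2) and the JSON arguments (group 3) out of each
# <|tool▁call▁begin|>...<|tool▁call▁end|> block and match them against the
# supplied tool definitions via parse_base_json.
```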

python/sglang/srt/openai_api/adapter.py

Lines changed: 29 additions & 0 deletions
@@ -938,6 +938,35 @@ def v1_chat_generate_request(

        if chat_template_name is None:
            openai_compatible_messages = []
+            if (
+                tools
+                and tokenizer_manager.server_args.tool_call_parser == "deepseekv3"
+            ):
+                # add function call prompt to deepseekv3
+                openai_compatible_messages.append(
+                    {
+                        "role": "system",
+                        "content": """You are a helpful Assistant.
+## Tools
+### Function
+You have the following functions available:
+"""
+                        + "".join(
+                            [
+                                f"""
+- `{tool['name']}`:
+```json
+{json.dumps(tool)}
+```
+"""
+                                for tool in tools
+                            ]
+                        ),
+                    }
+                )
+            # TODO fix the compatible issues with xgrammar
+            strict_tag = None
+
            for message in request.messages:
                if isinstance(message.content, str):
                    openai_compatible_messages.append(
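For illustration (not part of this commit), the sketch below approximates the system prompt that this block injects when the deepseekv3 parser is active, using the `query_weather` tool from the docs example. Here `tools` is assumed to hold plain function dicts, matching the `tool['name']` and `json.dumps(tool)` accesses in the diff.

```python
# Rough sketch of the injected system prompt for the deepseekv3 parser.
import json

tools = [
    {
        "name": "query_weather",
        "description": "Get weather of a city, the user should supply a city first",
        "parameters": {
            "type": "object",
            "properties": {
                "city": {"type": "string", "description": "The city, e.g. Beijing"}
            },
            "required": ["city"],
        },
    }
]

# Mirrors the concatenation in the diff: a fixed header followed by one fenced
# JSON block per available function.
content = (
    "You are a helpful Assistant.\n"
    "## Tools\n"
    "### Function\n"
    "You have the following functions available:\n"
    + "".join(
        f"\n- `{tool['name']}`:\n```json\n{json.dumps(tool)}\n```\n" for tool in tools
    )
)

system_message = {"role": "system", "content": content}
print(system_message["content"])
```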

python/sglang/srt/server_args.py

Lines changed: 1 addition & 1 deletion
@@ -1087,7 +1087,7 @@ def add_cli_args(parser: argparse.ArgumentParser):
        parser.add_argument(
            "--tool-call-parser",
            type=str,
-            choices=["qwen25", "mistral", "llama3"],
+            choices=["qwen25", "mistral", "llama3", "deepseekv3"],
            default=ServerArgs.tool_call_parser,
            help="Specify the parser for handling tool-call interactions. Options include: 'qwen25', 'mistral', and 'llama3'.",
        )
