The parsers module provides utilities for parsing tool calls from model outputs and formatting chat templates.
ToolParser is the abstract base class for tool call parsers.
from rllm.parser import ToolParser
Methods
parse
Extract tool calls from a model response.
tool_calls = parser.parse(model_response)
Raw model output containing tool calls.
List of parsed tool calls.
Generate the tool-use prompt that describes the available tools to the model.
prompt = parser.get_tool_prompt(tools_schema)
JSON schema for available tools.
Formatted tool prompt to include in system message.
get_parser
Factory method to get appropriate parser for a tokenizer.
parser = ToolParser.get_parser(tokenizer)
Tokenizer used to detect the appropriate parser type.
Appropriate parser instance (R1ToolParser or QwenToolParser).
Parser for DeepSeek R1-style tool call format.
from rllm.parser import R1ToolParser
parser = R1ToolParser()
R1 format uses special tokens:
<|tool_calls_begin|>
<|tool_call_begin|>function<|tool_sep|>function_name
{"param1": "value1", "param2": "value2"}
<|tool_call_end|>
<|tool_calls_end|>
Methods
tool_calls = parser.parse(model_response)
prompt = parser.get_tool_prompt(tools_schema)
Parser for Qwen-style tool call format.
from rllm.parser import QwenToolParser
parser = QwenToolParser()
Qwen format uses XML-like tags:
<tool_call>{"name": "function_name", "arguments": {...}}</tool_call>
Methods
tool_calls = parser.parse(model_response)
prompt = parser.get_tool_prompt(tools_schema)
ChatTemplateParser
Parser for converting between lists of chat messages and token sequences.
from rllm.parser import ChatTemplateParser
parser = ChatTemplateParser.get_parser(tokenizer)
Methods
apply_chat_template
Convert a list of chat messages to tokens using the tokenizer's chat template.
tokens = parser.apply_chat_template(
messages,
add_generation_prompt=True
)
decode
Convert tokens back to text.
text = parser.decode(tokens)
from rllm.parser import R1ToolParser
from rllm.tools import ToolCall
parser = R1ToolParser()
model_response = """
<|tool_calls_begin|>
<|tool_call_begin|>function<|tool_sep|>search_web
{"query": "What is rLLM?", "num_results": 5}
<|tool_call_end|>
<|tool_calls_end|>
"""
tool_calls = parser.parse(model_response)
for call in tool_calls:
print(f"Tool: {call.name}")
print(f"Arguments: {call.arguments}")
# Output:
# Tool: search_web
# Arguments: {'query': 'What is rLLM?', 'num_results': 5}
from rllm.parser import QwenToolParser
parser = QwenToolParser()
model_response = '''
Let me search for that information.
<tool_call>{"name": "search", "arguments": {"query": "capital of France"}}</tool_call>
'''
tool_calls = parser.parse(model_response)
for call in tool_calls:
print(f"Tool: {call.name}")
print(f"Arguments: {call.arguments}")
# Output:
# Tool: search
# Arguments: {'query': 'capital of France'}
from rllm.parser import QwenToolParser
parser = QwenToolParser()
model_response = '''
<tool_call>{"name": "calculator", "arguments": {"a": 5, "b": 3, "op": "add"}}</tool_call>
<tool_call>{"name": "calculator", "arguments": {"a": 10, "b": 2, "op": "multiply"}}</tool_call>
'''
tool_calls = parser.parse(model_response)
print(f"Found {len(tool_calls)} tool calls")
for i, call in enumerate(tool_calls):
print(f"\nCall {i+1}:")
print(f" Name: {call.name}")
print(f" Args: {call.arguments}")
from rllm.parser import R1ToolParser
import json
parser = R1ToolParser()
tools_schema = json.dumps([
{
"type": "function",
"function": {
"name": "search_web",
"description": "Search the web for information",
"parameters": {
"type": "object",
"properties": {
"query": {"type": "string", "description": "Search query"}
},
"required": ["query"]
}
}
}
], indent=2)
tool_prompt = parser.get_tool_prompt(tools_schema)
print(tool_prompt)
# Outputs formatted tool prompt with instructions
Example: Auto-detecting Parser
from transformers import AutoTokenizer
from rllm.parser import ToolParser
# DeepSeek model -> R1ToolParser
tokenizer = AutoTokenizer.from_pretrained("deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B")
parser = ToolParser.get_parser(tokenizer)
print(type(parser).__name__) # R1ToolParser
# Qwen model -> QwenToolParser
tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen3-4B")
parser = ToolParser.get_parser(tokenizer)
print(type(parser).__name__) # QwenToolParser
Example: Using Parser with Agent
from transformers import AutoTokenizer
from rllm.parser import ToolParser
from rllm.agents import ToolAgent
tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen3-4B")
parser = ToolParser.get_parser(tokenizer)
agent = ToolAgent(
tool_map={"search": MySearchTool},
parser_name="qwen", # or pass parser directly
system_prompt="You are a helpful assistant."
)
# Agent will use the parser to extract tool calls from model responses
Example: ChatTemplateParser
from transformers import AutoTokenizer
from rllm.parser import ChatTemplateParser
tokenizer = AutoTokenizer.from_pretrained("deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B")
parser = ChatTemplateParser.get_parser(tokenizer)
messages = [
{"role": "system", "content": "You are a helpful assistant."},
{"role": "user", "content": "What is 2+2?"}
]
# Convert to tokens
tokens = parser.apply_chat_template(
messages,
add_generation_prompt=True,
return_tensors="pt"
)
print(f"Tokens: {tokens}")
# Decode back to text
text = parser.decode(tokens[0])
print(f"Formatted: {text}")