Skip to content

Commit 6fb0a8f

Browse files
Added xai responses support (#16391)
* Added xai responses support * add the xai provider config above * remove init file * remove init file * Fix f string lint error --------- Co-authored-by: Ishaan Jaffer <ishaanjaffer0324@gmail.com>
1 parent eef8c01 commit 6fb0a8f

File tree

4 files changed

+261
-0
lines changed

4 files changed

+261
-0
lines changed

litellm/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1230,6 +1230,7 @@ def add_known_models():
12301230
from .llms.azure.responses.o_series_transformation import (
12311231
AzureOpenAIOSeriesResponsesAPIConfig,
12321232
)
1233+
from .llms.xai.responses.transformation import XAIResponsesAPIConfig
12331234
from .llms.litellm_proxy.responses.transformation import (
12341235
LiteLLMProxyResponsesAPIConfig,
12351236
)
Lines changed: 146 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,146 @@
1+
from typing import TYPE_CHECKING, Any, Dict, List, Optional
2+
3+
import litellm
4+
from litellm._logging import verbose_logger
5+
from litellm.llms.openai.responses.transformation import OpenAIResponsesAPIConfig
6+
from litellm.secret_managers.main import get_secret_str
7+
from litellm.types.llms.openai import ResponsesAPIOptionalRequestParams
8+
from litellm.types.router import GenericLiteLLMParams
9+
from litellm.types.utils import LlmProviders
10+
11+
if TYPE_CHECKING:
12+
from litellm.litellm_core_utils.litellm_logging import Logging as _LiteLLMLoggingObj
13+
14+
LiteLLMLoggingObj = _LiteLLMLoggingObj
15+
else:
16+
LiteLLMLoggingObj = Any
17+
18+
XAI_API_BASE = "https://api.x.ai/v1"


class XAIResponsesAPIConfig(OpenAIResponsesAPIConfig):
    """
    Configuration for XAI's Responses API.

    Inherits from OpenAIResponsesAPIConfig since XAI's Responses API is largely
    compatible with OpenAI's, with a few differences:
    - Does not support the 'instructions' parameter
    - Requires code_interpreter tools to have the 'container' field removed
    - Recommends store=false when sending images (NOTE: not handled by this
      config; callers must set it themselves)

    Reference: https://docs.x.ai/docs/api-reference#create-new-response
    """

    @property
    def custom_llm_provider(self) -> LlmProviders:
        return LlmProviders.XAI

    def get_supported_openai_params(self, model: str) -> list:
        """
        Get supported parameters for XAI Responses API.

        XAI supports most OpenAI Responses API params except 'instructions'.
        """
        supported_params = super().get_supported_openai_params(model)

        # Remove 'instructions' as it's not supported by XAI
        if "instructions" in supported_params:
            supported_params.remove("instructions")

        return supported_params

    def map_openai_params(
        self,
        response_api_optional_params: ResponsesAPIOptionalRequestParams,
        model: str,
        drop_params: bool,
    ) -> Dict:
        """
        Map parameters for XAI Responses API.

        Handles XAI-specific transformations:
        1. Drops the 'instructions' parameter (not supported by XAI)
        2. Removes the 'container' field from code_interpreter tools

        Args:
            response_api_optional_params: Optional request params from the caller.
            model: Model name (unused here; kept for interface compatibility).
            drop_params: Unused; 'instructions' is always dropped because XAI
                rejects it unconditionally.

        Returns:
            Dict of params safe to send to the XAI Responses API.
        """
        params = dict(response_api_optional_params)

        # Drop instructions parameter (not supported by XAI)
        if "instructions" in params:
            verbose_logger.debug(
                "XAI Responses API does not support 'instructions' parameter. Dropping it."
            )
            params.pop("instructions")

        # Transform code_interpreter tools - remove container field
        if "tools" in params and params["tools"]:
            tools_list = params["tools"]
            # Ensure tools is a list for iteration
            if not isinstance(tools_list, list):
                tools_list = [tools_list]

            transformed_tools: List[Any] = []
            for tool in tools_list:
                if isinstance(tool, dict) and tool.get("type") == "code_interpreter":
                    # XAI supports code_interpreter but doesn't use the container
                    # field. Strip only 'container' and keep any other fields the
                    # caller set, rather than collapsing the tool to just its
                    # type (the previous behavior silently discarded all other
                    # tool fields).
                    verbose_logger.debug(
                        "XAI: Transforming code_interpreter tool, removing container field"
                    )
                    transformed_tools.append(
                        {k: v for k, v in tool.items() if k != "container"}
                    )
                else:
                    transformed_tools.append(tool)
            params["tools"] = transformed_tools

        return params

    def validate_environment(
        self, headers: dict, model: str, litellm_params: Optional[GenericLiteLLMParams]
    ) -> dict:
        """
        Validate environment and set up headers for XAI API.

        Uses the API key from litellm_params, then litellm.api_key, then the
        XAI_API_KEY environment variable / secret manager, in that order.

        Raises:
            ValueError: If no API key can be resolved.
        """
        litellm_params = litellm_params or GenericLiteLLMParams()
        api_key = (
            litellm_params.api_key
            or litellm.api_key
            or get_secret_str("XAI_API_KEY")
        )

        if not api_key:
            raise ValueError(
                "XAI API key is required. Set XAI_API_KEY environment variable or pass api_key parameter."
            )

        headers.update(
            {
                "Authorization": f"Bearer {api_key}",
            }
        )
        return headers

    def get_complete_url(
        self,
        api_base: Optional[str],
        litellm_params: dict,
    ) -> str:
        """
        Get the complete URL for XAI Responses API endpoint.

        Resolution order for the base URL: explicit api_base argument,
        litellm.api_base, XAI_API_BASE env/secret, then the default
        https://api.x.ai/v1.

        Returns:
            str: The full URL for the XAI /responses endpoint
        """
        api_base = (
            api_base
            or litellm.api_base
            or get_secret_str("XAI_API_BASE")
            or XAI_API_BASE
        )

        # Remove trailing slashes so we don't emit a double slash in the path
        api_base = api_base.rstrip("/")

        return f"{api_base}/responses"
146+

litellm/utils.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7374,6 +7374,8 @@ def get_provider_responses_api_config(
73747374
return litellm.AzureOpenAIOSeriesResponsesAPIConfig()
73757375
else:
73767376
return litellm.AzureOpenAIResponsesAPIConfig()
7377+
elif litellm.LlmProviders.XAI == provider:
7378+
return litellm.XAIResponsesAPIConfig()
73777379
elif litellm.LlmProviders.LITELLM_PROXY == provider:
73787380
return litellm.LiteLLMProxyResponsesAPIConfig()
73797381
return None
Lines changed: 112 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,112 @@
1+
"""
2+
Tests for XAI Responses API transformation
3+
4+
Tests the XAIResponsesAPIConfig class that handles XAI-specific
5+
transformations for the Responses API.
6+
7+
Source: litellm/llms/xai/responses/transformation.py
8+
"""
9+
import sys
10+
import os
11+
12+
sys.path.insert(0, os.path.abspath("../../../../.."))
13+
14+
import pytest
15+
from litellm.types.utils import LlmProviders
16+
from litellm.utils import ProviderConfigManager
17+
from litellm.llms.xai.responses.transformation import XAIResponsesAPIConfig
18+
from litellm.types.llms.openai import ResponsesAPIOptionalRequestParams
19+
20+
21+
class TestXAIResponsesAPITransformation:
    """Test XAI Responses API configuration and transformations"""

    def test_xai_provider_config_registration(self):
        """Test that XAI provider returns XAIResponsesAPIConfig"""
        cfg = ProviderConfigManager.get_provider_responses_api_config(
            model="xai/grok-4-fast",
            provider=LlmProviders.XAI,
        )

        assert cfg is not None, "Config should not be None for XAI provider"
        assert isinstance(
            cfg, XAIResponsesAPIConfig
        ), f"Expected XAIResponsesAPIConfig, got {type(cfg)}"
        assert (
            cfg.custom_llm_provider == LlmProviders.XAI
        ), "custom_llm_provider should be XAI"

    def test_code_interpreter_container_field_removed(self):
        """Test that container field is removed from code_interpreter tools"""
        cfg = XAIResponsesAPIConfig()

        tool_with_container = {
            "type": "code_interpreter",
            "container": {"type": "auto"},
        }
        request_params = ResponsesAPIOptionalRequestParams(
            tools=[tool_with_container]
        )

        mapped = cfg.map_openai_params(
            response_api_optional_params=request_params,
            model="grok-4-fast",
            drop_params=False,
        )

        mapped_tools = mapped.get("tools")
        assert "tools" in mapped
        assert len(mapped_tools) == 1
        first_tool = mapped_tools[0]
        assert first_tool["type"] == "code_interpreter"
        assert "container" not in first_tool, "Container field should be removed"

    def test_instructions_parameter_dropped(self):
        """Test that instructions parameter is dropped for XAI"""
        cfg = XAIResponsesAPIConfig()

        request_params = ResponsesAPIOptionalRequestParams(
            instructions="You are a helpful assistant.",
            temperature=0.7,
        )

        mapped = cfg.map_openai_params(
            response_api_optional_params=request_params,
            model="grok-4-fast",
            drop_params=False,
        )

        assert "instructions" not in mapped, "Instructions should be dropped"
        assert mapped.get("temperature") == 0.7, "Other params should be preserved"

    def test_supported_params_excludes_instructions(self):
        """Test that get_supported_openai_params excludes instructions"""
        supported = XAIResponsesAPIConfig().get_supported_openai_params("grok-4-fast")

        # 'instructions' must be filtered out; common params stay intact.
        assert "instructions" not in supported, "instructions should not be supported"
        for expected_param in ("tools", "temperature", "model"):
            assert expected_param in supported, f"{expected_param} should be supported"

    def test_xai_responses_endpoint_url(self):
        """Test that get_complete_url returns correct XAI endpoint"""
        cfg = XAIResponsesAPIConfig()

        # Default base, explicit base, and trailing-slash handling.
        default_url = cfg.get_complete_url(api_base=None, litellm_params={})
        assert default_url == "https://api.x.ai/v1/responses", (
            f"Expected XAI responses endpoint, got {default_url}"
        )

        custom_url = cfg.get_complete_url(
            api_base="https://custom.x.ai/v1",
            litellm_params={},
        )
        assert custom_url == "https://custom.x.ai/v1/responses", (
            f"Expected custom endpoint, got {custom_url}"
        )

        slash_url = cfg.get_complete_url(
            api_base="https://api.x.ai/v1/",
            litellm_params={},
        )
        assert slash_url == "https://api.x.ai/v1/responses", "Should handle trailing slash"
112+

0 commit comments

Comments
 (0)