Skip to content

Commit

Permalink
fix: max_tokens in O1 (#17703)
Browse files — browse the repository at this point in the history
  • Loading branch information
ex0ns authored Feb 3, 2025
1 parent 4c81fff commit 8a3feea
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -424,7 +424,7 @@ def _get_model_kwargs(self, **kwargs: Any) -> Dict[str, Any]:
all_kwargs = {**base_kwargs, **self.additional_kwargs}
if "stream" not in all_kwargs and "stream_options" in all_kwargs:
del all_kwargs["stream_options"]
if self.model in O1_MODELS and base_kwargs["max_tokens"] is not None:
if self.model in O1_MODELS and base_kwargs.get("max_tokens") is not None:
# O1 models use max_completion_tokens instead of max_tokens
all_kwargs["max_completion_tokens"] = all_kwargs.get(
"max_completion_tokens", all_kwargs["max_tokens"]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-openai"
readme = "README.md"
version = "0.3.16"
version = "0.3.17"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
Expand Down

0 comments on commit 8a3feea

Please sign in to comment.