Skip to content

Commit

Permalink
Update deprecated APIs: replace Pydantic v1 `.json()` calls with `.model_dump_json()` and drop the obsolete `typer[all]` extra
Browse files Browse the repository at this point in the history
  • Loading branch information
zzstoatzz committed Sep 17, 2024
1 parent a234180 commit 3d493f6
Show file tree
Hide file tree
Showing 3 changed files with 6 additions and 4 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ dependencies = [
"pydantic-settings>=2.2.1",
"textual>=0.61.1",
"tiktoken>=0.7.0",
"typer[all]>=0.10",
"typer>=0.10",
]
readme = "README.md"
requires-python = ">= 3.9"
Expand Down
4 changes: 2 additions & 2 deletions src/controlflow/agents/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -305,7 +305,7 @@ def _run_model(
#### Payload
```json
{response.json(indent=2)}
{response.model_dump_json(indent=2)}
```
""",
description=f"LLM Response for Agent {self.name}",
Expand Down Expand Up @@ -362,7 +362,7 @@ async def _run_model_async(
#### Payload
```json
{response.json(indent=2)}
{response.model_dump_json(indent=2)}
```
""",
description=f"LLM Response for Agent {self.name}",
Expand Down
4 changes: 3 additions & 1 deletion src/controlflow/events/message_compiler.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,9 @@ def format_message_name(

def count_tokens(message: BaseMessage) -> int:
# always use gpt-3.5 token counter with the entire message object; we only need to be approximate here
return len(tiktoken.encoding_for_model("gpt-3.5-turbo").encode(message.json()))
return len(
tiktoken.encoding_for_model("gpt-3.5-turbo").encode(message.model_dump_json())
)


def trim_messages(
Expand Down

0 comments on commit 3d493f6

Please sign in to comment.