
Commit 44ca723

Clean up completions
1 parent e304245 commit 44ca723

3 files changed: 18 additions & 15 deletions

src/controlflow/core/controller/controller.py

Lines changed: 0 additions & 1 deletion
@@ -130,7 +130,6 @@ async def _run_agent(self, agent: Agent, tasks: list[Task] = None):
             tools=tools,
             handlers=[TUIHandler()] if controlflow.settings.enable_tui else None,
             max_iterations=1,
-            yield_deltas=False,
         ):
             response_messages.append(msg)
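
For context, this hunk is only a parameter removal: yield_deltas=False is no longer passed to the streaming completion call inside _run_agent, so the callee's default now applies. A minimal sketch of the call site after the change, assuming the callee is completion_stream_async from controlflow.llm.completions (the callee name and the messages/model arguments are not visible in this hunk and are placeholders):

    # Sketch only: the hunk shows just the keyword arguments and the loop body.
    async for msg in completion_stream_async(       # assumed callee
        messages=messages,                          # assumed, not in the hunk
        model=agent.model,                          # assumed, not in the hunk
        tools=tools,
        handlers=[TUIHandler()] if controlflow.settings.enable_tui else None,
        max_iterations=1,
        # yield_deltas=False was removed by this commit
    ):
        response_messages.append(msg)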

src/controlflow/llm/completions.py

Lines changed: 16 additions & 12 deletions
@@ -57,11 +57,12 @@ def completion(
 
     counter = 0
     while not response_messages or get_tool_calls(response_messages):
+        completion_messages = trim_messages(
+            as_oai_messages(messages + new_messages), model=model
+        )
         response = litellm.completion(
             model=model,
-            messages=trim_messages(
-                messages + as_oai_messages(new_messages), model=model
-            ),
+            messages=completion_messages,
             tools=[t.model_dump() for t in tools] if tools else None,
             **kwargs,
         )
@@ -129,11 +130,12 @@ def completion_stream(
 
     counter = 0
     while not snapshot_message or get_tool_calls([snapshot_message]):
+        completion_messages = trim_messages(
+            as_oai_messages(messages + new_messages), model=model
+        )
         response = litellm.completion(
             model=model,
-            messages=trim_messages(
-                messages + as_oai_messages(new_messages), model=model
-            ),
+            messages=completion_messages,
             tools=[t.model_dump() for t in tools] if tools else None,
             stream=True,
             **kwargs,
@@ -217,11 +219,12 @@ async def completion_async(
 
     counter = 0
     while not response_messages or get_tool_calls(response_messages):
+        completion_messages = trim_messages(
+            as_oai_messages(messages + new_messages), model=model
+        )
         response = await litellm.acompletion(
             model=model,
-            messages=trim_messages(
-                messages + as_oai_messages(new_messages), model=model
-            ),
+            messages=completion_messages,
             tools=[t.model_dump() for t in tools] if tools else None,
             **kwargs,
         )
@@ -287,11 +290,12 @@ async def completion_stream_async(
 
     counter = 0
     while not snapshot_message or get_tool_calls([snapshot_message]):
+        completion_messages = trim_messages(
+            as_oai_messages(messages + new_messages), model=model
+        )
         response = await litellm.acompletion(
             model=model,
-            messages=trim_messages(
-                messages + as_oai_messages(new_messages), model=model
-            ),
+            messages=completion_messages,
             tools=[t.model_dump() for t in tools] if tools else None,
             stream=True,
             **kwargs,
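
All four variants (completion, completion_stream, completion_async, completion_stream_async) receive the same two-part change: the message list sent to litellm is now built in a local completion_messages variable before the call, and as_oai_messages is applied to the combined messages + new_messages list instead of only to new_messages, so the prior history is converted to OpenAI-style dicts before trim_messages counts tokens. A minimal sketch of the shared pattern, lifted from the loop bodies above (imports, loop bookkeeping, and response handling are elided):

    while not response_messages or get_tool_calls(response_messages):
        # Convert the full history plus any newly generated messages to
        # OpenAI-style dicts, then trim the list to fit the model's context.
        completion_messages = trim_messages(
            as_oai_messages(messages + new_messages), model=model
        )
        response = litellm.completion(
            model=model,
            messages=completion_messages,
            tools=[t.model_dump() for t in tools] if tools else None,
            **kwargs,
        )
        # ...append response messages, run any tool calls, increment counter...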

src/controlflow/utilities/types.py

Lines changed: 2 additions & 2 deletions
@@ -272,12 +272,12 @@ def as_cf_messages(
     return result
 
 
-def as_oai_messages(messages: list[Union[ControlFlowMessage, litellm.Message]]):
+def as_oai_messages(messages: list[Union[dict, ControlFlowMessage, litellm.Message]]):
     result = []
     for msg in messages:
         if isinstance(msg, ControlFlowMessage):
             result.append(msg.as_openai_message())
-        elif isinstance(msg, litellm.Message):
+        elif isinstance(msg, (dict, litellm.Message)):
             result.append(msg)
         else:
             raise ValueError(f"Invalid message type: {type(msg)}")
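
Widening the signature to accept plain dicts is what lets the as_oai_messages(messages + new_messages) calls above work when the incoming history already contains OpenAI-style dicts: those entries now pass through unchanged instead of raising ValueError. A small hypothetical usage sketch (the import path is inferred from the file location; the example messages are illustrative):

    from controlflow.utilities.types import as_oai_messages

    history = [
        {"role": "system", "content": "You are a helpful agent."},
        {"role": "user", "content": "Summarize the repository."},
    ]
    # Plain dicts are passed through as-is; ControlFlowMessage instances are
    # converted via .as_openai_message(); litellm.Message instances pass through.
    assert as_oai_messages(history) == history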
