Add compilation flag for removing all system messages #355

Merged: 3 commits, Oct 9, 2024
28 changes: 18 additions & 10 deletions src/controlflow/events/message_compiler.py
@@ -113,19 +113,25 @@ def convert_system_messages(
     messages: list[BaseMessage], rules: LLMRules
 ) -> list[BaseMessage]:
     """
-    Converts system messages to human messages if the LLM doesnt support system messages.
+    Converts system messages to human messages if the LLM doesnt support system
+    messages, either at all or in the first position.
     """
-    if not messages or not rules.require_system_message_first:
-        return messages
-
     new_messages = []
-    for message in messages:
+    for i, message in enumerate(messages):
         if isinstance(message, SystemMessage):
-            new_messages.append(
-                HumanMessage(
-                    content=f"ORCHESTRATOR: {message.content}", name=message.name
+            # If system messages are not supported OR if they must be first and
+            # this is not the first message, THEN convert the message to a human message
+            if not rules.allow_system_messages or (
+                i > 0 and rules.require_system_message_first
+            ):
+                new_messages.append(
+                    HumanMessage(
+                        content=f"ORCHESTRATOR: {message.content}", name=message.name
+                    )
                 )
-            )
+            else:
+                # If the system message is allowed, add it as-is
+                new_messages.append(message)
         else:
             new_messages.append(message)
     return new_messages
@@ -249,7 +255,9 @@ def compile_to_messages(self, agent: "Agent") -> list[BaseMessage]:
         messages = break_up_consecutive_ai_messages(messages, rules=context.llm_rules)
         messages = format_message_name(messages, rules=context.llm_rules)
 
+        messages = system_prompt + messages
+
         # this should go last
         messages = convert_system_messages(messages, rules=context.llm_rules)
 
-        return system_prompt + messages
+        return messages
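
A minimal usage sketch of the new behavior (not part of the diff): with allow_system_messages=False, every SystemMessage in the compiled thread, including the agent's system prompt that is now prepended before conversion, is rewritten as a HumanMessage prefixed with "ORCHESTRATOR:". The bare LLMRules(...) construction below is an assumption for illustration; in practice the rules object is derived from the agent's model and may require additional fields.

from langchain_core.messages import HumanMessage, SystemMessage

from controlflow.events.message_compiler import convert_system_messages
from controlflow.llm.rules import LLMRules

# Hypothetical rules object for a provider that has no system role at all.
rules = LLMRules(allow_system_messages=False)

messages = [
    SystemMessage(content="<agent system prompt>"),
    HumanMessage(content="<user input>"),
    SystemMessage(content="<mid-thread instruction>"),
]

compiled = convert_system_messages(messages, rules=rules)
# Every system message is now a human message prefixed with "ORCHESTRATOR: ",
# so the provider never sees the system role.
assert not any(isinstance(m, SystemMessage) for m in compiled)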
3 changes: 3 additions & 0 deletions src/controlflow/llm/rules.py
@@ -22,6 +22,9 @@ class LLMRules(ControlFlowModel):
     # require at least one non-system message
     require_at_least_one_message: bool = False
 
+    # system messages are supported as a role
+    allow_system_messages: bool = True
+
     # system messages can only be provided as the very first message in a thread
     require_system_message_first: bool = False

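A sketch of how the new flag could be consumed by provider-specific rules; the subclass names below are hypothetical and not part of this PR:

from controlflow.llm.rules import LLMRules


# Hypothetical provider rules (illustration only). A provider whose API has no
# system role at all would flip the new flag, while one that only accepts a
# leading system message keeps using the existing flag.
class NoSystemRoleRules(LLMRules):
    allow_system_messages: bool = False


class LeadingSystemMessageRules(LLMRules):
    require_system_message_first: bool = True
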
4 changes: 2 additions & 2 deletions tests/test_run.py
@@ -93,7 +93,7 @@ def test_min_failed(self):
         task2 = Task("Task 2")
         task3 = Task("Task 3")
 
-        with instructions("fail tasks 1 and 3"):
+        with instructions("fail tasks 1 and 3. Don't work on task 2."):
             run_tasks(
                 [task1, task2, task3],
                 run_until=AnyFailed(min_failed=2),
@@ -157,7 +157,7 @@ async def test_min_failed(self):
         task2 = Task("Task 2")
         task3 = Task("Task 3")
 
-        with instructions("fail tasks 1 and 3"):
+        with instructions("fail tasks 1 and 3. Don't work on task 2."):
             await run_tasks_async(
                 [task1, task2, task3],
                 run_until=AnyFailed(min_failed=2),