From aa25ad1d54a1b6a507ac34043c7a53668520cd53 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 17 Dec 2024 10:52:28 -0800
Subject: [PATCH] o1-preview and o1-mini can stream now

Refs https://github.com/simonw/llm/issues/676#issuecomment-2549328154
---
 llm/default_plugins/openai_models.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/llm/default_plugins/openai_models.py b/llm/default_plugins/openai_models.py
index 3d418582..57ec615e 100644
--- a/llm/default_plugins/openai_models.py
+++ b/llm/default_plugins/openai_models.py
@@ -63,12 +63,12 @@ def register_models(register):
     )
     # o1
     register(
-        Chat("o1-preview", can_stream=False, allows_system_prompt=False),
-        AsyncChat("o1-preview", can_stream=False, allows_system_prompt=False),
+        Chat("o1-preview", allows_system_prompt=False),
+        AsyncChat("o1-preview", allows_system_prompt=False),
     )
     register(
-        Chat("o1-mini", can_stream=False, allows_system_prompt=False),
-        AsyncChat("o1-mini", can_stream=False, allows_system_prompt=False),
+        Chat("o1-mini", allows_system_prompt=False),
+        AsyncChat("o1-mini", allows_system_prompt=False),
     )
     # The -instruct completion model
     register(