From a59fb9112d3e487313cc494a05342a7173c78762 Mon Sep 17 00:00:00 2001
From: Dmitri Pikus
Date: Wed, 30 Jul 2025 15:51:28 +0300
Subject: [PATCH] Fix in Completions API prompt

---
 synthetic-multi-round-qa/multi-round-qa.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/synthetic-multi-round-qa/multi-round-qa.py b/synthetic-multi-round-qa/multi-round-qa.py
index 82141be..e341643 100644
--- a/synthetic-multi-round-qa/multi-round-qa.py
+++ b/synthetic-multi-round-qa/multi-round-qa.py
@@ -173,7 +173,7 @@ async def _async_launch_request(self, messages: List[Dict[str, str]], max_token
         else:
             # Use completions API
             # Convert messages to a prompt string
-            prompt = "\n".join([f"{msg['role'].upper()}: {msg['content']}" for msg in messages])
+            prompt = "\n".join([f"{msg['role'].upper()}: {msg['content']}" for msg in messages]) + "\nASSISTANT:"
 
             response = await self.client.completions.create(
                 model=self.model,
@@ -213,7 +213,7 @@ async def _async_launch_request(self, messages: List[Dict[str, str]], max_token
                     stream=False,
                 )
             else:
-                prompt = "\n".join([f"{msg['role'].upper()}: {msg['content']}" for msg in messages])
+                prompt = "\n".join([f"{msg['role'].upper()}: {msg['content']}" for msg in messages]) + "\nASSISTANT:"
                 final_response = await self.client.completions.create(
                     model=self.model,
                     prompt=prompt,
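
For reference, below is a minimal, self-contained sketch (not part of the patch) of the prompt construction that both hunks change. The helper name build_completions_prompt and the demo messages are hypothetical; the flattening logic and the trailing "\nASSISTANT:" cue mirror the patched lines, whose purpose is to make a plain completions endpoint generate the next assistant turn instead of continuing the last user message.

# Minimal sketch (not from the repo) of the prompt construction changed above.
from typing import Dict, List

def build_completions_prompt(messages: List[Dict[str, str]]) -> str:
    # Flatten each chat message into a "ROLE: content" line, then append the
    # assistant cue so the completions model produces the next assistant turn
    # rather than continuing the last user message.
    prompt = "\n".join(f"{msg['role'].upper()}: {msg['content']}" for msg in messages)
    return prompt + "\nASSISTANT:"

if __name__ == "__main__":
    demo = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "What is the capital of France?"},
    ]
    print(build_completions_prompt(demo))
    # Expected output:
    # SYSTEM: You are a helpful assistant.
    # USER: What is the capital of France?
    # ASSISTANT: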