Skip to content

Commit

Permalink
Merge pull request #223 from 2077-Collective/fix/summary-generation-too-short
Browse files Browse the repository at this point in the history

chore: remove max tokens limit
(branch information omitted — page captured before it finished loading)
iankressin authored Jan 8, 2025
2 parents 3a854b6 + 47b6792 commit 13bbfe2
Showing 1 changed file with 0 additions and 2 deletions.
2 changes: 0 additions & 2 deletions server/apps/research/services/gpt_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ class GPTService:
def __init__(self):
    """Configure the GPT service: model selection, token cap, and async OpenAI client."""
    # Model and completion-length settings used by prompt().
    self.model = "gpt-4o"
    self.max_tokens = 500
    # NOTE(review): assumes settings.OPENAI_API_KEY is populated — verify deployment config.
    self.client = AsyncOpenAI(api_key=settings.OPENAI_API_KEY)

async def prompt(self, system: str, user: str) -> str:
"""
Expand All @@ -34,7 +33,6 @@ async def prompt(self, system: str, user: str) -> str:
{"role": "system", "content": system},
{"role": "user", "content": self.clear_message(user)}
],
max_tokens=self.max_tokens
)
# Access the response content directly from the completion object
return completion.choices[0].message.content
Expand Down

0 comments on commit 13bbfe2

Please sign in to comment.