Skip to content

Commit

Permalink
Remove the standalone OpenAI client; use the Langfuse-wrapped OpenAI client instead.
Browse files Browse the repository at this point in the history
  • Loading branch information
milistu committed Jun 4, 2024
1 parent 21e0867 commit 5de9814
Showing 1 changed file with 9 additions and 13 deletions.
22 changes: 9 additions & 13 deletions utils.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@
import json
import os
from typing import Dict, Generator, List, Tuple
from typing import Dict, Generator, List

import streamlit as st
import yaml
from langfuse.decorators import observe
from langfuse.decorators import langfuse_context, observe
from langfuse.openai import openai
from loguru import logger
from openai import OpenAI
from openai.types.chat import ChatCompletion
from pydantic import BaseModel
from qdrant_client import QdrantClient
Expand Down Expand Up @@ -74,7 +73,7 @@ def load_config(yaml_file_path: str = "./config.yaml") -> Config:


@st.cache_resource
def initialize_clients() -> Tuple[OpenAI, QdrantClient]:
def initialize_clients() -> QdrantClient:
"""
    Initializes and returns the client for the Qdrant service.
Expand All @@ -90,18 +89,14 @@ def initialize_clients() -> Tuple[OpenAI, QdrantClient]:
qdrant_api_key = os.environ["QDRANT_API_KEY"]
qdrant_client = QdrantClient(url=qdrant_url, api_key=qdrant_api_key)

# Retrieve OpenAI client configuration from environment variables
openai_api_key = os.environ["OPENAI_API_KEY"]
openai_client = OpenAI(api_key=openai_api_key)

return openai_client, qdrant_client
return qdrant_client
except KeyError as e:
error_msg = f"Missing environment variable: {str(e)}"
logger.error(error_msg)
raise EnvironmentError(error_msg)


@observe()
@observe(as_type="generation")
def call_llm(
model: str,
temperature: float,
Expand Down Expand Up @@ -132,14 +127,13 @@ def call_llm(

@observe()
def generate_response(
query: str, openai_client: OpenAI, qdrant_client: QdrantClient, config: Config
query: str, qdrant_client: QdrantClient, config: Config
) -> Generator[str, None, None]:
"""
Generates a response for a given user query using a combination of semantic search and a chat model.
Args:
- query (str): The user's query string.
- openai_client (OpenAI): Client to interact with OpenAI's API.
- qdrant_client (QdrantClient): Client to interact with Qdrant's API.
- config (Config): Configuration settings for API interaction and response handling.
Expand All @@ -162,10 +156,10 @@ def generate_response(
)
collections = json.loads(response.choices[0].message.content)["response"]
logger.info(f"Query routed to collections: {collections}")
langfuse_context.update_current_trace(tags=collections)

# Embed the user query using the specified model in the configuration
embedding_response = embed_text(
client=openai_client,
text=query,
model=config.openai.embeddings.model,
)
Expand All @@ -191,6 +185,8 @@ def generate_response(
if part is not None:
yield part

langfuse_context.flush()

except Exception as e:
logger.error(f"An error occurred while generating the response: {str(e)}")
yield "Sorry, an error occurred while processing your request."
Expand Down

0 comments on commit 5de9814

Please sign in to comment.