Skip to content

Commit

Permalink
use initial messages from config
Browse files Browse the repository at this point in the history
  • Loading branch information
jjoller committed Feb 12, 2025
1 parent b4433d6 commit cc3f7d4
Show file tree
Hide file tree
Showing 8 changed files with 100 additions and 119 deletions.
3 changes: 1 addition & 2 deletions client/src/app/chat-backend.model.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,7 @@ export interface Chat {
stream: false,
temperature: 0,
model: "gpt-4o-mini",
conversationId: string,
tenantIds: number[]
conversationId: string
}

export interface ChatResponse {
Expand Down
6 changes: 6 additions & 0 deletions client/src/app/chat-backend.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,4 +24,10 @@ export class ChatBackendService {
`${this.endpoint}/chat`, chat
);
}

/**
 * Fetch the public configuration for a single chatbot.
 *
 * @param chatBotId id of the chatbot whose config should be loaded
 * @returns observable emitting the chatbot configuration
 */
getConfig(chatBotId: string): Observable<Chatbot> {
    // Single template literal for the whole URL (consistent with the
    // other endpoint calls in this service) and explicit semicolons.
    return this.http.get<Chatbot>(
        `${this.endpoint}/chat-config/${chatBotId}`
    );
}
}
40 changes: 18 additions & 22 deletions client/src/app/learn-page/learn-page.component.ts
Original file line number Diff line number Diff line change
Expand Up @@ -36,28 +36,24 @@ export class LearnPageComponent implements OnInit, OnDestroy {
/**
 * Resolve the chatbot id from the route, load its configuration and seed
 * the chat with the configured initial assistant messages.
 */
ngOnInit() {
    this.subscriptions.push(
        this.route.params.subscribe((params) => {
            // Track the config subscription too, so it is cleaned up in
            // ngOnDestroy instead of leaking (it was previously untracked).
            this.subscriptions.push(
                this.chatBackendService.getConfig(params['chatbotId']).subscribe((chatBot: Chatbot) => {
                    this.chat = {
                        // Seed the backend conversation with the configured
                        // initial messages, attributed to the assistant.
                        messages: chatBot?.initial_messages?.map(message => ({content: message, role: "assistant"})) || [],
                        chatbotId: params['chatbotId'],
                        temperature: 0,
                        model: "gpt-4o-mini",
                        // Timestamp doubles as a unique-enough conversation id.
                        conversationId: new Date().toISOString(),
                        stream: false,
                    };
                    // Mirror the initial messages into the visible chat history.
                    this.chatMessages = chatBot?.initial_messages?.map(message => ({isTaskbase: true, text: message})) || [];
                    this.subscriptions.push(
                        this.chatService.messageEvent.subscribe((text: string) => {
                            this.handleUserMessage(text);
                        })
                    );
                })
            );
        })
    );
}
Expand Down
34 changes: 3 additions & 31 deletions server/src/aitutor/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
from pydantic import BaseModel
from typing_extensions import TypedDict
from aitutor.models import ChatConfig
from aitutor.configuration import chat_config
import logging

# Set logging level
Expand All @@ -34,19 +35,11 @@ class State(TypedDict):
llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")
llm_with_tools = llm.bind_tools(tools)

system_prompt = (
"You are an interactive AI tutor. You ask the student what they want to learn and challenge them with exercises. "
"You provide feedback for the student's answers. "
"If the student answers incorrectly, offer helpful hints instead of direct answers. "
"After any clarification, always return to the exercise to keep the learning experience interactive and engaging. "
"Only use the tool for generating exercises. "
"Always respond in the same language as the exercises."
)


def chatbot(state: State, config: RunnableConfig):
    """LLM graph node: ensure the system message is present, then invoke the model.

    Args:
        state: Graph state holding the running ``messages`` list.
        config: Runnable config carrying the per-chatbot ``chat_config``.

    Returns:
        dict: State fragment with the model's response message appended.
    """
    # The system message must be the first message. Only insert it when it is
    # not already there, otherwise every turn would prepend a duplicate
    # SystemMessage to the conversation.
    if not state["messages"] or not isinstance(state["messages"][0], SystemMessage):
        state["messages"].insert(0, SystemMessage(content=chat_config(config).system_message))
    return {"messages": [llm_with_tools.invoke(state["messages"], config=config)]}


Expand Down Expand Up @@ -79,32 +72,11 @@ def stream_message(message: str, config: RunnableConfig):
)
response_messages = []
for event in events:
logging.info("Event details: %s", event)
response_messages.append(event["messages"][-1].content)
return {"response": response_messages[-1] if response_messages else "No response generated."}


def ai_tutor_chat_call(message: Message, user_id: str, chat_config: ChatConfig):
    """Run one AI-tutor chat turn for the given user.

    Args:
        message: Incoming user message; only its ``content`` is forwarded.
        user_id: Identifier used as both thread id and user id in the config.
        chat_config: Per-chatbot configuration (lap token, tenant ids, ...).

    Returns:
        dict: Result of ``stream_message`` (``{"response": ...}``).
    """
    # Pre-load the RAG store for this tenant set before the chat turn runs.
    warmup_rag_store(lap_token=chat_config.lap_token, tenant_ids=chat_config.tenant_ids)
    # NOTE(review): the ``chat_config`` parameter shadows the module-level
    # ``chat_config`` helper imported from aitutor.configuration — consider
    # renaming one of them to avoid confusion.
    config = {"configurable": {"thread_id": user_id, "user_id": user_id, "chat_config": chat_config}}
    return stream_message(message.content, config)

# def input_chat(user_input: str, role: str = "user"):
# events = graph.stream(
# {"messages": [{"role": role, "content": user_input}]},
# config,
# stream_mode="values",
# )
# for event in events:
# event["messages"][-1].pretty_print()
#
#
# input_chat("Hallo")
#
# while True:
# user_input = input("User: ")
# if user_input.lower() in ["quit", "exit", "q"]:
# print("Goodbye!")
# break
# input_chat(user_input)
8 changes: 8 additions & 0 deletions server/src/aitutor/configuration.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from aitutor.models import ChatConfig
import boto3
from langchain_core.runnables import RunnableConfig
from typing import Optional

s3_client = boto3.client("s3")
bucket_name = settings.bucket_name
Expand All @@ -22,6 +23,13 @@ def get_config():
return config


def get_chat_config(chatbot_id: str) -> Optional[ChatConfig]:
    """Return the ChatConfig whose id matches ``chatbot_id``, or None if absent."""
    for candidate in get_config().chat_configurations:
        if candidate.id == chatbot_id:
            return candidate
    return None


def _synchronize_config():
global last_modified_time, config, last_check_time
if last_check_time is None or last_check_time + timedelta(seconds=check_interval_seconds) < datetime.now():
Expand Down
13 changes: 13 additions & 0 deletions server/src/aitutor/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,12 @@ class Feedback(BaseModel):
criteria: list[Criteria]


class PublicChatConfig(BaseModel):
    """Client-facing view of a chatbot configuration.

    Contains only the fields that are safe to expose to the frontend
    (see ``ChatConfig.to_public``, which projects down to these fields).
    """
    # Unique chatbot identifier.
    id: str
    # Human-readable chatbot name.
    name: str
    # Assistant messages shown at the start of a new conversation.
    initial_messages: list[str]


class ChatConfig(BaseModel):
id: str
name: str
Expand All @@ -32,6 +38,13 @@ class ChatConfig(BaseModel):
system_message: str
initial_messages: list[str]

def to_public(self) -> PublicChatConfig:
    """Project this configuration down to its client-facing fields."""
    public_fields = {
        "id": self.id,
        "name": self.name,
        "initial_messages": self.initial_messages,
    }
    return PublicChatConfig(**public_fields)


class Configuration(BaseModel):
chat_configurations: list[ChatConfig]
42 changes: 19 additions & 23 deletions server/src/aitutor/tools/recommend_exercise_tool.py
Original file line number Diff line number Diff line change
@@ -1,33 +1,19 @@
import json
import random
import logging
import random
from typing import Optional

import yake
from aitutor.configuration import chat_config
from aitutor.configuration import resolve_runnable_config_param
from aitutor.models import Task
from aitutor.rag import get_rag_store
from cachetools import TTLCache
from langchain_core.runnables import RunnableConfig
from langchain_core.tools import tool
from aitutor.configuration import chat_config


# Per-user record of task ids the user has already been shown, keyed by
# user id. Entries expire after 24 hours so a user can eventually be
# recommended the same task again. (Replaces the former unbounded
# UserTaskHistory class, which never evicted entries.)
history = TTLCache(maxsize=10000, ttl=24 * 60 * 60)


@tool(parse_docstring=True)
Expand All @@ -44,8 +30,6 @@ def recommend_exercise_tool(topic: str, config: RunnableConfig) -> Optional[Task
Returns:
Optional[Task]: The most relevant exercise if found, otherwise None.
"""
print(f"RUNNABLE CONFIG recommend: {config}")

tenant_ids = chat_config(config=config).tenant_ids
lap_token = chat_config(config=config).lap_token
user_id = resolve_runnable_config_param(key="user_id", config=config)
Expand All @@ -56,13 +40,25 @@ def recommend_exercise_tool(topic: str, config: RunnableConfig) -> Optional[Task
# Extract the task
task = json_to_task(json.loads(doc.page_content))
# Make sure the user does not get a task they already saw.
if not history.has_id(user_id, task.id):
history.add_id(user_id=user_id, id=task.id)
if not _has_solved(user_id, task.id):
_add_solved_task(user_id=user_id, task_id=task.id)
logging.info(f"recommended task: %s", task)
return task
return None


def _add_solved_task(user_id: str, task_id: str) -> None:
    """Record that ``user_id`` has been shown the task ``task_id``.

    Args:
        user_id: User identifier. Annotated ``str`` (was incorrectly ``int``):
            callers pass the string user id resolved from the runnable config.
        task_id: Id of the recommended task.
    """
    if user_id not in history:
        # First task for this user: start a fresh set in the TTL cache.
        history[user_id] = set()
    history[user_id].add(task_id)


def _has_solved(user_id: str, task_id: str) -> bool:
    """Return True if ``user_id`` has already been shown ``task_id``.

    ``user_id`` is annotated ``str`` (was incorrectly ``int``), matching the
    string ids used by the callers and by ``_add_solved_task``.
    """
    return task_id in history.get(user_id, set())


@tool(parse_docstring=True)
def get_list_of_topics(topic: Optional[str], config: RunnableConfig) -> list[str]:
"""Get a list of topics that the chatbot has exercises for.
Expand Down
73 changes: 32 additions & 41 deletions server/src/controllers/chat.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,16 @@
import httpx
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import List
from typing import List, Optional
from aitutor.agent import ai_tutor_chat_call as ai_tutor_chat_call
from app_settings import settings
from models import Message
from aitutor.configuration import get_config
from aitutor.configuration import get_chat_config
from aitutor.models import ChatConfig, PublicChatConfig

router = APIRouter()

url = "https://www.chatbase.co/api/v1"
chatbase_url = "https://www.chatbase.co/api/v1"


class ChatRequest(BaseModel):
Expand All @@ -19,60 +20,37 @@ class ChatRequest(BaseModel):
temperature: float
model: str
conversationId: str
tenantIds: List[int] = []


class ChatDefinition(BaseModel):
id: str
name: str
messages: List[Message]
chatbotId: str
stream: bool
temperature: float
model: str
conversationId: str
initial_messages: List[str]


grammar_trainer = ChatDefinition(
id="grammar_trainer",
name="Grammar Trainer",
messages=[],
chatbotId="grammar_trainer",
stream=False,
temperature=0,
model="",
conversationId="",
initial_messages=[
"Hi!"
],
)


@router.get("/api/chat/get-chatbots")
def get_chatbots():
    """List all chatbots available via the Chatbase API as public configs."""
    return _get_chatbase_configs()


def _get_chatbase_configs():
    """Fetch all chatbots from the Chatbase API as ``PublicChatConfig`` objects.

    Returns:
        list[PublicChatConfig]: One entry per chatbot reported by Chatbase.

    Raises:
        HTTPException: If the Chatbase API responds with a non-200 status.
    """
    headers = {"accept": "application/json", "Authorization": f"Bearer {settings.chatbase_api_key}"}
    timeout = httpx.Timeout(10)
    response = httpx.get(url=chatbase_url + "/get-chatbots", headers=headers, timeout=timeout)
    if response.status_code != 200:
        # Surface the upstream error verbatim to the caller.
        raise HTTPException(status_code=response.status_code, detail=response.text)
    chat_bot_dicts = response.json()['chatbots']['data']
    # NOTE(review): assumes every Chatbase entry carries "initial_messages" —
    # confirm against the Chatbase API response schema.
    return [
        PublicChatConfig(
            id=src["id"],
            name=src["name"],
            initial_messages=src["initial_messages"],
        )
        for src in chat_bot_dicts
    ]


@router.post("/api/chat")
def chat(request: ChatRequest):
config = get_config()
print(f"config: {config}\nchatbot id: {request.chatbotId}")
# Check if there is a matching AI Tutor configuration
ai_tutor_config = next((chatbot_config for chatbot_config in config.chat_configurations if chatbot_config.id == request.chatbotId), None)
print(f"ai_tutor_config: {ai_tutor_config}")
if ai_tutor_config:
chat_config = get_chat_config(request.chatbotId)
if chat_config:
response = ai_tutor_chat_call(
request.messages[-1],
request.conversationId,
ai_tutor_config
chat_config
)
return {"message": response["response"]}
else:
Expand All @@ -84,3 +62,16 @@ def chat(request: ChatRequest):
raise HTTPException(status_code=response.status_code, detail=response.text)
response_data = response.json()
return {"message": response_data["text"]}


@router.get("/api/chat-config/{chat_id}")
def chat_config(chat_id: str):
    """Return the public configuration for a chatbot.

    Prefers the local AI-tutor configuration; falls back to the Chatbase
    catalogue when no local config exists.

    Raises:
        HTTPException: 404 if the chatbot id is unknown in both sources.
    """
    config: Optional[ChatConfig] = get_chat_config(chat_id)
    if config is not None:
        # Strip server-only fields before returning to the client.
        return config.to_public()
    # Fall back to the Chatbase-hosted chatbots. Use a distinct loop variable
    # (the original comprehension shadowed the outer ``config`` name).
    chatbase_config = next((c for c in _get_chatbase_configs() if c.id == chat_id), None)
    if chatbase_config is None:
        raise HTTPException(status_code=404, detail=f"Chat config not found for chat_id: {chat_id}")
    return chatbase_config

0 comments on commit cc3f7d4

Please sign in to comment.