From 90b5e8719ae73cc81e5ada8b0e394afa4f8b6151 Mon Sep 17 00:00:00 2001 From: P47Parzival Date: Tue, 18 Nov 2025 10:30:50 +0530 Subject: [PATCH 1/4] Change default gemini-2.5-flash-preview-04-17 to gemini-2.5-flash-preview-05-2025, cause default is depreceated and not useful --- frontend/src/components/InputForm.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/InputForm.tsx b/frontend/src/components/InputForm.tsx index 97aa5c67..a413006a 100644 --- a/frontend/src/components/InputForm.tsx +++ b/frontend/src/components/InputForm.tsx @@ -26,7 +26,7 @@ export const InputForm: React.FC = ({ }) => { const [internalInputValue, setInternalInputValue] = useState(""); const [effort, setEffort] = useState("medium"); - const [model, setModel] = useState("gemini-2.5-flash-preview-04-17"); + const [model, setModel] = useState("gemini-2.5-flash-preview-05-2025"); const handleInternalSubmit = (e?: React.FormEvent) => { if (e) e.preventDefault(); @@ -144,7 +144,7 @@ export const InputForm: React.FC = ({
From eec963f77b2ee19066391535207cc49c73e818bf Mon Sep 17 00:00:00 2001 From: P47Parzival Date: Tue, 18 Nov 2025 13:37:23 +0530 Subject: [PATCH 2/4] Initial ui enhancement --- frontend/src/components/ActivityTimeline.tsx | 54 ++--- frontend/src/components/ChatMessagesView.tsx | 195 +++++++++++++-- frontend/src/components/InputForm.tsx | 4 +- frontend/src/components/WelcomeScreen.tsx | 235 +++++++++++++++++-- 4 files changed, 423 insertions(+), 65 deletions(-) diff --git a/frontend/src/components/ActivityTimeline.tsx b/frontend/src/components/ActivityTimeline.tsx index b3669299..88dfecff 100644 --- a/frontend/src/components/ActivityTimeline.tsx +++ b/frontend/src/components/ActivityTimeline.tsx @@ -36,20 +36,20 @@ export function ActivityTimeline({ useState(false); const getEventIcon = (title: string, index: number) => { if (index === 0 && isLoading && processedEvents.length === 0) { - return ; + return ; } if (title.toLowerCase().includes("generating")) { - return ; + return ; } else if (title.toLowerCase().includes("thinking")) { - return ; + return ; } else if (title.toLowerCase().includes("reflection")) { - return ; + return ; } else if (title.toLowerCase().includes("research")) { - return ; + return ; } else if (title.toLowerCase().includes("finalizing")) { - return ; + return ; } - return ; + return ; }; useEffect(() => { @@ -59,18 +59,18 @@ export function ActivityTimeline({ }, [isLoading, processedEvents]); return ( - - + +
setIsTimelineCollapsed(!isTimelineCollapsed)} > Research {isTimelineCollapsed ? ( - + ) : ( - + )}
@@ -80,12 +80,12 @@ export function ActivityTimeline({ {isLoading && processedEvents.length === 0 && (
-
-
- +
+
+
-

+

Searching...

@@ -97,16 +97,16 @@ export function ActivityTimeline({
{index < processedEvents.length - 1 || (isLoading && index === processedEvents.length - 1) ? ( -
+
) : null} -
+
{getEventIcon(eventItem.title, index)}
-

+

{eventItem.title}

-

+

{typeof eventItem.data === "string" ? eventItem.data : Array.isArray(eventItem.data) @@ -118,11 +118,11 @@ export function ActivityTimeline({ ))} {isLoading && processedEvents.length > 0 && (

-
- +
+
-

+

Searching...

@@ -130,10 +130,10 @@ export function ActivityTimeline({ )}
) : !isLoading ? ( // Only show "No activity" if not loading and no events -
- -

No activity to display.

-

+

+ +

No activity to display.

+

Timeline will update during processing.

diff --git a/frontend/src/components/ChatMessagesView.tsx b/frontend/src/components/ChatMessagesView.tsx index 1a245d88..4638f441 100644 --- a/frontend/src/components/ChatMessagesView.tsx +++ b/frontend/src/components/ChatMessagesView.tsx @@ -4,7 +4,7 @@ import { ScrollArea } from "@/components/ui/scroll-area"; import { Loader2, Copy, CopyCheck } from "lucide-react"; import { InputForm } from "@/components/InputForm"; import { Button } from "@/components/ui/button"; -import { useState, ReactNode } from "react"; +import { useState, ReactNode, useEffect } from "react"; import ReactMarkdown from "react-markdown"; import { cn } from "@/lib/utils"; import { Badge } from "@/components/ui/badge"; @@ -13,6 +13,40 @@ import { ProcessedEvent, } from "@/components/ActivityTimeline"; // Assuming ActivityTimeline is in the same dir or adjust path +const Star = ({ delay }: { delay: number }) => ( +
+); + +const ShootingStar = ({ delay }: { delay: number }) => ( +
+); + +const Sunray = ({ delay, angle }: { delay: number; angle: number }) => ( +
+); + // Markdown component props type from former ReportView type MdComponentProps = { className?: string; @@ -38,7 +72,7 @@ const mdComponents = { ), p: ({ className, children, ...props }: MdComponentProps) => ( -

+

{children}

), @@ -147,7 +181,8 @@ const HumanMessageBubble: React.FC = ({ }) => { return (
{typeof message.content === "string" @@ -187,7 +222,7 @@ const AiMessageBubble: React.FC = ({ const isLiveActivityForThisBubble = isLastMessage && isOverallLoading; return ( -
+
{activityForThisBubble && activityForThisBubble.length > 0 && (
(null); + const [stars, setStars] = useState([]); + + useEffect(() => { + // Generate 50 random stars for background twinkling/falling + const starDelays = Array.from({ length: 50 }, (_, i) => i * 0.1); + setStars(starDelays); + }, []); + + const sunrays = Array.from({ length: 12 }, (_, i) => ({ + angle: (i * 30) - 165, + delay: i * 0.5, + })); const handleCopy = async (text: string, messageId: string) => { try { @@ -253,9 +300,49 @@ export function ChatMessagesView({ } }; return ( -
- -
+
+ {/* Space Background with Darker Nebula Gradient */} +
+ {/* Twinkling Stars */} + {stars.map((delay, i) => ( +
+ ))} + + {/* Falling Stars */} + {stars.slice(0, 20).map((delay, i) => ( + + ))} + + {/* Shooting Stars */} + {stars.slice(0, 5).map((delay, i) => ( + + ))} + + {/* Sunrays */} +
+ {sunrays.map((ray, i) => ( + + ))} +
+ + {/* Subtle Solar System Orb - Central Glow */} +
+
+
+ + {/* Orbiting Planet (subtle) */} +
+
+ +
{messages.map((message, index) => { const isLast = index === messages.length - 1; return ( @@ -292,7 +379,7 @@ export function ChatMessagesView({
{" "} {/* AI message row structure */} -
+
{liveActivityEvents.length > 0 ? (
- 0} - /> +
+ 0} + /> +
+ +
); } diff --git a/frontend/src/components/InputForm.tsx b/frontend/src/components/InputForm.tsx index a413006a..feecdf3c 100644 --- a/frontend/src/components/InputForm.tsx +++ b/frontend/src/components/InputForm.tsx @@ -26,7 +26,7 @@ export const InputForm: React.FC = ({ }) => { const [internalInputValue, setInternalInputValue] = useState(""); const [effort, setEffort] = useState("medium"); - const [model, setModel] = useState("gemini-2.5-flash-preview-05-2025"); + const [model, setModel] = useState("gemini-2.5-flash-preview-09-2025"); const handleInternalSubmit = (e?: React.FormEvent) => { if (e) e.preventDefault(); @@ -144,7 +144,7 @@ export const InputForm: React.FC = ({
diff --git a/frontend/src/components/WelcomeScreen.tsx b/frontend/src/components/WelcomeScreen.tsx index b1015aa8..bfbe14be 100644 --- a/frontend/src/components/WelcomeScreen.tsx +++ b/frontend/src/components/WelcomeScreen.tsx @@ -1,4 +1,5 @@ import { InputForm } from "./InputForm"; +import { useEffect, useState } from "react"; interface WelcomeScreenProps { handleSubmit: ( @@ -10,30 +11,220 @@ interface WelcomeScreenProps { isLoading: boolean; } +const Star = ({ delay }: { delay: number }) => ( +
+); + +const ShootingStar = ({ delay }: { delay: number }) => ( +
+); + +const Sunray = ({ delay, angle }: { delay: number; angle: number }) => ( +
+); + export const WelcomeScreen: React.FC = ({ handleSubmit, onCancel, isLoading, -}) => ( -
-
-

- Welcome. -

-

- How can I help you today? -

-
-
- +}) => { + const [stars, setStars] = useState([]); + + useEffect(() => { + // Generate 50 random stars for background twinkling/falling + const starDelays = Array.from({ length: 50 }, (_, i) => i * 0.1); + setStars(starDelays); + }, []); + + const sunrays = Array.from({ length: 12 }, (_, i) => ({ + angle: (i * 30) - 165, + delay: i * 0.5, + })); + + return ( +
+ {/* Space Background with Darker Nebula Gradient */} +
+ {/* Twinkling Stars */} + {stars.map((delay, i) => ( +
+ ))} + + {/* Falling Stars */} + {stars.slice(0, 20).map((delay, i) => ( + + ))} + + {/* Shooting Stars */} + {stars.slice(0, 5).map((delay, i) => ( + + ))} + + {/* Sunrays */} +
+ {sunrays.map((ray, i) => ( + + ))} +
+ + {/* Subtle Solar System Orb - Central Glow */} +
+
+
+ + {/* Orbiting Planet (subtle) */} +
+
+ + {/* Content Overlay */} +
+
+

+ Welcome. +

+

+ How can I help you today? +

+
+
+ +
+

+ Powered by Google Gemini and LangChain LangGraph. +

+
+ +
-

- Powered by Google Gemini and LangChain LangGraph. -

-
-); + ); +}; \ No newline at end of file From fa6554a3c619b12adc9f0ceafa191155b335f366 Mon Sep 17 00:00:00 2001 From: P47Parzival Date: Wed, 19 Nov 2025 18:49:15 +0530 Subject: [PATCH 3/4] syntax validated response --- backend/src/agent/graph.py | 77 +++++++++++++++++++++++++++++++++++- backend/src/agent/prompts.py | 16 ++++++++ 2 files changed, 92 insertions(+), 1 deletion(-) diff --git a/backend/src/agent/graph.py b/backend/src/agent/graph.py index 0f19c3f2..aef54fb5 100644 --- a/backend/src/agent/graph.py +++ b/backend/src/agent/graph.py @@ -1,4 +1,5 @@ import os +import re from agent.tools_and_schemas import SearchQueryList, Reflection from dotenv import load_dotenv @@ -40,6 +41,77 @@ genai_client = Client(api_key=os.getenv("GEMINI_API_KEY")) +def clean_markdown_formatting(text: str) -> str: + """Post-process the generated answer to clean up markdown formatting. + + This function performs the following cleanup operations: + - Fixes malformed markdown tables (ensures proper spacing and alignment) + - Cleans up citation placement (removes duplicate spaces around citations) + - Ensures proper line breaks around tables + - Normalizes whitespace + + Args: + text: The raw markdown text generated by the model + + Returns: + Cleaned and properly formatted markdown text + """ + if not text: + return text + + # Fix table formatting: ensure proper spacing around pipes + # Match markdown tables and normalize spacing + def fix_table_row(match): + row = match.group(0) + # Split by pipes and clean each cell + cells = [cell.strip() for cell in row.split('|')] + # Rejoin with proper spacing + return '| ' + ' | '.join(filter(None, cells)) + ' |' + + # Fix table rows (lines with pipes) + lines = text.split('\n') + cleaned_lines = [] + in_table = False + + for line in lines: + # Detect table rows (lines with multiple pipes) + if '|' in line and line.count('|') >= 2: + in_table = True + # Fix spacing around pipes + cells = [cell.strip() for cell in line.split('|')] + cells = [cell 
for cell in cells if cell] # Remove empty cells + if cells: + cleaned_line = '| ' + ' | '.join(cells) + ' |' + cleaned_lines.append(cleaned_line) + else: + cleaned_lines.append(line) + else: + # Add blank line after table ends + if in_table and line.strip(): + cleaned_lines.append('') + in_table = False + cleaned_lines.append(line) + + text = '\n'.join(cleaned_lines) + + # Clean up citation formatting + # Remove extra spaces around citations: "text [source](url)" -> "text [source](url)" + text = re.sub(r'\s+(\[[\w\s]+\]\([^)]+\))', r' \1', text) + + # Ensure citations are directly after the relevant text (no space before citation) + # "text [source]" is correct, "text [source]" gets fixed + text = re.sub(r'([^\s])\s{2,}(\[[\w\s]+\]\([^)]+\))', r'\1 \2', text) + + # Clean up multiple consecutive blank lines + text = re.sub(r'\n{3,}', '\n\n', text) + + # Ensure blank line before and after tables + text = re.sub(r'([^\n])\n(\|)', r'\1\n\n\2', text) + text = re.sub(r'(\|[^\n]+)\n([^\n|])', r'\1\n\n\2', text) + + return text.strip() + + # Nodes def generate_query(state: OverallState, config: RunnableConfig) -> QueryGenerationState: """LangGraph node that generates search queries based on the User's question. @@ -259,8 +331,11 @@ def finalize_answer(state: OverallState, config: RunnableConfig): ) unique_sources.append(source) + # Post-process the generated answer to clean up markdown formatting + cleaned_content = clean_markdown_formatting(result.content) + return { - "messages": [AIMessage(content=result.content)], + "messages": [AIMessage(content=cleaned_content)], "sources_gathered": unique_sources, } diff --git a/backend/src/agent/prompts.py b/backend/src/agent/prompts.py index 8963f6a6..39883a50 100644 --- a/backend/src/agent/prompts.py +++ b/backend/src/agent/prompts.py @@ -87,6 +87,22 @@ def get_current_date(): - You have access to all the information gathered from the previous steps. - You have access to the user's question. 
- Generate a high-quality answer to the user's question based on the provided summaries and the user's question. + +Markdown Formatting Requirements: +- Use proper markdown syntax for all formatting elements. +- Format any tables using standard markdown table syntax with pipes (|) and hyphens (-). +- Ensure tables are properly aligned with header separators (e.g., | Column 1 | Column 2 |). +- Example table format: + | Header 1 | Header 2 | Header 3 | + |----------|----------|----------| + | Data 1 | Data 2 | Data 3 | + +Citation Requirements (CRITICAL): +- Place citations as markdown links immediately after each fact, claim, or data point: [source](url) +- Citations must be placed right next to the relevant information, not at the end of paragraphs. +- Use the exact URLs from the Summaries - do not modify them. +- Every factual statement MUST include a citation. +- Format: "The company's revenue grew by 25% [reuters](https://example.com/article)." - Include the sources you used from the Summaries in the answer correctly, use markdown format (e.g. [apnews](https://vertexaisearch.cloud.google.com/id/1-0)). THIS IS A MUST. 
User Context: From c43bb83884970f021a08bc57c580f6dd0ace39c6 Mon Sep 17 00:00:00 2001 From: P47Parzival Date: Wed, 19 Nov 2025 21:10:44 +0530 Subject: [PATCH 4/4] Conversation history added, title of history is yet ti be added --- CONVERSATION_HISTORY.md | 170 ++++++++++++++ GETTING_STARTED.md | 121 ++++++++++ backend/conversations.db | Bin 0 -> 40960 bytes backend/src/agent/app.py | 139 ++++++++++- backend/src/agent/database.py | 217 ++++++++++++++++++ backend/src/agent/state.py | 1 + backend/src/agent/utils.py | 25 ++ backend/test_conversations.db | Bin 0 -> 24576 bytes backend/test_database.py | 79 +++++++ frontend/src/App.tsx | 153 +++++++++++- .../src/components/ConversationHistory.tsx | 187 +++++++++++++++ frontend/vite.config.ts | 7 +- 12 files changed, 1087 insertions(+), 12 deletions(-) create mode 100644 CONVERSATION_HISTORY.md create mode 100644 GETTING_STARTED.md create mode 100644 backend/conversations.db create mode 100644 backend/src/agent/database.py create mode 100644 backend/test_conversations.db create mode 100644 backend/test_database.py create mode 100644 frontend/src/components/ConversationHistory.tsx diff --git a/CONVERSATION_HISTORY.md b/CONVERSATION_HISTORY.md new file mode 100644 index 00000000..5bc5f11e --- /dev/null +++ b/CONVERSATION_HISTORY.md @@ -0,0 +1,170 @@ +# Conversation History Implementation + +This document describes the conversation history feature that has been added to the application. + +## Overview + +The application now supports persistent conversation history using SQLite database. Users can: +- Save conversations automatically +- View all previous conversations +- Resume any previous conversation +- Delete conversations +- Start new conversations + +## Backend Changes + +### 1. 
Database Layer (`backend/src/agent/database.py`) + +Created a `ConversationDatabase` class that manages: +- **Conversations table**: Stores conversation metadata (id, title, timestamps) +- **Messages table**: Stores individual messages with role (human/ai) and content +- SQLite database with proper indexing for performance + +Key methods: +- `create_conversation()`: Create a new conversation +- `get_conversation()`: Get conversation by ID +- `get_all_conversations()`: List all conversations +- `add_message()`: Add a message to a conversation +- `get_messages()`: Get all messages for a conversation +- `delete_conversation()`: Delete a conversation +- `update_conversation_title()`: Update conversation title + +### 2. API Endpoints (`backend/src/agent/app.py`) + +Added REST API endpoints: + +``` +GET /api/conversations - List all conversations +GET /api/conversation/{id} - Get specific conversation +POST /api/conversation - Create new conversation +POST /api/conversation/{id}/message - Add message to conversation +GET /api/conversation/{id}/messages - Get all messages +DELETE /api/conversation/{id} - Delete conversation +PATCH /api/conversation/{id}/title - Update title +``` + +### 3. State Management (`backend/src/agent/state.py`) + +Added `conversation_id` field to `OverallState` to track which conversation the agent is processing. + +### 4. Utilities (`backend/src/agent/utils.py`) + +Added `load_conversation_history()` function to load previous messages from database and convert them to LangChain message format. + +## Frontend Changes + +### 1. 
ConversationHistory Component (`frontend/src/components/ConversationHistory.tsx`) + +A modal component that displays: +- List of all conversations with titles and timestamps +- Message count for each conversation +- Delete button for each conversation +- Click to resume any conversation +- Beautiful space-themed UI matching the app design + +Features: +- Formatted timestamps (Today, Yesterday, date) +- Current conversation highlighting +- Confirmation before deletion +- Loading and error states + +### 2. App.tsx Updates + +Integrated conversation history with: +- **History Button**: Fixed position button to open history modal +- **New Chat Button**: Start a fresh conversation +- **Auto-save**: Messages are automatically saved to database +- **Resume**: Load previous conversations with all messages +- **State Management**: Track current conversation ID + +Key functions added: +- `createNewConversation()`: Create a new conversation when user starts chatting +- `saveMessage()`: Save each message (human and AI) to database +- `loadConversation()`: Load a previous conversation and restore state + +## Usage + +### Starting a New Conversation + +1. Open the app (welcome screen) +2. Type a message and submit +3. A new conversation is automatically created +4. All messages are saved as they arrive + +### Viewing History + +1. Click the "History" button (top-left) +2. See list of all previous conversations +3. Click any conversation to resume it +4. Delete unwanted conversations with the trash icon + +### Resuming a Conversation + +1. Open History +2. Click on a conversation +3. All previous messages load +4. Continue the conversation from where you left off + +### Starting Fresh + +Click "New Chat" button to start a new conversation while keeping the previous one saved. 
+ +## Database Schema + +### Conversations Table +```sql +CREATE TABLE conversations ( + id TEXT PRIMARY KEY, + title TEXT, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + metadata TEXT +) +``` + +### Messages Table +```sql +CREATE TABLE messages ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + conversation_id TEXT, + role TEXT, + content TEXT, + timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + metadata TEXT, + FOREIGN KEY (conversation_id) REFERENCES conversations(id) ON DELETE CASCADE +) +``` + +## File Structure + +``` +backend/src/agent/ +├── database.py # New: SQLite database manager +├── app.py # Updated: Added API endpoints +├── state.py # Updated: Added conversation_id field +└── utils.py # Updated: Added load_conversation_history() + +frontend/src/ +├── components/ +│ ├── ConversationHistory.tsx # New: History modal component +│ └── ... +└── App.tsx # Updated: Integrated conversation history +``` + +## Future Enhancements + +Potential improvements: +1. Search conversations by content +2. Rename conversations with custom titles +3. Export conversations +4. Share conversations +5. Conversation folders/tags +6. Automatic title generation from first message +7. Conversation analytics + +## Notes + +- Database file is created at `backend/conversations.db` +- Messages are saved immediately after being sent/received +- Deleting a conversation cascades to delete all its messages +- The database is persistent across app restarts diff --git a/GETTING_STARTED.md b/GETTING_STARTED.md new file mode 100644 index 00000000..cb02f623 --- /dev/null +++ b/GETTING_STARTED.md @@ -0,0 +1,121 @@ +# Starting the Application with Conversation History + +## Quick Start + +### 1. Start the Backend Server + +Open a terminal in the `backend` directory and run: + +```bash +cd backend +langgraph dev +``` + +This will start the LangGraph server on `http://127.0.0.1:2024` with the new conversation history API endpoints. 
+ +**Note:** Make sure you have: +- Installed backend dependencies: `pip install .` +- Set up your `.env` file with `GEMINI_API_KEY` + +### 2. Start the Frontend Development Server + +Open another terminal in the `frontend` directory and run: + +```bash +cd frontend +npm run dev +``` + +This will start the Vite dev server on `http://localhost:5173/app/` + +The frontend is now configured to proxy API requests to the backend server on port 2024. + +### 3. Access the Application + +Open your browser and navigate to: `http://localhost:5173/app/` + +## Features You Can Now Use + +### History Button +- Click the "History" button (top-left) to view all your saved conversations +- Each conversation shows: + - Title + - Timestamp (formatted as "Today", "Yesterday", or date) + - Message count + - Current conversation highlighted + +### Starting a Conversation +- Type your question in the welcome screen +- A new conversation is automatically created +- All messages are saved to the SQLite database (`backend/conversations.db`) + +### Resuming a Conversation +1. Click "History" button +2. Click on any conversation from the list +3. All previous messages load automatically +4. Continue the conversation where you left off + +### Starting a New Chat +- Click "New Chat" button (appears when you're in a conversation) +- Starts a fresh conversation +- Previous conversation remains saved + +### Deleting Conversations +- Hover over any conversation in the history +- Click the trash icon that appears +- Confirm deletion + +## Troubleshooting + +### "Failed to create conversation" Error + +This error means the frontend can't reach the backend API. Check: + +1. **Backend is running**: Make sure `langgraph dev` is running in the backend directory +2. **Correct port**: Backend should be on port 2024 (check terminal output) +3. 
**Vite proxy**: The `frontend/vite.config.ts` should proxy `/api` to `http://127.0.0.1:2024` + +### WebSocket Connection Errors + +These are Vite HMR warnings and won't affect the application functionality. They occur because the app is served under `/app/` path. + +### Database Errors + +If you see SQLite errors: +1. Make sure the `backend` directory is writable +2. The database file `conversations.db` will be created automatically +3. Delete `conversations.db` to reset all conversations + +## Database Location + +Conversations are stored in: `backend/conversations.db` + +You can: +- Inspect the database with any SQLite browser +- Delete it to start fresh +- Back it up to preserve conversations + +## API Endpoints + +The backend now serves these additional endpoints: + +``` +GET /api/conversations - List all conversations +GET /api/conversation/{id} - Get specific conversation +POST /api/conversation - Create new conversation +POST /api/conversation/{id}/message - Add message to conversation +GET /api/conversation/{id}/messages - Get all messages +DELETE /api/conversation/{id} - Delete conversation +PATCH /api/conversation/{id}/title - Update title +``` + +## Testing the Database + +Run the test script to verify the database is working: + +```bash +cd backend +python test_database.py +``` + +You should see: `🎉 All tests passed!` diff --git a/backend/conversations.db b/backend/conversations.db new file mode 100644 index 0000000000000000000000000000000000000000..f099fddb2f4527cef47b4beb8d67dd3a404f7735 GIT binary patch literal 40960 zcmeHQNt4@1cCM0o)%3XBC!IQ+5p!rsQ&e*WNjWCQTmbI-PQ?VB0FnSm0109t2zGba zJ+}KY7e_e4CtrP@U*P}Yvwwn5KKdnCELByjyA^ghtSN$_599LcrXT;6E%Jvhj#1-F^At)z29gicKEf+4q@=4RQ(JeExU+#~(la z;*0a|{cz0VPdPq+aBGy~yXe^ue%bNNG#LOG@)f?1y8T+K(4V0OKSPDluv2TpU@g8q zy!_E`-vIy(`RPyWZBw26%q}pXOHB7?_Hs2iGZz~c8rE9;U|48%QJEJCqvjBmM*Tkg z=Ee0VKmO|t;$j)Q_-BqLbozX)(hhO?!6iq1Uf|(?C4PWzqTxK#Zkg`%? 
zP%4!9w<7uc_kVr<@zXEAJpcQ@2`}ocac;eMbnEio`sk;xF}pA~FYu3p;%e^CUnQGh zm&glngRf!!pIiXX0{HJ?oLd+2?GWc5ee|^T<@s5t6|QC%_FNjizNI3ZMdB8)%$H~W z{n;?BZYGzvvMm5FrpD5FrpD5FrpD5FrpD5FrpD5FrpD5Fv020rAmCqgX2S zd2X5Ja6FTVWigkFr8#LC%je^{SS}-{6A7Hll6<^Pd{nv-uS>?0nOGtbOXN`^^D32t ze|O%>ze=XQ{#HCc9D^iNa+Y8W!{@TGw1_jYoSccr#B^Lny8>JZ6c$Irt2CuP!s5x4i3<>L@t~EkH7t=M4rn( zM?Q8CMyjT$dVn-@sRN(d)v%7RDI*J5#@bSoP{DIt8^bUr!nUh9?j>5A)HhiHCw$pK zvc_oP1X0hmJQsz7T37=DvVDns8@Sr=3_m0s$EEPXkv!cBx7d*+7?x5KMn|{ZxHd8*etoM%Ej-%yw#Z6wf!3Dxqdx*NshQ z>sU%(=&PHYULQ8vX58_ze3h%N^lGzSHWzz2neEndZR0t5?Q3hzg5Qnr>7<2;&@2S4 zdcC*WrAx!I?QqJfSFQJGvo$Ovmd$y3Z}MGSiw&G@(QFvXGOrP>m}ob2Es34>V&)F2 zyN0(L4U2i4E+#SUr4!q7uN91&qvpz;WTYuA=Z#E%Hm#K$r#>zx%YqnB?YTikcdDsW z-+(<#I5}4(OKecju?V$va!;pN?dR+Mj^{`5y~X9LcrPquGc&aI@v@Ox$t}*}vlX$< z*G!JC+v7#Qr;@I}?geQ)RMZuY*KK_?%nj>CHj(Y=-E`0E#H>`$s_jbw*H`vFzZ4dh z8LJ4j0$!M_eP-Yc_A9Yf$=V%Trjw>=b~27oO{cQ!xs`VIdP552Mj@_d{MK6JinH2p zp1(v@m>fQ}*9hh!t5tAenk|9x?Xjr|+-TRPZ04{T2S@=GAEt>Z=^hhkWO_1z>3Num zTPi`>#g-tO=$fJdqVRpgfFZFRM6>39tt#G^Sp%B?WBTahv(Z!hZVF%ZK->E&MIdT7 z6!n$*Mdo2O(67}0Z*S-0fd-I{fBmg_&JMUpef}=a`|SfB7lz~Uu2!Gpa&zZ+SNVVD z|DRv{=NbHq9uWc&0ucfc0ucfc0ucfc0ucfc0ucfc0ucfc0uKg(&(0rzaei!~@tE=d zA76aW`2XKW6#x(BB1b0@ArK)DArK)DArK)DArK)DArK)DArK+(ULx?WwEoBMy5l?Z z{{fCqLVD23#^(B-NI|^R66Rx^ksRp4DfGKz6>r+hbgxWbzU*5+9=qp^{A;Uz66C zXsk)Vr_=D|8y3h3(t;#P*NzHQLj~(Pu{Gk{lXMAM+ezDAhqPPyQY=0)=P6}xY=xO# zwz6?sl1r&_JJA_->~)*BhgMly6z3dQpO)sSW=Bwh<)BE@gW4w6sWm5ZdV#A!e^28& zomMJB-`EWE!$z~jXMHbirOO;Q+8SPCkS*u3^{mZsakv^SFVM5a`yp4X{u zTGifeznum)WLmty9);t0ED%!n=XMI&En6lr{(j}w2qO=;Ui za$s~uxput0iJ6_ay7L#lYNrRWbq3?ro{p!kFB?|5GaF6U9o^9vZdnN>NMO`LCOaxQ zN&<2;5olw|El%P=L0uw|NYFr+%$=rxtGd0%Yj>RQuWNie=$zZI?^=c|t>MEP8kmIIyCz&*JdSf|B#doqQ&xRGXm(FnYRBqF(;k57X z7VDIlY7HujMutl?TSB2Ga?K18HaWXC=F@ht3Hn`ahx;wU)%_YLW9k~QTy@Ir;_I#y z@7DxE(>1vC*!1p6!iADB*V_eA9a+BPmE3mRvm|0j=$VBWWQ+U1%htycl% zTWO^)$Lg*zkd~F!UTNpMYspu%z1-g|3gXO|*OeWO1tU^gD$L+1uquOaNn-Q

8^6 z8yUFFKx;nOKtP9HY9=TM*nv_TKJ^HKl?x+Lr&NX_1BcqMQeDCaHseS8swSz3budSu zwOs5estd{orkoCnj3U{_{()ZqFtzq+$q*RLB@C;t9j+CHunq;*mw_AT1SHH2moB&) z0b9i!ZRZjK2Fy{|bg~4|pdcwA7a<`ZZ08oyMe5mL-uN0=Hms0Ev?;8B%osW>PCmQ{ zrAtw% z7(tSb9f#Ga9A;oUyg3Nevs6L{EI%LnImpN`T(3Bhq+Hsc&xcWXeq$% zz}jSQ+7#+crGu-EeB)af$<~5UMDtHZ!*%jl=LitW+X_X^RXhF$+!%U?c?$wk6z- zP1|ZNWG+rqw9#uF>{k;a${JM6-&^|Ty;+r0AXq~t1lN-{U93V=tx`*?Vg)i8=2P2;#k%cu_BrTM;kqI3*WC)dn z`{E^Y;ttZMLsCbFkC-4so{=gxwp3^*aEdzh;S1h-V-kV?|K$Io;Yi{tyWV6SXIQR0 zTp=~l3zxPhPr%d0GOTT|(6K-un=)I?@mRkb8=Bf&dlbxfe$)Qu(7+~K7@=E-4lczp zy9!IK!;>a-G{r$20JLdL(JgRTKpXV>l0Ze*m@BP zP$~{jMQmPqY1fUrkOzLR4ic8hooK@1G=*cpdIHLXTuDa?q~j#>Rc~}l;Xvg856*#D z;}tGUxjg8GnQ;CP*aeEDLrw4>uL zI?!NHL4hrdy4YTWZ3w<^7)FXi7>PNp5RnX!irEIxYarxgIr1fF4$X+4=!rGMOkeDcFHtXg~Z3uIRQj*ABGIrfjruq~@kpqfl z)-D*9sbu_K1maIm7#?WH-j`4)aD9axVs8(@VHPPfD9|-SjbQ=(k_D7QrxsE}>{D`` zc0I8L{3W<0VZ&{vOGEHNUFrk>G-RZLD!)7wQLr7V5N3-N>1L!9tK4MN8y5W-Kbh}Wn_ZdHbT3~v$DFSRjs3vL(S_^@u{oLa#0)AUDcVcV^hl6ym%x6IK<^k)>;Dmq;cCVMl}*$X312=c8wFU*QD z((PDQsmDYJc(iK0Uh1q6XzG+;AC@fm9ww4{JN9kM;5t;q_x zMC_)y1L%|vwKNz?aV{ZKOAY2zCa;WtHl1W>60M$U9ww>}IMj5>%(??}tqsv+1Io8h z5=erqaer9)cO(NcwE_|YDL=M5$lDF+D?#=iVkM^-0erx1^&uEmCXNMButVthI+M&u z17V&R%r(X;V44JYwm_N(wR2ddjt&HTVuMcsK4+Nyy(%5Njzd6ZOR+%*0UP0pLkd}} z2hEi`O=WwPv;x@x^%53H++DDkZh@P9gpzhghK>|(rh!_XLp Dict: + """Create a new conversation.""" + def _create(): + with sqlite3.connect(self.db_path) as conn: + cursor = conn.cursor() + cursor.execute(""" + INSERT INTO conversations (id, title, metadata) + VALUES (?, ?, ?) 
+ """, (conversation_id, title, json.dumps(metadata or {}))) + conn.commit() + + return { + "id": conversation_id, + "title": title, + "created_at": datetime.now().isoformat(), + "updated_at": datetime.now().isoformat(), + "metadata": metadata or {} + } + + return await asyncio.to_thread(_create) + + async def get_conversation(self, conversation_id: str) -> Optional[Dict]: + """Get a specific conversation by ID.""" + def _get(): + with sqlite3.connect(self.db_path) as conn: + conn.row_factory = sqlite3.Row + cursor = conn.cursor() + cursor.execute(""" + SELECT * FROM conversations WHERE id = ? + """, (conversation_id,)) + row = cursor.fetchone() + + if row: + return { + "id": row["id"], + "title": row["title"], + "created_at": row["created_at"], + "updated_at": row["updated_at"], + "metadata": json.loads(row["metadata"]) if row["metadata"] else {} + } + return None + + return await asyncio.to_thread(_get) + + async def get_all_conversations(self, limit: int = 50) -> List[Dict]: + """Get all conversations, ordered by most recent.""" + def _get_all(): + with sqlite3.connect(self.db_path) as conn: + conn.row_factory = sqlite3.Row + cursor = conn.cursor() + cursor.execute(""" + SELECT c.*, COUNT(m.id) as message_count + FROM conversations c + LEFT JOIN messages m ON c.id = m.conversation_id + GROUP BY c.id + ORDER BY c.updated_at DESC + LIMIT ? 
+ """, (limit,)) + rows = cursor.fetchall() + + return [ + { + "id": row["id"], + "title": row["title"], + "created_at": row["created_at"], + "updated_at": row["updated_at"], + "message_count": row["message_count"], + "metadata": json.loads(row["metadata"]) if row["metadata"] else {} + } + for row in rows + ] + + return await asyncio.to_thread(_get_all) + + async def add_message(self, conversation_id: str, role: str, content: str, metadata: Optional[Dict] = None) -> Dict: + """Add a message to a conversation.""" + def _add(): + with sqlite3.connect(self.db_path) as conn: + cursor = conn.cursor() + + # Add message + cursor.execute(""" + INSERT INTO messages (conversation_id, role, content, metadata) + VALUES (?, ?, ?, ?) + """, (conversation_id, role, content, json.dumps(metadata or {}))) + + message_id = cursor.lastrowid + + # Update conversation's updated_at timestamp + cursor.execute(""" + UPDATE conversations + SET updated_at = CURRENT_TIMESTAMP + WHERE id = ? + """, (conversation_id,)) + + conn.commit() + + return { + "id": message_id, + "conversation_id": conversation_id, + "role": role, + "content": content, + "timestamp": datetime.now().isoformat(), + "metadata": metadata or {} + } + + return await asyncio.to_thread(_add) + + async def get_messages(self, conversation_id: str) -> List[Dict]: + """Get all messages for a conversation.""" + def _get_msgs(): + with sqlite3.connect(self.db_path) as conn: + conn.row_factory = sqlite3.Row + cursor = conn.cursor() + cursor.execute(""" + SELECT * FROM messages + WHERE conversation_id = ? 
+ ORDER BY timestamp ASC + """, (conversation_id,)) + rows = cursor.fetchall() + + return [ + { + "id": row["id"], + "conversation_id": row["conversation_id"], + "role": row["role"], + "content": row["content"], + "timestamp": row["timestamp"], + "metadata": json.loads(row["metadata"]) if row["metadata"] else {} + } + for row in rows + ] + + return await asyncio.to_thread(_get_msgs) + + async def delete_conversation(self, conversation_id: str) -> bool: + """Delete a conversation and all its messages.""" + def _delete(): + with sqlite3.connect(self.db_path) as conn: + cursor = conn.cursor() + cursor.execute("DELETE FROM conversations WHERE id = ?", (conversation_id,)) + deleted = cursor.rowcount > 0 + conn.commit() + return deleted + + return await asyncio.to_thread(_delete) + + async def update_conversation_title(self, conversation_id: str, title: str) -> bool: + """Update the title of a conversation.""" + def _update(): + with sqlite3.connect(self.db_path) as conn: + cursor = conn.cursor() + cursor.execute(""" + UPDATE conversations + SET title = ?, updated_at = CURRENT_TIMESTAMP + WHERE id = ? 
+ """, (title, conversation_id)) + updated = cursor.rowcount > 0 + conn.commit() + return updated + + return await asyncio.to_thread(_update) diff --git a/backend/src/agent/state.py b/backend/src/agent/state.py index d5ad4dcd..cd9af7ec 100644 --- a/backend/src/agent/state.py +++ b/backend/src/agent/state.py @@ -19,6 +19,7 @@ class OverallState(TypedDict): max_research_loops: int research_loop_count: int reasoning_model: str + conversation_id: str # Track which conversation this state belongs to class ReflectionState(TypedDict): diff --git a/backend/src/agent/utils.py b/backend/src/agent/utils.py index d02c8d91..1fa2176a 100644 --- a/backend/src/agent/utils.py +++ b/backend/src/agent/utils.py @@ -2,6 +2,31 @@ from langchain_core.messages import AnyMessage, AIMessage, HumanMessage +def load_conversation_history(conversation_id: str) -> List[AnyMessage]: + """ + Load conversation history from database and convert to LangChain messages. + + Args: + conversation_id: The ID of the conversation to load + + Returns: + List of LangChain messages (HumanMessage or AIMessage) + """ + from agent.database import ConversationDatabase + + db = ConversationDatabase() + messages_data = db.get_messages(conversation_id) + + langchain_messages = [] + for msg in messages_data: + if msg["role"] == "human": + langchain_messages.append(HumanMessage(content=msg["content"])) + elif msg["role"] == "ai": + langchain_messages.append(AIMessage(content=msg["content"])) + + return langchain_messages + + def get_research_topic(messages: List[AnyMessage]) -> str: """ Get the research topic from the messages. 
diff --git a/backend/test_conversations.db b/backend/test_conversations.db new file mode 100644 index 0000000000000000000000000000000000000000..50b432bb695bea24d52e0bf39ae4477ce7441a75 GIT binary patch literal 24576 zcmeI)O>YuG7zglOXrcB++7l;|$6kQ6H3e!+8xuvCwnW~9C9x+mcE@%juoQ;c#`I7% zUi>D0hhF?H#=G(0(OK+PmQoU9O{4iwV3}cOpPl&)dzfXQ`ZRAzK`-o1lS@iz^O~+} zkBBr)n^Mn!db-K4G81k=z3Kz`tFlwt-NO%aq3_z{%xCR(Cv-M@Hv3X#ksts82tWV= z5P$##AOL~CC2;DW42Dw?{WN2>OmSqHNA+F1btpPrE-kxNx6G#KcKM#@1~L_vHW=lK zS@w)ZRHSJdN~I#XN5_Vu#yyqG6M^v3lD;GP^M>diG}OGK-W3Nu(b^p@PM$ZzNI%Xq z8mb#>@upZ-N5NFYb*%dWAc0-KWtpZ8Wl4EK}OZ7tXW}A#$fmMxkp-fq} znXcsx%G4?q)yDe8p3$vs3dv0_`Ixh8mMSc_RqXfw(vVVxZ8CMD46D*$&$}_pjFb8> zWLci6Pf#XZ&7`yJl34uBwjK;eqx!p2f6zQJJO!U;_&mF=9zzFzZ~#XmNK1|%iH_h> zm~}NJzTo^_=w4ef|6vTzU|ptM_XWa*sIK*|>hr6F_oUq~doJ;AJ>da#=JL+`UYm0^ z^O_d==wy%}009U<00Izz00bZa0SG_<0uZ=a0YeW&5~);Nsyjd{iDatw^Zo)Es!WD+ zLnFDGytk4_tR&Vcv9^&|-B?{4@z(1_C&JQD=_1a+;Ze009U<00Izz00bZa0SG_<0ucC@1!mQp-(7Lw I8(jbY19_o%%K!iX literal 0 HcmV?d00001 diff --git a/backend/test_database.py b/backend/test_database.py new file mode 100644 index 00000000..a9427b95 --- /dev/null +++ b/backend/test_database.py @@ -0,0 +1,79 @@ +""" +Test script for conversation database functionality. +Run this to verify the database is working correctly. 
+""" + +import sys +from pathlib import Path + +# Add the src directory to the path +sys.path.insert(0, str(Path(__file__).parent / "src")) + +from agent.database import ConversationDatabase + + +def test_database(): + """Test basic database operations.""" + print("🧪 Testing Conversation Database...\n") + + # Initialize database + db = ConversationDatabase("test_conversations.db") + print("✅ Database initialized") + + # Create a conversation + conv = db.create_conversation( + conversation_id="test-123", + title="Test Conversation", + metadata={"test": True} + ) + print(f"✅ Created conversation: {conv['id']}") + + # Add messages + msg1 = db.add_message( + conversation_id="test-123", + role="human", + content="Hello, how are you?" + ) + print(f"✅ Added human message: {msg1['id']}") + + msg2 = db.add_message( + conversation_id="test-123", + role="ai", + content="I'm doing great! How can I help you today?" + ) + print(f"✅ Added AI message: {msg2['id']}") + + # Get conversation + retrieved = db.get_conversation("test-123") + print(f"✅ Retrieved conversation: {retrieved['title']}") + + # Get messages + messages = db.get_messages("test-123") + print(f"✅ Retrieved {len(messages)} messages") + + # Get all conversations + all_convs = db.get_all_conversations() + print(f"✅ Found {len(all_convs)} total conversations") + + # Update title + db.update_conversation_title("test-123", "Updated Test Title") + updated = db.get_conversation("test-123") + print(f"✅ Updated title to: {updated['title']}") + + # Delete conversation + deleted = db.delete_conversation("test-123") + print(f"✅ Deleted conversation: {deleted}") + + # Verify deletion + after_delete = db.get_all_conversations() + print(f"✅ Conversations after delete: {len(after_delete)}") + + print("\n🎉 All tests passed!") + + # Cleanup + Path("test_conversations.db").unlink(missing_ok=True) + print("🧹 Cleaned up test database") + + +if __name__ == "__main__": + test_database() diff --git a/frontend/src/App.tsx 
b/frontend/src/App.tsx index d06d4021..f6f967f7 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -4,9 +4,15 @@ import { useState, useEffect, useRef, useCallback } from "react"; import { ProcessedEvent } from "@/components/ActivityTimeline"; import { WelcomeScreen } from "@/components/WelcomeScreen"; import { ChatMessagesView } from "@/components/ChatMessagesView"; +import { ConversationHistory } from "@/components/ConversationHistory"; import { Button } from "@/components/ui/button"; +import { History } from "lucide-react"; export default function App() { + const [showHistory, setShowHistory] = useState(false); + const [currentConversationId, setCurrentConversationId] = useState< + string | null + >(null); const [processedEventsTimeline, setProcessedEventsTimeline] = useState< ProcessedEvent[] >([]); @@ -99,16 +105,104 @@ export default function App() { } }, [thread.messages, thread.isLoading, processedEventsTimeline]); + const createNewConversation = useCallback(async () => { + try { + const response = await fetch("/api/conversation", { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + title: "New Conversation", + }), + }); + + if (!response.ok) { + throw new Error("Failed to create conversation"); + } + + const data = await response.json(); + setCurrentConversationId(data.id); + return data.id; + } catch (error) { + console.error("Error creating conversation:", error); + return null; + } + }, []); + + const saveMessage = useCallback( + async (role: string, content: string, conversationId: string) => { + try { + await fetch(`/api/conversation/${conversationId}/message`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + role, + content, + }), + }); + } catch (error) { + console.error("Error saving message:", error); + } + }, + [] + ); + + const loadConversation = useCallback( + async (conversationId: string) => { + try { + const response = await 
fetch(
+          `/api/conversation/${conversationId}/messages`
+        );
+        if (!response.ok) {
+          throw new Error("Failed to load conversation");
+        }
+
+        const messages = await response.json();
+        const langchainMessages: Message[] = messages.map(
+          (msg: { id: number; role: string; content: string }) => ({
+            type: msg.role === "human" ? "human" : "ai",
+            content: msg.content,
+            id: msg.id.toString(),
+          })
+        );
+
+        setCurrentConversationId(conversationId);
+        thread.submit({
+          messages: langchainMessages,
+          initial_search_query_count: 3,
+          max_research_loops: 3,
+          reasoning_model: "gemini-2.0-flash-thinking-exp-01-21",
+        });
+        setShowHistory(false);
+      } catch (error) {
+        console.error("Error loading conversation:", error);
+        alert("Failed to load conversation");
+      }
+    },
+    [thread]
+  );
+
   const handleSubmit = useCallback(
-    (submittedInputValue: string, effort: string, model: string) => {
+    async (submittedInputValue: string, effort: string, model: string) => {
       if (!submittedInputValue.trim()) return;
       setProcessedEventsTimeline([]);
       hasFinalizeEventOccurredRef.current = false;
 
+      // Create a new conversation if we don't have one
+      let conversationId = currentConversationId;
+      if (!conversationId) {
+        conversationId = await createNewConversation();
+        if (!conversationId) {
+          alert("Failed to create conversation");
+          return;
+        }
+      }
+
+      // Save the user message
+      await saveMessage("human", submittedInputValue, conversationId);
+
       // convert effort to, initial_search_query_count and max_research_loops
-      // low means max 1 loop and 1 query
-      // medium means max 3 loops and 3 queries
-      // high means max 10 loops and 5 queries
       let initial_search_query_count = 0;
       let max_research_loops = 0;
       switch (effort) {
@@ -141,16 +235,65 @@ export default function App() {
         reasoning_model: model,
       });
     },
-    [thread]
+    [thread, currentConversationId, createNewConversation, saveMessage]
   );
 
+  // Save AI messages when they arrive
+  useEffect(() => {
+    if (
+      currentConversationId &&
+      thread.messages.length > 0 &&
+      !thread.isLoading
+    ) {
+      const
lastMessage = thread.messages[thread.messages.length - 1]; + if (lastMessage && lastMessage.type === "ai") { + saveMessage("ai", lastMessage.content as string, currentConversationId); + } + } + }, [thread.messages, thread.isLoading, currentConversationId, saveMessage]); + const handleCancel = useCallback(() => { thread.stop(); window.location.reload(); }, [thread]); + const handleNewConversation = useCallback(() => { + setCurrentConversationId(null); + window.location.reload(); + }, []); + return (

+ {/* History Button - Fixed Position */} + + + {/* New Conversation Button - Show when in conversation */} + {thread.messages.length > 0 && ( + + )} + + {/* Conversation History Modal */} + {showHistory && ( + setShowHistory(false)} + currentConversationId={currentConversationId || undefined} + /> + )} +
{thread.messages.length === 0 ? ( ; +} + +interface ConversationHistoryProps { + onSelectConversation: (conversationId: string) => void; + onClose: () => void; + currentConversationId?: string; +} + +export function ConversationHistory({ + onSelectConversation, + onClose, + currentConversationId, +}: ConversationHistoryProps) { + const [conversations, setConversations] = useState([]); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + + useEffect(() => { + fetchConversations(); + }, []); + + const fetchConversations = async () => { + try { + setLoading(true); + const response = await fetch("/api/conversations"); + if (!response.ok) { + throw new Error("Failed to fetch conversations"); + } + const data = await response.json(); + setConversations(data); + setError(null); + } catch (err) { + setError(err instanceof Error ? err.message : "Failed to load conversations"); + } finally { + setLoading(false); + } + }; + + const deleteConversation = async (conversationId: string, event: React.MouseEvent) => { + event.stopPropagation(); + + if (!confirm("Are you sure you want to delete this conversation?")) { + return; + } + + try { + const response = await fetch(`/api/conversation/${conversationId}`, { + method: "DELETE", + }); + + if (!response.ok) { + throw new Error("Failed to delete conversation"); + } + + // Refresh the list + fetchConversations(); + } catch (err) { + console.error("Error deleting conversation:", err); + alert("Failed to delete conversation"); + } + }; + + const formatDate = (dateString: string) => { + const date = new Date(dateString); + const now = new Date(); + const diffInHours = (now.getTime() - date.getTime()) / (1000 * 60 * 60); + + if (diffInHours < 24) { + return date.toLocaleTimeString([], { hour: "2-digit", minute: "2-digit" }); + } else if (diffInHours < 48) { + return "Yesterday"; + } else { + return date.toLocaleDateString([], { month: "short", day: "numeric" }); + } + }; + + return ( +
+ + +
+
+ + Conversation History + + + Resume or delete previous conversations + +
+ +
+
+ + + {loading ? ( +
+ +
+ ) : error ? ( +
+

{error}

+ +
+ ) : conversations.length === 0 ? ( +
+ +

No conversations yet

+

Start a new conversation to see it here

+
+ ) : ( +
+ {conversations.map((conversation) => ( +
onSelectConversation(conversation.id)} + className={`group relative p-4 rounded-lg border transition-all cursor-pointer ${ + currentConversationId === conversation.id + ? "bg-indigo-500/20 border-indigo-500/50 shadow-md" + : "bg-neutral-800/40 border-neutral-700/50 hover:bg-neutral-800/60 hover:border-indigo-500/30" + }`} + > +
+
+
+ +

+ {conversation.title} +

+ {currentConversationId === conversation.id && ( + + Current + + )} +
+
+ + + {formatDate(conversation.updated_at)} + + + {conversation.message_count}{" "} + {conversation.message_count === 1 ? "message" : "messages"} + +
+
+ +
+
+ ))} +
+ )} +
+
+
+
+ ); +} diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts index 8abaa377..763ea282 100644 --- a/frontend/vite.config.ts +++ b/frontend/vite.config.ts @@ -14,12 +14,11 @@ export default defineConfig({ }, server: { proxy: { - // Proxy API requests to the backend server + // Proxy API requests to the LangGraph backend server "/api": { - target: "http://127.0.0.1:8000", // Default backend address + target: "http://127.0.0.1:2024", // LangGraph dev server port changeOrigin: true, - // Optionally rewrite path if needed (e.g., remove /api prefix if backend doesn't expect it) - // rewrite: (path) => path.replace(/^\/api/, ''), + secure: false, }, }, },