"
+ _ = server.AddMessage(t, folderName, missedMessageID1, "Missed Email 1", "from@test.com", "to@test.com", now.Add(-30*time.Minute))
+ _ = server.AddMessage(t, folderName, missedMessageID2, "Missed Email 2", "from@test.com", "to@test.com", now)
+
+ // Now simulate WebSocket connecting and triggering sync
+ // This should catch up on all missed emails using incremental sync
+ err = service.SyncThreadsForFolder(ctx, userID, folderName)
+ if err != nil {
+ t.Fatalf("Failed to sync missed emails: %v", err)
+ }
+
+ // Verify both missed emails are in the database
+ msg1, err := db.GetMessageByMessageID(ctx, pool, userID, missedMessageID1)
+ if err != nil {
+ t.Fatalf("Missed email 1 not found in database: %v", err)
+ }
+ if msg1.Subject != "Missed Email 1" {
+ t.Errorf("Expected subject 'Missed Email 1', got %s", msg1.Subject)
+ }
+
+ msg2, err := db.GetMessageByMessageID(ctx, pool, userID, missedMessageID2)
+ if err != nil {
+ t.Fatalf("Missed email 2 not found in database: %v", err)
+ }
+ if msg2.Subject != "Missed Email 2" {
+ t.Errorf("Expected subject 'Missed Email 2', got %s", msg2.Subject)
+ }
+
+ // Verify sync info was updated to the highest UID
+ syncInfo, err := db.GetFolderSyncInfo(ctx, pool, userID, folderName)
+ if err != nil {
+ t.Fatalf("Failed to get sync info: %v", err)
+ }
+ if syncInfo == nil {
+ t.Fatal("Expected sync info to exist after sync")
+ }
+ if syncInfo.LastSyncedUID == nil {
+ t.Error("Expected LastSyncedUID to be set after sync")
+ }
+}
+
+// getThreadIDs is a helper to extract thread IDs for debugging.
+func getThreadIDs(threads []*models.Thread) []string {
+ ids := make([]string, len(threads))
+ for i, thread := range threads {
+ ids[i] = thread.StableThreadID
+ }
+ return ids
+}
diff --git a/backend/internal/imap/pool_cleanup.go b/backend/internal/imap/pool_cleanup.go
index 638d279..5e807f8 100644
--- a/backend/internal/imap/pool_cleanup.go
+++ b/backend/internal/imap/pool_cleanup.go
@@ -5,7 +5,7 @@ import (
)
// startCleanupGoroutine runs a background goroutine that periodically cleans up idle connections.
-// The goroutine will stop when cleanupCtx is cancelled (via Pool.Close()).
+// The goroutine will stop when cleanupCtx is canceled (via Pool.Close()).
func (p *Pool) startCleanupGoroutine() {
ticker := time.NewTicker(1 * time.Minute)
go func() {
@@ -13,7 +13,7 @@ func (p *Pool) startCleanupGoroutine() {
for {
select {
case <-p.cleanupCtx.Done():
- // Context cancelled - stop the ticker and exit
+ // Context canceled - stop the ticker and exit
return
case <-ticker.C:
// Periodic cleanup
diff --git a/backend/internal/imap/pool_interface.go b/backend/internal/imap/pool_interface.go
index 86b2d41..5544320 100644
--- a/backend/internal/imap/pool_interface.go
+++ b/backend/internal/imap/pool_interface.go
@@ -32,6 +32,13 @@ type IMAPPool interface {
// Close closes all connections in the pool.
Close()
+
+ // GetListenerConnection gets or creates a dedicated listener client for IDLE.
+ // Returns a locked client that must be unlocked by the caller.
+ GetListenerConnection(userID, server, username, password string) (ListenerClient, error)
+
+ // RemoveListenerConnection removes a listener connection from the pool.
+ RemoveListenerConnection(userID string)
}
// ClientWrapper wraps a go-imap client.Client to implement IMAPClient interface.
diff --git a/backend/internal/imap/search_test.go b/backend/internal/imap/search_test.go
index fa2c0e0..fab63cf 100644
--- a/backend/internal/imap/search_test.go
+++ b/backend/internal/imap/search_test.go
@@ -413,8 +413,8 @@ func TestService_buildThreadMapFromMessages(t *testing.T) {
}
t.Run("returns error when GetMessageByMessageID returns non-NotFound error", func(t *testing.T) {
- // Create a cancelled context to simulate a database error
- cancelledCtx, cancel := context.WithCancel(ctx)
+ // Create a canceled context to simulate a database error
+ canceledCtx, cancel := context.WithCancel(ctx)
cancel() // Cancel immediately to cause context error
imapMsg := &imap.Message{
@@ -424,7 +424,7 @@ func TestService_buildThreadMapFromMessages(t *testing.T) {
},
}
- _, _, err := service.buildThreadMapFromMessages(cancelledCtx, userID, []*imap.Message{imapMsg})
+ _, _, err := service.buildThreadMapFromMessages(canceledCtx, userID, []*imap.Message{imapMsg})
if err == nil {
t.Error("Expected error when GetMessageByMessageID returns non-NotFound error")
}
diff --git a/backend/internal/imap/service.go b/backend/internal/imap/service.go
index 2ca7da6..468d815 100644
--- a/backend/internal/imap/service.go
+++ b/backend/internal/imap/service.go
@@ -392,7 +392,6 @@ func (s *Service) processFullSyncMessages(ctx context.Context, messages []*imap.
// Uses incremental sync if possible (only syncs new messages since last sync).
func (s *Service) SyncThreadsForFolder(ctx context.Context, userID, folderName string) error {
return s.withClientAndSelectFolder(ctx, userID, folderName, func(client *imapclient.Client, mbox *imap.MailboxStatus) error {
- log.Printf("Selected folder %s: %d messages", folderName, mbox.Messages)
// Check if we can do incremental sync
syncInfo, err := db.GetFolderSyncInfo(ctx, s.dbPool, userID, folderName)
@@ -412,13 +411,13 @@ func (s *Service) SyncThreadsForFolder(ctx context.Context, userID, folderName s
if err != nil {
return fmt.Errorf("failed to fetch message headers: %w", err)
}
- log.Printf("Fetched %d message headers", len(messages))
+ log.Printf("IMAP Sync: Fetched %d message headers for user %s, folder %s", len(messages), userID, folderName)
s.processIncrementalMessages(ctx, messages, userID, folderName)
// Update sync info with the highest UID
highestUIDInt64 := int64(incResult.highestUID)
if err := db.SetFolderSyncInfo(ctx, s.dbPool, userID, folderName, &highestUIDInt64); err != nil {
- log.Printf("Warning: Failed to set folder sync info: %v", err)
+ log.Printf("IMAP Sync: Warning: Failed to set folder sync info for user %s, folder %s: %v", userID, folderName, err)
}
go s.updateThreadCountInBackground(userID, folderName)
return nil
@@ -439,13 +438,14 @@ func (s *Service) SyncThreadsForFolder(ctx context.Context, userID, folderName s
return fmt.Errorf("failed to fetch message headers: %w", err)
}
- log.Printf("Fetched %d message headers", len(messages))
+ log.Printf("IMAP Sync: Fetched %d message headers for user %s, folder %s", len(messages), userID, folderName)
// Process messages: use thread structure if available, otherwise use incremental processing
threadMaps := fullResult.threadMaps
if threadMaps == nil {
// THREAD command not supported - process messages without thread structure
// (same as incremental sync)
+ log.Printf("IMAP Sync: THREAD command not supported, processing messages incrementally for user %s, folder %s", userID, folderName)
s.processIncrementalMessages(ctx, messages, userID, folderName)
} else {
// Process messages using thread structure
@@ -457,8 +457,9 @@ func (s *Service) SyncThreadsForFolder(ctx context.Context, userID, folderName s
// Update sync info with the highest UID
highestUIDInt64 := int64(fullResult.highestUID)
if err := db.SetFolderSyncInfo(ctx, s.dbPool, userID, folderName, &highestUIDInt64); err != nil {
- log.Printf("Warning: Failed to set folder sync info: %v", err)
- // Don't fail the entire sync if timestamp update fails
+ log.Printf("IMAP Sync: Warning: Failed to set folder sync info for user %s, folder %s: %v", userID, folderName, err)
+ } else {
+ log.Printf("IMAP Sync: Updated sync info for user %s, folder %s (highest UID: %d)", userID, folderName, fullResult.highestUID)
}
// Trigger background thread count update
diff --git a/backend/internal/imap/service_interface.go b/backend/internal/imap/service_interface.go
index 9b06f03..63c0bb5 100644
--- a/backend/internal/imap/service_interface.go
+++ b/backend/internal/imap/service_interface.go
@@ -4,6 +4,7 @@ import (
"context"
"github.com/vdavid/vmail/backend/internal/models"
+ "github.com/vdavid/vmail/backend/internal/websocket"
)
// MessageToSync represents a message that needs to be synced.
@@ -35,6 +36,10 @@ type IMAPService interface {
// Returns threads, total count, and error.
Search(ctx context.Context, userID string, query string, page, limit int) ([]*models.Thread, int, error)
+ // StartIdleListener runs an IMAP IDLE loop for a user and pushes events to the WebSocket hub.
+ // This function blocks until the context is canceled.
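+	// It is typically started in its own goroutine when the first WebSocket connection
+	// for a user is registered, for example (sketch): go svc.StartIdleListener(ctx, userID, hub).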
+ StartIdleListener(ctx context.Context, userID string, hub *websocket.Hub)
+
// Close closes the service and cleans up connections.
Close()
}
diff --git a/backend/internal/websocket/hub.go b/backend/internal/websocket/hub.go
new file mode 100644
index 0000000..544f87b
--- /dev/null
+++ b/backend/internal/websocket/hub.go
@@ -0,0 +1,121 @@
+package websocket
+
+import (
+ "log"
+ "sync"
+ "time"
+
+ "github.com/gorilla/websocket"
+)
+
+// Client wraps a WebSocket connection.
+type Client struct {
+ conn *websocket.Conn
+}
+
+// Conn returns the underlying WebSocket connection.
+func (c *Client) Conn() *websocket.Conn {
+ return c.conn
+}
+
+// Hub manages active WebSocket connections per user.
+// It supports multiple connections per user (e.g., multiple tabs).
+type Hub struct {
+ mu sync.RWMutex
+ clients map[string]map[*Client]struct{} // userID -> set of clients
+ maxPerUser int
+}
+
+// NewHub creates a new Hub with a per-user connection limit.
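+//
+// Typical usage (illustrative sketch; HTTP handler wiring not shown):
+//
+//	hub := NewHub(10)
+//	client := hub.Register(userID, conn) // on WebSocket upgrade; nil if over the per-user limit
+//	defer hub.Unregister(userID, client)
+//	hub.Send(userID, payload)            // e.g. from the IMAP IDLE listener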
+func NewHub(maxPerUser int) *Hub {
+ if maxPerUser <= 0 {
+ maxPerUser = 10
+ }
+ return &Hub{
+ clients: make(map[string]map[*Client]struct{}),
+ maxPerUser: maxPerUser,
+ }
+}
+
+// Register adds a WebSocket connection for the given user.
+// If the per-user limit is exceeded, the new connection is closed and nil is returned.
+func (h *Hub) Register(userID string, conn *websocket.Conn) *Client {
+ h.mu.Lock()
+ defer h.mu.Unlock()
+
+ userClients, ok := h.clients[userID]
+ if !ok {
+ userClients = make(map[*Client]struct{})
+ h.clients[userID] = userClients
+ }
+
+ if len(userClients) >= h.maxPerUser {
+ log.Printf("websocket: user %s exceeded max connections (%d), closing new connection", userID, h.maxPerUser)
+ _ = conn.WriteControl(
+ websocket.CloseMessage,
+ websocket.FormatCloseMessage(websocket.ClosePolicyViolation, "too many connections for this user"),
+ // Use zero deadline - best effort.
+ // See https://pkg.go.dev/github.com/gorilla/websocket#Conn.WriteControl
+ // for details.
+ //nolint:exhaustruct
+ time.Time{},
+ )
+ _ = conn.Close()
+ return nil
+ }
+
+ client := &Client{conn: conn}
+ userClients[client] = struct{}{}
+ return client
+}
+
+// Unregister removes a client for the given user and closes the connection.
+func (h *Hub) Unregister(userID string, client *Client) {
+ if client == nil {
+ return
+ }
+
+ h.mu.Lock()
+ defer h.mu.Unlock()
+
+ userClients, ok := h.clients[userID]
+ if !ok {
+ _ = client.conn.Close()
+ return
+ }
+
+ delete(userClients, client)
+
+ if len(userClients) == 0 {
+ delete(h.clients, userID)
+ }
+
+ _ = client.conn.Close()
+}
+
+// Send broadcasts a message to all active clients for the user.
+func (h *Hub) Send(userID string, msg []byte) {
+	// Snapshot the client set under the read lock so concurrent Register/Unregister
+	// calls (which mutate the map) cannot race with this iteration.
+	h.mu.RLock()
+	clients := make([]*Client, 0, len(h.clients[userID]))
+	for c := range h.clients[userID] {
+		clients = append(clients, c)
+	}
+	h.mu.RUnlock()
+
+	if len(clients) == 0 {
+		return
+	}
+
+	for _, client := range clients {
+ if err := client.conn.WriteMessage(websocket.TextMessage, msg); err != nil {
+ log.Printf("websocket: failed to write message for user %s: %v", userID, err)
+ // Best-effort cleanup: unregister this client.
+ go h.Unregister(userID, client)
+ }
+ }
+}
+
+// ActiveConnections returns the number of active WebSocket connections for a user.
+func (h *Hub) ActiveConnections(userID string) int {
+ h.mu.RLock()
+ defer h.mu.RUnlock()
+
+ return len(h.clients[userID])
+}
diff --git a/docs/architecture.md b/docs/architecture.md
index 7f8d3e7..6461520 100644
--- a/docs/architecture.md
+++ b/docs/architecture.md
@@ -134,14 +134,33 @@ unique identifier, such as the `Message-ID` header of the root/first message in
### Real-time API (WebSockets)
-For real-time updates (like new emails), the front end will open a WebSocket connection.
+For real-time updates (like new emails), the front end opens a WebSocket connection.
-* [ ] `GET /api/v1/ws`: Upgrades the HTTP connection to a WebSocket.
+* [x] `GET /api/v1/ws`: Upgrades the HTTP connection to a WebSocket.
The server uses this connection to push updates to the client.
+ * The backend maintains a per-process **WebSocket Hub** that:
+ * Tracks multiple connections per user (`userID -> set of connections`).
+ * Limits the number of concurrent connections per user (default: 10).
+ * Sends messages (like new-email notifications) to all active connections for a user.
+ * When the first WebSocket connection for a user is established, the backend starts an **IMAP IDLE listener**:
+ * Uses a dedicated IMAP listener connection from the pool.
+ * Runs `IDLE` on the `INBOX` folder.
+ * On new-mail notifications, performs an **incremental sync** for `INBOX` immediately and then pushes an event to the WebSocket hub.
* **Server-to-client message example:**
```json
- {"type": "new_message", "folder": "INBOX"}
+ {"type": "new_email", "folder": "INBOX"}
```
+ * The front end listens for `new_email` messages and calls `queryClient.invalidateQueries({ queryKey: ['threads', folder] })`
+ so `GET /threads?folder=...` refetches and the new email appears.
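+
+A minimal Go sketch of the backend flow described above (IDLE signal, incremental sync, hub push). The
+`newMail` channel, the `syncFolder` callback, and the package wiring here are assumptions for illustration;
+this is not the actual `StartIdleListener` implementation.
+
+```go
+package idlesketch
+
+import (
+	"context"
+	"log"
+
+	"github.com/vdavid/vmail/backend/internal/websocket"
+)
+
+// runIdleLoop turns new-mail signals from IMAP IDLE into an incremental sync of
+// INBOX followed by a push to the WebSocket hub (hypothetical sketch).
+func runIdleLoop(ctx context.Context, userID string, newMail <-chan struct{},
+	syncFolder func(ctx context.Context, userID, folder string) error, hub *websocket.Hub) {
+	for {
+		select {
+		case <-ctx.Done():
+			return // stop when the context is canceled
+		case <-newMail:
+			if err := syncFolder(ctx, userID, "INBOX"); err != nil {
+				log.Printf("idle: incremental sync failed for user %s: %v", userID, err)
+				continue
+			}
+			hub.Send(userID, []byte(`{"type":"new_email","folder":"INBOX"}`))
+		}
+	}
+}
+```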
+
+**Cache TTL as fallback:**
+The 5‑minute cache TTL used by `GET /threads` is now a **backup mechanism**:
+
+* Real-time updates (IDLE + WebSockets) cause immediate incremental syncs for `INBOX`.
+* TTL-based sync still runs when:
+ * WebSockets are not connected or temporarily unavailable.
+ * The IDLE listener fails or is not yet started.
+ * A user navigates to a folder without real-time support.
### Technical decisions
diff --git a/e2e/fixtures/auth.ts b/e2e/fixtures/auth.ts
index 07984b0..d976cb7 100644
--- a/e2e/fixtures/auth.ts
+++ b/e2e/fixtures/auth.ts
@@ -6,10 +6,10 @@ import { Page } from '@playwright/test'
* We intercept API requests and modify the Authorization header to include the email.
*/
export async function setupAuth(page: Page, userEmail: string = 'test@example.com') {
- // Intercept all API requests and modify the Authorization header
+ // Intercept all API requests and test endpoints, and modify the Authorization header
// to include the email in the token format "email:user@example.com"
// This allows the backend to extract the email in test mode
- await page.route('**/api/**', async (route) => {
+ const addAuthHeader = async (route: any) => {
const request = route.request()
const headers = { ...request.headers() }
@@ -19,7 +19,12 @@ export async function setupAuth(page: Page, userEmail: string = 'test@example.co
// Continue with the modified request
await route.continue({ headers })
- })
+ }
+
+ // Intercept API routes
+ await page.route('**/api/**', addAuthHeader)
+ // Intercept test routes
+ await page.route('**/test/**', addAuthHeader)
}
/**
diff --git a/e2e/tests/inbox.spec.ts b/e2e/tests/inbox.spec.ts
index 66e76f9..64f7127 100644
--- a/e2e/tests/inbox.spec.ts
+++ b/e2e/tests/inbox.spec.ts
@@ -1,10 +1,6 @@
import { test, expect } from '@playwright/test'
-import {
- clickFirstEmail,
- setupInboxForNavigation,
- setupInboxTest,
-} from '../utils/helpers'
+import { clickFirstEmail, setupInboxForNavigation, setupInboxTest } from '../utils/helpers'
/**
* Test 2: Existing User Read-Only Flow
@@ -137,6 +133,88 @@ test.describe('Existing User Read-Only Flow', () => {
}
})
+ test('shows new emails in real time without page reload', async ({ page }) => {
+ // Capture console logs to debug WebSocket issues
+ const consoleMessages: string[] = []
+ page.on('console', (msg) => {
+ const text = msg.text()
+ consoleMessages.push(`[${msg.type()}] ${text}`)
+ // Log errors and warnings immediately
+ if (msg.type() === 'error' || msg.type() === 'warning') {
+ console.log(`Browser ${msg.type()}:`, text)
+ }
+ })
+
+ // Capture network requests to see WebSocket connection status
+ const networkErrors: string[] = []
+ page.on('requestfailed', (request) => {
+ const error = `${request.method()} ${request.url()} - ${request.failure()?.errorText}`
+ networkErrors.push(error)
+ console.log('Network error:', error)
+ })
+
+ const result = await setupInboxTest(page)
+ if (!result) {
+ // Skip if redirected to settings
+ return
+ }
+
+ // Wait for WebSocket connection to be established.
+ // The connection status banner only shows when disconnected, so wait for it to not be visible.
+ // Give it a few seconds for the WebSocket to connect.
+ await page.waitForTimeout(3000)
+
+ // Verify WebSocket is connected by checking that the connection banner is not visible
+ // (it only shows when status is 'disconnected')
+ const connectionBanner = page.locator('text=Connection lost')
+ const bannerVisible = await connectionBanner.isVisible().catch(() => false)
+
+ if (bannerVisible) {
+ console.log('WebSocket connection banner is visible - connection may not be established')
+ console.log('Console messages:', consoleMessages.filter(m => m.includes('WebSocket') || m.includes('error')))
+ console.log('Network errors:', networkErrors)
+ }
+
+ // Capture current thread subjects (if any).
+ const initialSubjects = await page
+ .locator('[data-testid="email-subject"]')
+ .allInnerTexts()
+
+ // Trigger backend helper that appends a new message to INBOX on the test IMAP server.
+ // The backend is expected to expose a test-only endpoint for this.
+ // Use page.evaluate to make the request from the page context so it goes through route interceptors
+ const response = await page.evaluate(async () => {
+ const res = await fetch('/test/add-imap-message', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ folder: 'INBOX',
+ subject: 'E2E Real-Time Test',
+ from: 'sender@example.com',
+ to: 'username@example.com',
+ }),
+ })
+ return { status: res.status, statusText: res.statusText }
+ })
+
+ if (response.status !== 204) {
+ console.log(`Test endpoint returned status ${response.status}: ${response.statusText}`)
+ }
+
+ // Wait for the new subject to appear without reloading the page.
+ await expect(
+ page.locator('[data-testid="email-subject"]', { hasText: 'E2E Real-Time Test' }),
+ ).toBeVisible({ timeout: 15000 })
+
+ const updatedSubjects = await page
+ .locator('[data-testid="email-subject"]')
+ .allInnerTexts()
+
+ expect(updatedSubjects).not.toEqual(initialSubjects)
+ })
+
test('clicking email navigates to thread with correct URL and displays body', async ({
page,
}) => {
diff --git a/frontend/src/components/ConnectionStatusBanner.tsx b/frontend/src/components/ConnectionStatusBanner.tsx
new file mode 100644
index 0000000..62aa646
--- /dev/null
+++ b/frontend/src/components/ConnectionStatusBanner.tsx
@@ -0,0 +1,26 @@
+import { useConnectionStore } from '../store/connection.store'
+
+export default function ConnectionStatusBanner() {
+ const { status, triggerReconnect } = useConnectionStore()
+
+ if (status !== 'disconnected') {
+ return null
+ }
+
+  return (
+    <div role="alert">
+      <span>Connection lost. New emails may be delayed.</span>
+      <button type="button" onClick={triggerReconnect}>
+        Reconnect
+      </button>
+    </div>
+  )
+}
diff --git a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx
index f549cc9..9f02ef3 100644
--- a/frontend/src/components/Layout.tsx
+++ b/frontend/src/components/Layout.tsx
@@ -1,7 +1,9 @@
import { useState, type ReactNode } from 'react'
import { useKeyboardShortcuts } from '../hooks/useKeyboardShortcuts'
+import { useWebSocket } from '../hooks/useWebSocket'
+import ConnectionStatusBanner from './ConnectionStatusBanner'
import Header from './Header'
import Sidebar from './Sidebar'
@@ -11,10 +13,12 @@ interface LayoutProps {
export default function Layout({ children }: LayoutProps) {
useKeyboardShortcuts()
+ useWebSocket()
const [isSidebarOpen, setIsSidebarOpen] = useState(false)
return (
+      <ConnectionStatusBanner />
      <Header onMenuClick={() => {
setIsSidebarOpen(true)
diff --git a/frontend/src/hooks/useWebSocket.test.tsx b/frontend/src/hooks/useWebSocket.test.tsx
new file mode 100644
index 0000000..c99e1f0
--- /dev/null
+++ b/frontend/src/hooks/useWebSocket.test.tsx
@@ -0,0 +1,89 @@
+import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
+import { act, render } from '@testing-library/react'
+import { describe, expect, it, vi, beforeEach, afterEach } from 'vitest'
+
+import { useConnectionStore } from '../store/connection.store'
+
+import { useWebSocket } from './useWebSocket'
+
+class MockSocket {
+ static instances: MockSocket[] = []
+ onopen: (() => void) | null = null
+ onmessage: ((event: MessageEvent) => void) | null = null
+ onerror: (() => void) | null = null
+ onclose: ((event: CloseEvent) => void) | null = null
+
+ url: string
+
+ constructor(url: string) {
+ this.url = url
+ MockSocket.instances.push(this)
+ }
+
+ send(): void {}
+
+ close() {
+ if (this.onclose) {
+ this.onclose(new CloseEvent('close'))
+ }
+ }
+}
+
+function TestComponent() {
+ useWebSocket()
+ return null
+}
+
+function renderWithClient(queryClient: QueryClient) {
+ return render(
+    <QueryClientProvider client={queryClient}>
+      <TestComponent />
+    </QueryClientProvider>,
+ )
+}
+
+describe('useWebSocket', () => {
+ beforeEach(() => {
+ // Reset connection store between tests
+ useConnectionStore.setState({
+ status: 'connecting',
+ lastError: null,
+ forceReconnectToken: 0,
+ })
+ vi.stubGlobal('WebSocket', MockSocket as unknown as typeof WebSocket)
+ })
+
+ afterEach(() => {
+ MockSocket.instances = []
+ vi.unstubAllGlobals()
+ })
+
+ it('invalidates threads query when new_email message is received', () => {
+ const queryClient = new QueryClient({
+ defaultOptions: {
+ queries: { retry: false },
+ },
+ })
+ const invalidateSpy = vi
+ .spyOn(queryClient, 'invalidateQueries')
+ .mockResolvedValue(undefined)
+
+ renderWithClient(queryClient)
+
+ // Grab the created mock socket instance.
+ const socket = MockSocket.instances[0]
+ expect(socket).toBeDefined()
+
+ act(() => {
+ const event = new MessageEvent('message', {
+ data: JSON.stringify({ type: 'new_email', folder: 'INBOX' }),
+ })
+ socket.onmessage?.(event)
+ })
+
+ expect(invalidateSpy).toHaveBeenCalledWith({
+ queryKey: ['threads', 'INBOX'],
+ exact: false,
+ })
+ })
+})
diff --git a/frontend/src/hooks/useWebSocket.ts b/frontend/src/hooks/useWebSocket.ts
new file mode 100644
index 0000000..f9facd5
--- /dev/null
+++ b/frontend/src/hooks/useWebSocket.ts
@@ -0,0 +1,143 @@
+import { useQueryClient } from '@tanstack/react-query'
+import { useEffect, useRef } from 'react'
+
+import { useConnectionStore } from '../store/connection.store'
+
+export function useWebSocket() {
+ const queryClient = useQueryClient()
+ const { setStatus, setLastError, forceReconnectToken } = useConnectionStore()
+ const queryClientRef = useRef(queryClient)
+ const socketRef = useRef(null)
+  const socketRef = useRef<WebSocket | null>(null)
+  const socketCreationTimeRef = useRef<number | null>(null)
+ // Keep refs up to date without causing re-renders
+ useEffect(() => {
+ queryClientRef.current = queryClient
+ }, [queryClient])
+
+ useEffect(() => {
+ // Check if we already have an active socket from a previous effect run (StrictMode)
+ const existingSocket = socketRef.current
+ if (existingSocket) {
+ const state = existingSocket.readyState
+ // Try to reuse existing socket
+ if (state === WebSocket.OPEN || state === WebSocket.CONNECTING) {
+ // Update status to match the current state
+ if (state === WebSocket.OPEN) {
+ setStatus('connected')
+ setLastError(null)
+ } else {
+ setStatus('connecting')
+ }
+ // Return early - don't create a new socket or set up handlers
+ // The existing socket will continue with its existing handlers
+ // No cleanup needed since we're not creating anything new
+ return
+ }
+ // Socket is closed or closing, create a new one
+ }
+
+ setStatus('connecting')
+
+ // Get the token (currently hardcoded as "token", same as used in API calls).
+ // TODO: When Authelia is implemented, this should get the actual JWT token.
+ const token = 'token'
+
+ const wsEnvUrl = import.meta.env.VITE_WS_URL as string | undefined
+ let wsUrl: string
+ if (wsEnvUrl && wsEnvUrl.length > 0) {
+ wsUrl = wsEnvUrl
+ } else {
+ const baseUrl = `${window.location.origin.replace(/^http/, 'ws')}/api/v1/ws`
+ // Append token as query parameter since WebSocket connections can't set headers.
+ wsUrl = `${baseUrl}?token=${encodeURIComponent(token)}`
+ }
+
+ // Connect
+ const socket = new WebSocket(wsUrl)
+ const socketInstance = socket
+ socketRef.current = socket
+ socketCreationTimeRef.current = Date.now()
+
+ socket.onopen = () => {
+ // Only update state if this is still the current socket
+ if (socketRef.current === socketInstance) {
+ setStatus('connected')
+ setLastError(null)
+ } else {
+ // Connection opened but socket ref changed (StrictMode)
+ socket.close()
+ }
+ }
+
+ socket.onerror = (error) => {
+ // eslint-disable-next-line no-console -- We do want to log this in production too
+ console.error('WebSocket: Error occurred', error, 'readyState:', socket.readyState)
+ if (socketRef.current === socketInstance) {
+ setStatus('disconnected')
+ setLastError('WebSocket error')
+ }
+ }
+
+ socket.onclose = () => {
+ if (socketRef.current === socketInstance) {
+ socketRef.current = null
+ setStatus('disconnected')
+ }
+ }
+
+ socket.onmessage = (event) => {
+ if (socketRef.current !== socketInstance) {
+ return
+ }
+ try {
+ const data = JSON.parse(event.data as string) as { type?: string; folder?: string }
+ if (data.type === 'new_email' && data.folder) {
+ // Invalidate all queries that start with ['threads', folder]
+ // This will match ['threads', folder, page, limit] for any page/limit
+ queryClientRef.current
+ .invalidateQueries({
+ queryKey: ['threads', data.folder],
+ exact: false, // Match all queries that start with this key
+ })
+ .catch((err: unknown) => {
+ // eslint-disable-next-line no-console -- Weird error, better log it
+ console.error('WebSocket: Failed to invalidate queries', err)
+ })
+ }
+ } catch (err) {
+ // eslint-disable-next-line no-console -- We actually want to log this
+ console.error('WebSocket: Failed to parse message', err, event.data)
+ }
+ }
+
+ return () => {
+ // Cleanup method
+
+ // Only clean up if this is still the current socket
+ if (socketRef.current !== socketInstance) {
+ return
+ }
+
+ const timeSinceCreation = socketCreationTimeRef.current
+ ? Date.now() - socketCreationTimeRef.current
+ : Infinity
+
+ // In StrictMode, effects run twice. If cleanup is called very soon after creation,
+ // it's likely a StrictMode double-mount. Don't close immediately.
+      // The 100 ms threshold is arbitrary, so treat this check as a heuristic.
+ if (timeSinceCreation < 100) {
+ return
+ }
+
+ socketRef.current = null
+ if (
+ socket.readyState === WebSocket.CONNECTING ||
+ socket.readyState === WebSocket.OPEN
+ ) {
+ // Close socket as cleanup
+ socket.close()
+ }
+ }
+ }, [forceReconnectToken, setLastError, setStatus])
+}
diff --git a/frontend/src/store/connection.store.ts b/frontend/src/store/connection.store.ts
new file mode 100644
index 0000000..e342780
--- /dev/null
+++ b/frontend/src/store/connection.store.ts
@@ -0,0 +1,30 @@
+import { create } from 'zustand'
+
+type ConnectionStatus = 'connected' | 'connecting' | 'disconnected'
+
+interface ConnectionState {
+ status: ConnectionStatus
+ lastError: string | null
+ forceReconnectToken: number
+ setStatus: (status: ConnectionStatus) => void
+ setLastError: (message: string | null) => void
+ triggerReconnect: () => void
+}
+
+export const useConnectionStore = create<ConnectionState>((set) => ({
+ status: 'connecting',
+ lastError: null,
+ forceReconnectToken: 0,
+ setStatus: (status) => {
+ set({ status })
+ },
+ setLastError: (message) => {
+ set({ lastError: message })
+ },
+ triggerReconnect: () => {
+ set((state) => ({
+ forceReconnectToken: state.forceReconnectToken + 1,
+ status: 'connecting',
+ }))
+ },
+}))
diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts
index a31d64c..758da14 100644
--- a/frontend/vite.config.ts
+++ b/frontend/vite.config.ts
@@ -18,6 +18,11 @@ export default defineConfig({
strictPort: true,
proxy: {
'/api': {
+ target: process.env.VITE_API_URL || 'http://localhost:11764',
+ changeOrigin: true, // Needed for CORS
+ ws: true, // Enable WebSocket proxying
+ },
+ '/test': {
target: process.env.VITE_API_URL || 'http://localhost:11764',
changeOrigin: true,
},
@@ -35,6 +40,11 @@ export default defineConfig({
strictPort: true,
proxy: {
'/api': {
+ target: process.env.VITE_API_URL || 'http://localhost:11764',
+ changeOrigin: true, // Needed for CORS
+ ws: true, // Enable WebSocket proxying
+ },
+ '/test': {
target: process.env.VITE_API_URL || 'http://localhost:11764',
changeOrigin: true,
},