diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..4136a0a5 --- /dev/null +++ b/.env.example @@ -0,0 +1,55 @@ +# Ushadow Environment Configuration Template +# Copy this file to .env and customize for your environment +# DO NOT COMMIT .env - it contains environment-specific configuration + +# ========================================== +# ENVIRONMENT & PROJECT NAMING +# ========================================== +ENV_NAME=ushadow +COMPOSE_PROJECT_NAME=ushadow + +# ========================================== +# PORT CONFIGURATION +# ========================================== +PORT_OFFSET=10 +BACKEND_PORT=8010 +WEBUI_PORT=3010 + +# ========================================== +# DATABASE ISOLATION +# ========================================== +MONGODB_DATABASE=ushadow +REDIS_DATABASE=0 + +# ========================================== +# CORS & FRONTEND CONFIGURATION +# ========================================== +CORS_ORIGINS=http://localhost:3010,http://127.0.0.1:3010,http://localhost:8010,http://127.0.0.1:8010 +VITE_BACKEND_URL=http://localhost:8010 +VITE_ENV_NAME=ushadow +HOST_IP=localhost + +# Development mode +DEV_MODE=true + +# ========================================== +# SHARE LINK CONFIGURATION +# ========================================== +# Base URL for share links (highest priority if set) +# SHARE_BASE_URL=https://ushadow.tail12345.ts.net + +# Public gateway URL for external friend sharing (requires share-gateway deployment) +# SHARE_PUBLIC_GATEWAY=https://share.yourdomain.com + +# Share feature toggles +SHARE_VALIDATE_RESOURCES=false # Enable strict resource validation +SHARE_VALIDATE_TAILSCALE=false # Enable Tailscale IP validation + +# ========================================== +# KEYCLOAK CONFIGURATION +# ========================================== +# SECURITY: Change these defaults in production! 
+KEYCLOAK_ADMIN=admin +KEYCLOAK_ADMIN_PASSWORD=changeme +KEYCLOAK_PORT=8081 +KEYCLOAK_MGMT_PORT=9000 diff --git a/.githooks/README.md b/.githooks/README.md new file mode 100644 index 00000000..d7e000fb --- /dev/null +++ b/.githooks/README.md @@ -0,0 +1,49 @@ +# Git Hooks + +This directory contains git hooks that are **committed to the repository**. + +## Setup (One-Time) + +After cloning, configure git to use these hooks: + +```bash +git config core.hooksPath .githooks +``` + +## Automatic Setup + +Add this to your `~/.gitconfig` to automatically use `.githooks` in all repos: + +```ini +[init] + templateDir = ~/.git-templates +``` + +Then create `~/.git-templates/hooks/post-checkout` (git has no `post-clone` hook; template hooks are copied into new clones and `post-checkout` runs after the initial checkout): +```bash +#!/bin/bash +if [ -d .githooks ]; then + git config core.hooksPath .githooks +fi +``` + +## Available Hooks + +### post-checkout +Automatically configures sparse checkout for chronicle and mycelia submodules to prevent circular dependencies. + +**What it does:** +- Chronicle: Excludes `extras/mycelia/` +- Mycelia: Excludes `friend/` + +**When it runs:** +- After `git checkout` +- After `git submodule update` +- After initial clone (with setup) + +## Testing + +Test the hook manually: +```bash +./.githooks/post-checkout +``` diff --git a/.githooks/post-checkout b/.githooks/post-checkout new file mode 100755 index 00000000..5537edff --- /dev/null +++ b/.githooks/post-checkout @@ -0,0 +1,45 @@ +#!/bin/bash +# Post-checkout hook to configure sparse checkout for submodules +# This prevents circular dependencies between chronicle and mycelia + +set -e + +echo "🔧 Configuring sparse checkout for submodules..." 
+ +# Configure chronicle to exclude extras/mycelia +if [ -d "chronicle" ]; then + CHRONICLE_GIT="$(cd chronicle && git rev-parse --git-dir 2>/dev/null || echo "")" + if [ -n "$CHRONICLE_GIT" ] && [ -d "$CHRONICLE_GIT" ]; then + echo " ๐Ÿ“ Configuring chronicle (excluding extras/mycelia)" + mkdir -p "$CHRONICLE_GIT/info" + cat > "$CHRONICLE_GIT/info/sparse-checkout" <<'SPARSE' +/* +!extras/mycelia/ +SPARSE + (cd chronicle && git config core.sparseCheckout true && git read-tree -mu HEAD 2>/dev/null || true) + fi +fi + +# Configure mycelia to exclude friend +if [ -d "mycelia" ]; then + MYCELIA_GIT="$(cd mycelia && git rev-parse --git-dir 2>/dev/null || echo "")" + if [ -n "$MYCELIA_GIT" ] && [ -d "$MYCELIA_GIT" ]; then + echo " ๐Ÿ“ Configuring mycelia (excluding friend)" + mkdir -p "$MYCELIA_GIT/info" + cat > "$MYCELIA_GIT/info/sparse-checkout" <<'SPARSE' +/* +!friend/ +SPARSE + (cd mycelia && git config core.sparseCheckout true && git read-tree -mu HEAD 2>/dev/null || true) + fi +fi + +# Configure openmemory (no exclusions needed currently) +if [ -d "openmemory" ]; then + OPENMEMORY_GIT="$(cd openmemory && git rev-parse --git-dir 2>/dev/null || echo "")" + if [ -n "$OPENMEMORY_GIT" ] && [ -d "$OPENMEMORY_GIT" ]; then + echo " ๐Ÿ“ Openmemory configured (no exclusions)" + fi +fi + +echo "โœ… Sparse checkout configured successfully" diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..2bc78456 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,12 @@ +[submodule "chronicle"] + path = chronicle + url = https://github.com/Ushadow-io/chronicle.git + update = checkout + +[submodule "mycelia"] + path = mycelia + url = https://github.com/mycelia-tech/mycelia.git + update = checkout +[submodule "openmemory"] + path = openmemory + url = https://github.com/Ushadow-io/mem0.git diff --git a/DECISION_POINT_1.md b/DECISION_POINT_1.md new file mode 100644 index 00000000..a4c773b6 --- /dev/null +++ b/DECISION_POINT_1.md @@ -0,0 +1,193 @@ +# Decision Point #1: Resource 
Validation + +## Current Status + +โœ… **Feature is ready to use** - Sharing works with lazy validation (no resource checking) +๐Ÿ“ **Your choice** - Implement strict validation if you want to prevent broken share links + +## How It Works Now + +When you create a share link, the system: +1. โœ… Creates share token in MongoDB +2. โœ… Generates share URL +3. โš ๏ธ **Does NOT verify** the conversation/resource exists +4. โœ… Returns link to user + +**Result**: Share links are created instantly, but might be broken if the resource doesn't exist. + +--- + +## Enabling Strict Validation + +### Step 1: Set Environment Variable + +Add to your `.env` file: +```bash +SHARE_VALIDATE_RESOURCES=true +``` + +This tells the share service to validate resources before creating share links. + +### Step 2: Implement Validation Logic + +**Location**: `ushadow/backend/src/services/share_service.py` line ~340 + +I've prepared the function structure. You need to add **5-10 lines** of code to validate the resource exists. 
+ +--- + +## Implementation Options + +Since Mycelia uses a resource-based API (not REST), you have two approaches: + +### Option A: Validate via Mycelia Objects API (Recommended) + +```python +# In _validate_resource_exists(), around line 340: + +if resource_type == ResourceType.CONVERSATION: + try: + async with httpx.AsyncClient(timeout=5.0) as client: + # Call Mycelia objects resource with "get" action + response = await client.post( + "http://mycelia-backend:8000/api/resource/tech.mycelia.objects", + json={ + "action": "get", + "id": resource_id + }, + headers={"Authorization": f"Bearer {self._get_service_token()}"} + ) + + if response.status_code == 404: + raise ValueError(f"Conversation {resource_id} not found in Mycelia") + elif response.status_code != 200: + raise ValueError(f"Failed to validate conversation: {response.status_code}") + + except httpx.RequestError as e: + logger.error(f"Failed to connect to Mycelia: {e}") + raise ValueError("Could not connect to Mycelia to validate conversation") +``` + +**Pros**: Validates against Mycelia directly +**Cons**: Requires service token for authentication + +--- + +### Option B: Validate via Ushadow Generic Proxy + +```python +if resource_type == ResourceType.CONVERSATION: + try: + async with httpx.AsyncClient(timeout=5.0) as client: + # Use ushadow's generic proxy to Mycelia + response = await client.post( + "http://localhost:8080/api/services/mycelia-backend/proxy/api/resource/tech.mycelia.objects", + json={ + "action": "get", + "id": resource_id + } + ) + + if response.status_code == 404: + raise ValueError(f"Conversation {resource_id} not found") + elif response.status_code != 200: + raise ValueError(f"Failed to validate conversation: {response.status_code}") + + except httpx.RequestError as e: + logger.error(f"Mycelia validation failed: {e}") + raise ValueError("Could not validate conversation") +``` + +**Pros**: Leverages existing proxy, handles auth automatically +**Cons**: Assumes ushadow proxy is 
available + +--- + +### Option C: Skip Validation (Current Behavior) + +Don't set `SHARE_VALIDATE_RESOURCES=true` and leave the TODO as-is. + +**Pros**: Instant share creation, no API calls +**Cons**: Users might create broken share links + +--- + +## Trade-offs to Consider + +| Aspect | Lazy Validation | Strict Validation | +|--------|----------------|-------------------| +| **Speed** | โœ… Instant (~5ms) | โš ๏ธ Slower (~50-100ms) | +| **Reliability** | โš ๏ธ Might create broken links | โœ… Only valid links | +| **UX** | โœ… Fast feedback | โš ๏ธ Slight delay | +| **Dependencies** | โœ… No backend calls | โš ๏ธ Requires Mycelia/Chronicle | +| **Error handling** | โš ๏ธ Broken links fail silently | โœ… Immediate error feedback | + +--- + +## My Recommendation + +**Start with Lazy Validation (current behavior)** because: +1. It's simpler - no extra code needed +2. Users rarely share non-existent conversations +3. When they access a broken link, they get a clear "not found" error +4. 
You can always add strict validation later if needed + +**Implement Strict Validation if:** +- You have frequent issues with broken share links +- You want immediate feedback during share creation +- The ~50-100ms delay is acceptable for your UX + +--- + +## Testing Your Implementation + +Once you've implemented validation: + +```bash +# Test with valid conversation +curl -X POST http://localhost:8080/api/share/create \ + -H "Content-Type: application/json" \ + -H "Cookie: ushadow_auth=YOUR_TOKEN" \ + -d '{ + "resource_type": "conversation", + "resource_id": "VALID_CONVERSATION_ID", + "permissions": ["read"] + }' + +# Expected: 201 Created with share URL + +# Test with invalid conversation +curl -X POST http://localhost:8080/api/share/create \ + -H "Content-Type: application/json" \ + -H "Cookie: ushadow_auth=YOUR_TOKEN" \ + -d '{ + "resource_type": "conversation", + "resource_id": "INVALID_ID_12345", + "permissions": ["read"] + }' + +# Expected: 400 Bad Request with "Conversation not found" error +``` + +--- + +## Questions? + +**Q: What if Mycelia/Chronicle is down during validation?** +A: The validation will fail with "Could not connect" error, preventing share creation. Consider adding retry logic or circuit breaker. + +**Q: Should I validate memories too?** +A: Yes, add similar logic for `ResourceType.MEMORY` if users can share individual memories. + +**Q: Can I validate asynchronously (background job)?** +A: Not recommended - user needs immediate feedback. If validation is slow, consider caching resource existence. + +--- + +## Next Steps + +1. **Decide**: Lazy vs Strict validation +2. **If Strict**: Set `SHARE_VALIDATE_RESOURCES=true` in `.env` +3. **Implement**: Add 5-10 lines in `share_service.py` (see options above) +4. **Test**: Create shares with valid/invalid IDs +5. 
**Move to Decision Point #2**: User authorization checks diff --git a/DECISION_POINT_3.md b/DECISION_POINT_3.md new file mode 100644 index 00000000..1c9531ff --- /dev/null +++ b/DECISION_POINT_3.md @@ -0,0 +1,186 @@ +# Decision Point #3: Tailscale Network Validation + +## Current Status + +โœ… **Feature is optional** - Tailscale validation only applies when users create shares with `tailscale_only=true` +๐Ÿ“ **Your choice** - Implement if you want to restrict certain shares to your Tailscale network + +## How It Works Now + +**Without Tailscale validation** (current default): +- `tailscale_only=false` shares โ†’ Accessible from anywhere โœ… +- `tailscale_only=true` shares โ†’ Still accessible from anywhere โš ๏ธ (validation disabled) + +**With Tailscale validation** (when implemented): +- `tailscale_only=false` shares โ†’ Accessible from anywhere โœ… +- `tailscale_only=true` shares โ†’ Only accessible from Tailnet โœ… (validated) + +--- + +## When Do You Need This? + +**Skip Tailscale validation if:** +- You only use the public share gateway (all shares are `tailscale_only=false`) +- You trust users not to abuse `tailscale_only` flag +- Simpler setup is more important than this specific security control + +**Implement Tailscale validation if:** +- You want users to create Tailnet-only shares (private conversations) +- You expose ushadow directly to your Tailnet (not just via gateway) +- You need strong network-based access control + +--- + +## Implementation Options + +### Option A: IP Range Check (Recommended for Direct Tailscale) + +If ushadow runs **directly as a Tailscale node** (not behind a proxy): + +```python +# In share_service.py:_validate_tailscale_access(), around line 465: + +try: + ip = ipaddress.ip_address(request_ip) + tailscale_range = ipaddress.ip_network("100.64.0.0/10") + is_tailscale = ip in tailscale_range + logger.debug(f"IP {request_ip} {'is' if is_tailscale else 'is NOT'} in Tailscale range") + return is_tailscale +except ValueError: + 
logger.warning(f"Invalid IP address: {request_ip}") + return False +``` + +**How it works**: +- Tailscale uses CGNAT IP range 100.64.0.0/10 +- Check if request IP falls in this range +- Fast, no API calls + +**Pros**: Simple, fast, no external dependencies +**Cons**: Only works if ushadow is directly on Tailscale (not behind nginx/proxy) + +**Enable**: Set `SHARE_VALIDATE_TAILSCALE=true` in `.env` + +--- + +### Option B: Tailscale Serve Headers (For Tailscale Serve Setup) + +If you expose ushadow via **Tailscale Serve** (reverse proxy): + +**Current limitation**: This requires passing the full `Request` object, not just IP. + +**Architecture change needed**: +```python +# In share_service.py:validate_share_access() +# Instead of: +is_tailscale = await self._validate_tailscale_access(request_ip) + +# Pass full request: +is_tailscale = await self._validate_tailscale_access(request) + +# In _validate_tailscale_access(): +async def _validate_tailscale_access(self, request: Request) -> bool: + tailscale_user = request.headers.get("X-Tailscale-User") + if tailscale_user: + logger.debug(f"Validated Tailscale user: {tailscale_user}") + return True + return False +``` + +**How it works**: +- Tailscale Serve adds `X-Tailscale-User` header with authenticated user +- If header present โ†’ user is on your Tailnet +- Cryptographically verified by Tailscale + +**Pros**: Most secure, user identity available +**Cons**: Requires refactoring to pass Request object, only works with Tailscale Serve + +--- + +### Option C: Skip Validation (Current Default) + +Don't set `SHARE_VALIDATE_TAILSCALE=true` and leave as-is. + +**What happens**: +- All shares work regardless of IP +- `tailscale_only` flag is ignored (becomes cosmetic) +- Simpler setup, no code changes needed + +**Trade-off**: Users can't create truly Tailnet-restricted shares + +--- + +## My Recommendation + +### For Your Use Case (Public Gateway Architecture): + +**Skip Tailscale validation for now** because: + +1. 
**Your architecture**: Friends access via public gateway, not directly to ushadow +2. **Gateway handles it**: The gateway itself is on your Tailnet, providing network isolation +3. **Simpler**: One less thing to configure and maintain +4. **The flag still useful**: Even without validation, `tailscale_only` serves as metadata/intent + +**When you WOULD need it**: +- If users access ushadow directly via Tailscale (not just gateway) +- If you want to enforce Tailnet-only shares for specific conversations + +--- + +## Architecture Reminder + +``` +Public Share (tailscale_only=false): +Friend โ†’ Public Gateway โ†’ [Tailscale] โ†’ ushadow + +Tailscale-Only Share (tailscale_only=true): +Friend on your Tailnet โ†’ ushadow (direct access) + โ†‘ THIS is where Tailscale validation matters +``` + +The validation prevents a friend from accessing a `tailscale_only` share via the public gateway or from outside your network. + +--- + +## Testing Your Implementation + +Once implemented: + +```bash +# 1. Create Tailscale-only share +curl -X POST http://localhost:8080/api/share/create \ + -H "Content-Type: application/json" \ + -H "Cookie: ushadow_auth=YOUR_TOKEN" \ + -d '{ + "resource_type": "conversation", + "resource_id": "abc123", + "permissions": ["read"], + "tailscale_only": true + }' + +# 2. Try to access from Tailscale IP (100.64.x.x) +# Expected: โœ… Access granted + +# 3. Try to access from public IP (not Tailscale) +# Expected: โŒ 403 "Access restricted to Tailscale network" +``` + +--- + +## Summary + +| Option | When to Use | Complexity | Security | +|--------|-------------|------------|----------| +| **Skip** | Public gateway only | โญ Easy | Medium (gateway isolated) | +| **IP Range** | Direct Tailscale access | โญโญ Medium | High (network-level) | +| **Serve Headers** | Tailscale Serve setup | โญโญโญ Complex | Highest (crypto verified) | + +**Recommended**: Skip for now, implement later if needed. + +--- + +## Next Steps + +1. 
**Decide**: Do you need Tailscale-only shares? +2. **If No**: Leave as-is, move to frontend integration +3. **If Yes**: Set `SHARE_VALIDATE_TAILSCALE=true` and add 5-10 lines (Option A) diff --git a/KEYCLOAK_LOGIN_FIXES.md b/KEYCLOAK_LOGIN_FIXES.md new file mode 100644 index 00000000..4ede3654 --- /dev/null +++ b/KEYCLOAK_LOGIN_FIXES.md @@ -0,0 +1,296 @@ +# Keycloak Login Fixes - Complete Summary + +## Issues Fixed + +### 1. โœ… Login Page Shows Password Fields +**Problem**: Login page displayed username/password fields, but these were non-functional (Keycloak handles credentials, not the app). + +**Solution**: Replaced the entire login form with a single "Sign in with Keycloak" button that clearly indicates users will be redirected to Keycloak for authentication. + +**File Changed**: `ushadow/frontend/src/pages/LoginPage.tsx` + +**Before**: +```tsx + + + +``` + +**After**: +```tsx + +

You'll be redirected to Keycloak for secure authentication

+``` + +--- + +### 2. โœ… OAuth Callback Route Missing +**Problem**: After successful Keycloak login, users were redirected to `/oauth/callback?code=...`, but this route wasn't registered in the app. React Router didn't recognize it, so it redirected to the login page, creating an infinite loop. + +**Solution**: Added the OAuth callback route as a public route in App.tsx. + +**File Changed**: `ushadow/frontend/src/App.tsx` + +**Changes**: +1. Imported OAuthCallback component: + ```tsx + import OAuthCallback from './auth/OAuthCallback' + ``` + +2. Registered the route: + ```tsx + {/* Public Routes */} + } /> + ``` + +--- + +### 3. โœ… Keycloak Disabled in Backend +**Problem**: Keycloak was not enabled in the backend configuration, so: +- Redirect URI auto-registration didn't run +- Keycloak token validation wasn't active +- Backend defaulted to legacy JWT auth + +**Solution**: Enabled Keycloak in configuration files. + +**Files Changed**: +1. `config/config.defaults.yaml` - Added Keycloak configuration: + ```yaml + keycloak: + enabled: true + url: http://keycloak:8080 # Internal Docker URL + public_url: http://localhost:8081 # External browser URL + realm: ushadow + backend_client_id: ushadow-backend + frontend_client_id: ushadow-frontend + admin_user: admin + ``` + +2. 
`config/SECRETS/secrets.yaml` - Added Keycloak secrets: + ```yaml + keycloak: + admin_password: changeme + backend_client_secret: '' # Set after Keycloak setup + ``` + +--- + +## How OAuth Login Works Now + +### Flow Diagram + +``` +User clicks "Sign in with Keycloak" + โ†“ +Frontend redirects to Keycloak + (http://localhost:8081/realms/ushadow/protocol/openid-connect/auth) + โ†“ +User enters credentials at Keycloak + โ†“ +Keycloak redirects back to /oauth/callback?code=abc123&state=xyz + โ†“ +OAuthCallback component intercepts + โ†“ +Exchanges authorization code for tokens + (calls backend /api/auth/token/exchange) + โ†“ +Stores tokens in sessionStorage + โ†“ +Redirects to original destination (or /) + โ†“ +โœ… User is logged in! +``` + +### Security Features + +1. **PKCE Flow**: Code Challenge prevents authorization code interception +2. **State Parameter**: CSRF protection via random state token +3. **Session Storage**: Tokens stored in sessionStorage (cleared on tab close) +4. **Automatic Refresh**: Tokens auto-refresh 60 seconds before expiry + +--- + +## Testing the Login Flow + +### 1. Start Keycloak +```bash +docker-compose up -d keycloak +``` + +Wait for Keycloak to be ready (check logs): +```bash +docker-compose logs -f keycloak | grep "started" +``` + +### 2. Restart Backend +This triggers automatic redirect URI registration: +```bash +docker-compose restart backend +``` + +Check for successful registration: +```bash +docker-compose logs backend | grep KC-STARTUP +``` + +You should see: +``` +[KC-STARTUP] ๐Ÿ” Registering redirect URIs with Keycloak... +[KC-STARTUP] Environment: PORT_OFFSET=10 +[KC-STARTUP] โœ… Redirect URIs registered successfully +``` + +### 3. Test Login +1. Navigate to `http://localhost:3010/login` +2. Click "Sign in with Keycloak" +3. You'll be redirected to Keycloak at `http://localhost:8081` +4. Login with Keycloak credentials (default: admin / changeme) +5. You'll be redirected back to the app and logged in + +### 4. 
Verify Token Storage +Open browser DevTools โ†’ Application โ†’ Session Storage โ†’ `http://localhost:3010` + +You should see: +- `kc_access_token`: JWT access token +- `kc_refresh_token`: Refresh token +- `kc_id_token`: ID token with user info + +--- + +## Troubleshooting + +### Redirect URI Error +**Symptom**: "Invalid parameter: redirect_uri" when clicking login + +**Cause**: Keycloak client doesn't have the redirect URI whitelisted + +**Solution**: +1. Check if auto-registration succeeded: + ```bash + docker-compose logs backend | grep KC-STARTUP + ``` + +2. If it failed, manually register the URI: + - Go to Keycloak admin: `http://localhost:8081` + - Login with admin credentials + - Navigate to: Clients โ†’ ushadow-frontend โ†’ Settings + - Add to "Valid Redirect URIs": `http://localhost:3010/oauth/callback` + - Click "Save" + +### Still Redirects to Login +**Symptom**: After Keycloak login, you're sent back to the login page + +**Cause**: OAuth callback route not working + +**Check**: +1. Open browser DevTools โ†’ Console +2. Look for errors during callback processing +3. Check Network tab for failed API calls to `/api/auth/token/exchange` + +**Common Issues**: +- Backend not running +- Keycloak not reachable from backend +- CORS issues (check backend CORS configuration) + +### Token Exchange Fails +**Symptom**: Error message on callback page: "Authentication failed" + +**Check Backend Logs**: +```bash +docker-compose logs -f backend | grep -i keycloak +``` + +**Common Causes**: +- Keycloak client secret not configured +- Backend can't reach Keycloak at `http://keycloak:8080` +- PKCE verification failed (check code_verifier in sessionStorage) + +--- + +## Architecture Notes + +### Dual Authentication System + +The system now supports **both** authentication methods simultaneously: + +1. **Keycloak OAuth (Primary)** + - Modern SSO with federated identity + - Supports Google, GitHub, etc. (when configured in Keycloak) + - Used by default for new users + +2. 
**Legacy JWT (Fallback)** + - Email/password in ushadow database + - Backward compatible with existing users + - Used for admin access if Keycloak is down + +### Provider Hierarchy + +``` +App +โ”œโ”€ KeycloakAuthProvider (outer) +โ”‚ โ””โ”€ Provides: isAuthenticated, login, logout (OAuth) +โ”‚ +โ””โ”€ AuthProvider (inner) + โ””โ”€ Provides: user, token (legacy JWT) +``` + +LoginPage uses KeycloakAuthProvider exclusively. Protected routes can check either provider. + +--- + +## Next Steps + +### 1. Configure Keycloak Client Secret +For production, set a proper client secret: + +1. Generate a secret in Keycloak admin console +2. Update `config/SECRETS/secrets.yaml`: + ```yaml + keycloak: + backend_client_secret: 'your-generated-secret' + ``` + +### 2. Set Up User Federation +Configure Keycloak to sync with external identity providers: +- Google OAuth +- GitHub OAuth +- Corporate LDAP/AD + +### 3. Test Share Feature with Keycloak +Now that login works, test the complete share flow: + +1. Login with Keycloak +2. Navigate to a conversation +3. Click "Share" button +4. Create a share link +5. Verify the share URL uses your Tailscale hostname + +--- + +## Files Modified + +### Frontend +- โœ… `ushadow/frontend/src/pages/LoginPage.tsx` - Simplified to SSO button only +- โœ… `ushadow/frontend/src/App.tsx` - Added OAuth callback route +- โœ… `ushadow/frontend/package.json` - Added jwt-decode dependency + +### Backend Configuration +- โœ… `config/config.defaults.yaml` - Enabled Keycloak +- โœ… `config/SECRETS/secrets.yaml` - Added Keycloak credentials + +### Share Feature (from previous work) +- โœ… `ushadow/backend/src/routers/share.py` - Implemented share URL strategy +- โœ… `ushadow/frontend/src/pages/ConversationDetailPage.tsx` - Added share button +- โœ… Complete conversation sharing infrastructure + +--- + +## Summary + +**Before**: Login page had non-functional password fields, OAuth callback wasn't registered, and Keycloak was disabled. 
+ +**After**: Clean SSO login flow with Keycloak, automatic redirect URI registration, and complete OAuth callback handling. + +**Impact**: Users can now successfully log in via Keycloak and access the full share feature with proper authentication! diff --git a/Makefile b/Makefile index 968d2f52..1a14ff75 100644 --- a/Makefile +++ b/Makefile @@ -7,7 +7,8 @@ go install status health dev prod \ svc-list svc-restart svc-start svc-stop svc-status \ chronicle-env-export chronicle-build-local chronicle-up-local chronicle-down-local chronicle-dev \ - release + chronicle-push mycelia-push openmemory-push \ + release env-sync env-sync-apply env-info # Read .env for display purposes only (actual logic is in run.py) -include .env @@ -44,6 +45,11 @@ help: @echo " make chronicle-down-local - Stop local Chronicle" @echo " make chronicle-dev - Build + run (full dev cycle)" @echo "" + @echo "Build & Push to GHCR:" + @echo " make chronicle-push [TAG=latest] - Build and push Chronicle (backend+workers+webui)" + @echo " make mycelia-push [TAG=latest] - Build and push Mycelia backend" + @echo " make openmemory-push [TAG=latest] - Build and push OpenMemory server" + @echo "" @echo "Service management:" @echo " make rebuild - Rebuild service from compose/-compose.yml" @echo " (e.g., make rebuild mycelia, make rebuild chronicle)" @@ -71,12 +77,23 @@ help: @echo " make lint - Run linters" @echo " make format - Format code" @echo "" + @echo "Environment commands:" + @echo " make env-info - Show current environment info" + @echo " make env-sync - Check for missing variables from .env.example" + @echo " make env-sync-apply - Add missing variables to .env" + @echo "" @echo "Cleanup commands:" @echo " make clean-logs - Remove log files" @echo " make clean-cache - Remove Python cache files" @echo " make reset - Full reset (stop all, remove volumes, clean)" @echo " make reset-tailscale - Reset Tailscale (container, state, certs)" @echo "" + @echo "Keycloak realm management:" + @echo " make 
keycloak-delete-realm - Delete the ushadow realm" + @echo " make keycloak-create-realm - Create realm from realm-export.json" + @echo " make keycloak-reset-realm - Delete and recreate realm" + @echo " make keycloak-fresh-start - Complete fresh setup (stop, clear DB, restart, import)" + @echo "" @echo "Launcher release:" @echo " make release VERSION=x.y.z [PLATFORMS=all] [DRAFT=true]" @echo " - Build, commit, and trigger GitHub release workflow" @@ -194,6 +211,24 @@ chronicle-down-local: chronicle-dev: chronicle-build-local chronicle-up-local @echo "๐ŸŽ‰ Chronicle dev environment ready" +# ============================================================================= +# Build & Push to GHCR +# ============================================================================= +# Build and push multi-arch images to GitHub Container Registry +# Requires: docker login ghcr.io -u USERNAME --password-stdin + +# Chronicle - Build and push backend + webui +chronicle-push: + @./scripts/build-push-images.sh chronicle $(TAG) + +# Mycelia - Build and push backend +mycelia-push: + @./scripts/build-push-images.sh mycelia $(TAG) + +# OpenMemory - Build and push server +openmemory-push: + @./scripts/build-push-images.sh openmemory $(TAG) + # ============================================================================= # Service Management (via ushadow API) # ============================================================================= @@ -421,6 +456,14 @@ env-info: @echo "CHRONICLE_PORT: $${CHRONICLE_PORT:-8000}" @echo "MONGODB_DATABASE: $${MONGODB_DATABASE:-ushadow}" +# Sync .env with .env.example (show missing variables) +env-sync: + @uv run scripts/sync-env.py + +# Sync .env with .env.example (apply missing variables) +env-sync-apply: + @uv run scripts/sync-env.py --apply + # Launcher release - triggers GitHub Actions workflow # Usage: make release VERSION=0.4.2 [PLATFORMS=macos] [DRAFT=true] [RELEASE_NAME="Bug Fixes"] release: @@ -454,3 +497,54 @@ release: @echo "โœ… Release workflow 
triggered!" @echo " View progress: gh run list --workflow=launcher-release.yml" @echo " Or visit: https://github.com/$$(git config --get remote.origin.url | sed 's/.*github.com[:/]\(.*\)\.git/\1/')/actions" + +# Keycloak realm management +keycloak-delete-realm: + @echo "๐Ÿ—‘๏ธ Deleting Keycloak realm 'ushadow'..." + @docker exec keycloak /opt/keycloak/bin/kcadm.sh config credentials \ + --server http://localhost:8080 \ + --realm master \ + --user admin \ + --password admin > /dev/null 2>&1 || \ + (echo "โš ๏ธ Keycloak not running" && exit 1) + @docker exec keycloak /opt/keycloak/bin/kcadm.sh delete realms/ushadow 2>/dev/null || \ + (echo "โš ๏ธ Realm doesn't exist" && exit 1) + @echo "โœ… Realm deleted" + +keycloak-create-realm: + @echo "๐Ÿ“ฆ Creating Keycloak realm 'ushadow' from realm-export.json..." + @if [ ! -f config/keycloak/realm-export.json ]; then \ + echo "โŒ Error: config/keycloak/realm-export.json not found"; \ + exit 1; \ + fi + @docker cp config/keycloak/realm-export.json keycloak:/tmp/realm-import.json + @docker exec keycloak /opt/keycloak/bin/kcadm.sh config credentials \ + --server http://localhost:8080 \ + --realm master \ + --user admin \ + --password admin + @docker exec keycloak /opt/keycloak/bin/kcadm.sh create realms \ + -f /tmp/realm-import.json + @echo "โœ… Realm created and configured" + +keycloak-reset-realm: keycloak-delete-realm keycloak-create-realm + @echo "โœ… Realm reset complete" + +keycloak-fresh-start: + @echo "๐Ÿ”„ Starting fresh Keycloak setup..." + @echo "1. Stopping Keycloak..." + @docker stop keycloak 2>/dev/null || true + @docker rm keycloak 2>/dev/null || true + @echo "2. Clearing Keycloak database..." + @docker exec postgres psql -U ushadow -d ushadow -c "DROP SCHEMA IF EXISTS public CASCADE; CREATE SCHEMA public;" 2>/dev/null || \ + echo "โš ๏ธ Database already clean or Postgres not running" + @echo "3. Starting Keycloak..." 
+ @docker-compose -f compose/docker-compose.infra.yml --profile infra up -d keycloak + @echo "4. Waiting for Keycloak to start (30s)..." + @sleep 30 + @echo "5. Creating realm from export..." + @$(MAKE) keycloak-create-realm || echo "โš ๏ธ Realm creation failed - may need manual setup" + @echo "โœ… Fresh Keycloak setup complete" + @echo " Admin console: http://localhost:8081" + @echo " Username: admin" + @echo " Password: admin" diff --git a/README.md b/README.md index b463cbbb..5559a477 100644 --- a/README.md +++ b/README.md @@ -178,7 +178,7 @@ ushadow/ โ”œโ”€โ”€ scripts/ # Utility scripts โ”œโ”€โ”€ tests/ # Test suites โ”œโ”€โ”€ docker-compose.yml # Main application compose -โ”œโ”€โ”€ quickstart.sh # Quick start script +โ”œโ”€โ”€ go.sh # Quick start script (./dev.sh for development) โ””โ”€โ”€ README.md ``` @@ -292,22 +292,37 @@ make test ### Start All Services ```bash -./quickstart.sh +./go.sh ``` Or manually: ```bash # Start infrastructure -docker compose -f deployment/docker-compose.infra.yml up -d +docker compose -f compose/docker-compose.infra.yml up -d # Start Chronicle -docker compose -f deployment/docker-compose.chronicle.yml up -d +docker compose -f compose/chronicle-compose.yaml up -d # Start ushadow docker compose up -d ``` +### Start infrastructure only: `make infra-up` + +Starts MongoDB, Redis, and Qdrant (and memory services). Use it when: + +- You want to run the ushadow app separately (e.g. `make up` or `./dev.sh` after infra is up) +- Infrastructure was stopped (`make infra-down`) and you need it again +- You're running frontend/backend manually and only need the databases + +**Summary:** Use `./go.sh` or `./dev.sh` for a full start (they bring up infra too). Use `make infra-up` when you only want infra or when you start the app in a separate step. 
+ +```bash +make infra-up # Start infrastructure +make infra-down # Stop infrastructure +``` + ### Stop Services ```bash @@ -316,8 +331,8 @@ docker compose down # Stop everything docker compose down -docker compose -f deployment/docker-compose.chronicle.yml down -docker compose -f deployment/docker-compose.infra.yml down +docker compose -f compose/chronicle-compose.yaml down +docker compose -f compose/docker-compose.infra.yml down ``` ### View Logs diff --git a/SHARE_FEATURE_SUMMARY.md b/SHARE_FEATURE_SUMMARY.md new file mode 100644 index 00000000..9e65b5c3 --- /dev/null +++ b/SHARE_FEATURE_SUMMARY.md @@ -0,0 +1,194 @@ +# Share Feature - Complete Implementation Summary + +## What Users Will See + +When clicking "Share" on a conversation, users will get URLs in this format: + +### Default (No Configuration) +If you have Tailscale configured: +``` +https://your-machine.tail12345.ts.net/share/a1b2c3d4-e5f6-7890-abcd-ef1234567890 +``` + +If Tailscale is not configured (development): +``` +http://localhost:3000/share/a1b2c3d4-e5f6-7890-abcd-ef1234567890 +``` + +### With Environment Variable Override +If you set `SHARE_BASE_URL` in `.env`: +```bash +SHARE_BASE_URL=https://ushadow.mycompany.com +``` +Users get: +``` +https://ushadow.mycompany.com/share/a1b2c3d4-e5f6-7890-abcd-ef1234567890 +``` + +### With Public Gateway +If you set `SHARE_PUBLIC_GATEWAY` in `.env`: +```bash +SHARE_PUBLIC_GATEWAY=https://share.yourdomain.com +``` +Users get: +``` +https://share.yourdomain.com/share/a1b2c3d4-e5f6-7890-abcd-ef1234567890 +``` + +--- + +## How Share Links Work + +1. **User clicks "Share" button** in conversation detail page +2. **ShareDialog opens** with options: + - Expiration date (optional) + - Max view count (optional) + - Require authentication (toggle) + - Tailscale-only access (toggle) +3. **Backend creates token** with ownership validation +4. **Frontend displays link** with copy-to-clipboard button +5. 
**Recipient clicks link** โ†’ Token validated โ†’ Conversation displayed + +--- + +## Complete Feature Set + +### โœ… Frontend Integration +- **ConversationsPage** (`/conversations`) - Multi-source list view (Chronicle + Mycelia) +- **ConversationDetailPage** (`/conversations/{id}?source={source}`) - Full conversation view with: + - Audio playback (full + segment-level) + - Memory integration + - Transcript display + - **Share button** (green button next to "Play Full Audio") +- **ShareDialog** - Full-featured modal with all share options +- **useShare hook** - State management for share dialog + +### โœ… Backend API +- `POST /api/share/create` - Create new share token +- `GET /api/share/{token}` - Access shared resource (public endpoint) +- `DELETE /api/share/{token}` - Revoke share token +- `GET /api/share/resource/{type}/{id}` - List shares for resource +- `GET /api/share/{token}/logs` - View access logs +- `POST /api/share/conversations/{id}` - Convenience endpoint for conversations + +### โœ… Security Features +- **Ownership validation** - Users can only share their own conversations +- **Superuser bypass** - Admins can share anything +- **Optional features** (environment variable gates): + - Resource validation (`SHARE_VALIDATE_RESOURCES`) + - Tailscale IP validation (`SHARE_VALIDATE_TAILSCALE`) +- **Access logging** - Audit trail of all share access +- **Expiration** - Time-based token expiry +- **View limits** - Maximum number of accesses + +### โœ… URL Configuration +- **Strategy hierarchy** (priority order): + 1. `SHARE_BASE_URL` environment variable + 2. `SHARE_PUBLIC_GATEWAY` environment variable + 3. Tailscale hostname (auto-detected) + 4. Localhost fallback (development) + +--- + +## Testing the Feature + +### 1. Start ushadow +```bash +# Check that backend logs show: +# "Share service initialized with base_url: https://..." +docker-compose up -d +docker-compose logs -f backend | grep "Share service" +``` + +### 2. 
Navigate to conversations +``` +http://localhost:3010/conversations +``` + +### 3. Click any conversation to view details +``` +http://localhost:3010/conversations/{id}?source=mycelia +``` + +### 4. Click "Share" button +- Creates share token +- Displays URL with your configured base URL +- Shows existing shares below + +### 5. Test the share link +- Copy the generated URL +- Open in incognito/private window +- Should show conversation details (if public) +- OR require Tailscale access (if `tailscale_only: true`) + +--- + +## Configuration Examples + +### Scenario 1: Development (No Config Needed) +```bash +# No environment variables set +# URLs will be: http://localhost:3000/share/{token} +``` + +### Scenario 2: Tailscale Deployment +```bash +# Tailscale auto-detected from tailscale-config.json +# URLs will be: https://ushadow.tail12345.ts.net/share/{token} +``` + +### Scenario 3: Custom Domain +```bash +# In .env: +SHARE_BASE_URL=https://ushadow.mycompany.com + +# URLs will be: https://ushadow.mycompany.com/share/{token} +``` + +### Scenario 4: Public Gateway +```bash +# In .env: +SHARE_PUBLIC_GATEWAY=https://share.yourdomain.com + +# URLs will be: https://share.yourdomain.com/share/{token} +# Requires deploying share-gateway/ to public VPS +``` + +--- + +## Next Steps (Optional) + +### Implement Resource Fetching +Currently the share access endpoint returns placeholder data. To show actual conversation content, implement resource fetching in: +- `ushadow/backend/src/routers/share.py` line 136 +- Call Mycelia API to fetch conversation data +- Filter sensitive fields before returning + +### Deploy Share Gateway (For External Sharing) +If you want external friends to access shares: +1. Deploy `share-gateway/` to public VPS +2. Set `SHARE_PUBLIC_GATEWAY` environment variable +3. 
Configure gateway to proxy back through Tailscale + +### Enable Tailscale Funnel (Alternative to Gateway) +If you want external access without deploying a gateway: +```bash +tailscale funnel --bg --https=443 --set-path=/share https+insecure://localhost:8010 +``` + +--- + +## Architecture Decision: Why This Approach? + +โ˜… **Flexible URL Configuration** +The hierarchy allows you to start simple (Tailscale auto-detection) and upgrade later (public gateway) without changing code. Just set an environment variable. + +โ˜… **Security by Default** +Ownership validation ensures users can only share their own content. Superuser bypass provides admin flexibility for support/moderation. + +โ˜… **Progressive Enhancement** +- Basic: Tailnet-only sharing (zero config) +- Intermediate: Funnel for selective public access +- Advanced: Full public gateway with rate limiting + +This matches your "behind Tailscale" deployment while keeping external sharing as an option when you're ready. diff --git a/SHARE_URL_CONFIGURATION.md b/SHARE_URL_CONFIGURATION.md new file mode 100644 index 00000000..e06ce70d --- /dev/null +++ b/SHARE_URL_CONFIGURATION.md @@ -0,0 +1,246 @@ +# Share URL Configuration for Tailscale Deployments + +## The Challenge + +When running ushadow behind Tailscale, you face a fundamental question: **Who should be able to access shared links?** + +Your share links will look like: +``` +https://YOUR_BASE_URL/share/a1b2c3d4-e5f6-7890-abcd-ef1234567890 +``` + +But what should `YOUR_BASE_URL` be? + +--- + +## Three Sharing Strategies + +### Strategy 1: Tailnet-Only Sharing (Simplest) + +**Best for:** Sharing with colleagues/friends who are already on your Tailnet + +**Setup:** +```bash +# In your .env file +SHARE_BASE_URL=https://ushadow.tail12345.ts.net +``` + +**How it works:** +1. User clicks "Share" in conversation detail page +2. Gets link like: `https://ushadow.tail12345.ts.net/share/{token}` +3. 
Only people connected to your Tailnet can access + +**Implementation:** +```python +# In ushadow/backend/src/routers/share.py, implement _get_share_base_url(): +def _get_share_base_url() -> str: + # Try explicit override first + if base_url := os.getenv("SHARE_BASE_URL"): + return base_url.rstrip("/") + + # Use Tailscale hostname + try: + config = read_tailscale_config() + if config and config.hostname: + return f"https://{config.hostname}" + except Exception: + pass + + # Fallback + return "http://localhost:3000" +``` + +**Pros:** +- โœ… Simple - no extra infrastructure +- โœ… Secure - protected by Tailscale ACLs +- โœ… Works immediately + +**Cons:** +- โŒ Recipients must join your Tailnet +- โŒ Not suitable for external friends + +--- + +### Strategy 2: Tailscale Funnel (Public Access via Tailscale) + +**Best for:** Sharing with external friends without deploying separate infrastructure + +**Setup:** +```bash +# Enable Funnel for specific paths +tailscale funnel --bg --https=443 --set-path=/share https+insecure://localhost:8010 + +# In your .env file +SHARE_BASE_URL=https://ushadow.tail12345.ts.net +``` + +**How it works:** +1. Tailscale Funnel exposes `/share/*` endpoints publicly through Tailscale's infrastructure +2. Share links use your Tailscale hostname +3. External users access via public internet โ†’ Tailscale Funnel โ†’ Your ushadow instance + +**Implementation:** Same as Strategy 1 (Funnel is transparent to your app) + +**Pros:** +- โœ… No separate VPS needed +- โœ… Tailscale handles SSL certificates +- โœ… Can selectively expose endpoints + +**Cons:** +- โŒ Requires Tailscale Funnel configuration +- โŒ Funnel has bandwidth limits +- โŒ May not work with all Tailscale plans + +--- + +### Strategy 3: Public Gateway (Maximum Flexibility) + +**Best for:** Production deployments with external sharing and fine-grained control + +**Setup:** +1. Deploy `share-gateway/` to a public VPS (e.g., DigitalOcean) +2. 
Configure gateway to proxy back to your Tailscale network +3. Set environment variable: + +```bash +# In your .env file +SHARE_PUBLIC_GATEWAY=https://share.yourdomain.com +``` + +**How it works:** +1. User clicks "Share" in conversation +2. Gets link like: `https://share.yourdomain.com/share/{token}` +3. Gateway validates token with your ushadow backend via Tailscale +4. Gateway proxies the conversation data back to external user + +**Implementation:** +```python +def _get_share_base_url() -> str: + # Public gateway for external sharing (highest priority) + if gateway_url := os.getenv("SHARE_PUBLIC_GATEWAY"): + return gateway_url.rstrip("/") + + # Explicit override + if base_url := os.getenv("SHARE_BASE_URL"): + return base_url.rstrip("/") + + # Fallback to Tailscale hostname + try: + config = read_tailscale_config() + if config and config.hostname: + return f"https://{config.hostname}" + except Exception: + pass + + return "http://localhost:3000" +``` + +**Gateway Deployment:** +```bash +cd share-gateway/ +docker build -t ushadow-share-gateway . +docker run -d -p 443:8000 \ + -e USHADOW_BACKEND_URL=https://ushadow.tail12345.ts.net \ + -e RATE_LIMIT_PER_IP=10 \ + ushadow-share-gateway +``` + +**Pros:** +- โœ… Full control over public endpoint +- โœ… Custom domain and SSL +- โœ… Rate limiting and security controls +- โœ… No bandwidth limits + +**Cons:** +- โŒ Requires deploying separate service +- โŒ Monthly VPS cost (~$5-10/month) +- โŒ More complex architecture + +--- + +## Recommended Implementation + +Here's the complete implementation for `_get_share_base_url()` in `ushadow/backend/src/routers/share.py`: + +```python +def _get_share_base_url() -> str: + """Determine the base URL for share links. + + Strategy hierarchy: + 1. SHARE_BASE_URL environment variable (highest priority) + 2. SHARE_PUBLIC_GATEWAY environment variable (for external sharing) + 3. Tailscale hostname (for Tailnet-only sharing) + 4. 
Fallback to localhost (development only) + + Returns: + Base URL string (e.g., "https://ushadow.tail12345.ts.net") + """ + # Explicit override (for testing or custom deployments) + if base_url := os.getenv("SHARE_BASE_URL"): + logger.info(f"Using explicit SHARE_BASE_URL: {base_url}") + return base_url.rstrip("/") + + # Public gateway for external sharing + if gateway_url := os.getenv("SHARE_PUBLIC_GATEWAY"): + logger.info(f"Using public gateway: {gateway_url}") + return gateway_url.rstrip("/") + + # Use Tailscale hostname (works with or without Funnel) + try: + config = read_tailscale_config() + if config and config.hostname: + tailscale_url = f"https://{config.hostname}" + logger.info(f"Using Tailscale hostname: {tailscale_url}") + return tailscale_url + except Exception as e: + logger.warning(f"Failed to read Tailscale config: {e}") + + # Fallback for development + logger.warning("Using localhost fallback - shares will only work locally!") + return "http://localhost:3000" +``` + +--- + +## Quick Start + +**For immediate Tailnet-only sharing:** +```bash +# No configuration needed! Just use the Tailscale hostname detection +# Share links will automatically use: https://ushadow.tail{xxx}.ts.net +``` + +**To override:** +```bash +# Add to your .env file +SHARE_BASE_URL=https://your-custom-url.com +``` + +--- + +## Testing Your Configuration + +1. Start ushadow backend +2. Check logs for: `Share service initialized with base_url: ...` +3. Create a share link from conversation detail page +4. 
Verify the URL format matches your expected base URL + +--- + +## Security Considerations + +### Tailnet-Only Sharing +- Protected by Tailscale ACLs +- No public exposure +- Requires recipients to join Tailnet + +### Funnel Sharing +- Only `/share/*` endpoints exposed +- Still uses Tailscale authentication for admin features +- Funnel has rate limiting built-in + +### Public Gateway Sharing +- Gateway validates all tokens before proxying +- Rate limiting per IP (default: 10 requests/minute) +- Admin endpoints still require Tailscale access +- Consider adding additional authentication for sensitive shares diff --git a/SHARING_IMPLEMENTATION.md b/SHARING_IMPLEMENTATION.md new file mode 100644 index 00000000..f634882d --- /dev/null +++ b/SHARING_IMPLEMENTATION.md @@ -0,0 +1,739 @@ +# Ushadow Sharing System - Implementation Guide + +## Overview + +This document describes the conversation sharing system I've implemented for Ushadow, designed to integrate with Keycloak Fine-Grained Authorization (FGA) while remaining functional with the current JWT authentication system. + +## ๐ŸŒ Architecture: Behind Tailscale + Public Sharing + +Since ushadow runs **behind your private Tailscale network**, external users cannot directly access it. The sharing system supports **two modes**: + +### Mode 1: Tailscale-Only Sharing +- User sets `tailscale_only=true` on share link +- Friend must join your Tailnet (temporarily or permanently) +- Friend accesses ushadow directly via Tailscale +- Most secure, zero trust + +### Mode 2: Public Share Gateway (Recommended) +- User sets `tailscale_only=false` on share link +- Share link points to public gateway: `https://share.yourdomain.com/c/{token}` +- Gateway validates token, proxies ONLY shared resource +- Gateway connects to ushadow via Tailscale (private connection) +- Friend never has direct access to your Tailnet + +**Gateway Architecture**: +``` +Public Internet +โ”‚ +โ”œโ”€โ”€ Friend visits: https://share.yourdomain.com/c/550e8400-... 
+โ”‚ +โ–ผ +Share Gateway (Public VPS, ~$5/month) +โ”‚ - Validates share token +โ”‚ - Rate limited (10 req/min per IP) +โ”‚ - Audit logging +โ”‚ - Only exposes /c/{token} endpoint +โ”‚ +โ–ผ (via Tailscale) +Your Private Tailnet +โ”œโ”€โ”€ ushadow backend โ† Friend NEVER accesses directly +โ”œโ”€โ”€ MongoDB +โ””โ”€โ”€ Your devices +``` + +**Gateway Implementation**: See `share-gateway/` directory for complete deployment-ready code. + +## What's Been Built + +### โœ… Backend (Complete) + +**Models** (`ushadow/backend/src/models/share.py`): +- `ShareToken` - Beanie document for MongoDB storage +- `ShareTokenCreate` - API request model +- `ShareTokenResponse` - API response model +- `KeycloakPolicy` - Keycloak-compatible policy structure +- Enums: `ResourceType`, `SharePermission` + +**Service** (`ushadow/backend/src/services/share_service.py`): +- `ShareService` - Business logic for share management +- Token creation/validation/revocation +- Audit logging for all access +- Keycloak integration stubs (ready for implementation) + +**API Router** (`ushadow/backend/src/routers/share.py`): +- `POST /api/share/create` - Create share token +- `GET /api/share/{token}` - Access shared resource +- `DELETE /api/share/{token}` - Revoke share +- `GET /api/share/resource/{type}/{id}` - List shares for resource +- `GET /api/share/{token}/logs` - View access audit logs +- Convenience endpoints: `/api/share/conversations/{id}` + +### โœ… Frontend (Complete) + +**Components** (`ushadow/frontend/src/components/`): +- `ShareDialog.tsx` - Full-featured share management UI + - Create share links with expiration/view limits + - List existing shares + - Copy links to clipboard + - Revoke access with confirmation + +**Hooks** (`ushadow/frontend/src/hooks/`): +- `useShare.ts` - Share dialog state management + +### ๐Ÿ“‹ Configuration + +**Database**: ShareToken collection added to Beanie initialization in `main.py`: +```python +await init_beanie(database=db, document_models=[User, ShareToken]) 
+``` + +**Router**: Share router registered in `main.py`: +```python +app.include_router(share.router, tags=["sharing"]) +``` + +--- + +## ๐ŸŽฏ Key Decision Points (TODO for You) + +I've intentionally left several business logic decisions for you to implement. These are marked with `TODO` comments in the code and represent strategic choices that should align with your security and UX requirements. + +### 1. Resource Validation (`share_service.py:260-273`) + +**Location**: `ShareService._validate_resource_exists()` + +**Current State**: Placeholder that skips validation + +**Decision Point**: How should we verify that a conversation/memory/resource exists before creating a share link? + +```python +async def _validate_resource_exists( + self, + resource_type: ResourceType, + resource_id: str, +): + """Validate that resource exists and is accessible. + + TODO: Implement resource validation + - For conversations: Check Chronicle API + - For memories: Check Mycelia API + - Raise ValueError if resource doesn't exist + """ +``` + +**Options**: +1. **Strict**: Call Chronicle/Mycelia API to verify resource exists +2. **Lazy**: Assume resource exists, fail when accessed +3. **Cache-based**: Check local cache/database first + +**Trade-offs**: +- Strict validation prevents sharing non-existent resources but adds API latency +- Lazy validation is faster but could create broken share links +- Cache-based is fast but might be stale + +**Recommended Implementation**: +```python +# Example for conversations +if resource_type == ResourceType.CONVERSATION: + response = await httpx.get( + f"{CHRONICLE_URL}/conversations/{resource_id}", + headers={"Authorization": f"Bearer {token}"} + ) + if response.status_code == 404: + raise ValueError(f"Conversation {resource_id} not found") +``` + +--- + +### 2. 
Authorization Check (`share_service.py:275-291`) + +**Location**: `ShareService._validate_user_can_share()` + +**Current State**: Allows all authenticated users + +**Decision Point**: Who should be allowed to share a resource? + +```python +async def _validate_user_can_share( + self, + user: User, + resource_type: ResourceType, + resource_id: str, +): + """Validate user has permission to share resource. + + TODO: DECISION POINT - Implement authorization check + Options: + 1. Strict: Only resource owner can share + 2. Permissive: Anyone with read access can share + 3. Role-based: Only users with "share" permission can share + """ +``` + +**Options**: +1. **Owner-only**: Only the user who created the resource can share it +2. **Viewer-based**: Anyone who can view the resource can share it +3. **Role-based**: Check Keycloak roles/permissions +4. **Admin-only**: Only superusers can create shares + +**Trade-offs**: +- Owner-only is most secure but limits collaboration +- Viewer-based enables viral sharing but may leak sensitive data +- Role-based requires Keycloak integration +- Admin-only prevents user-driven sharing + +**Recommended Implementation**: +```python +# Option 1: Owner-only (strictest) +conversation = await get_conversation(resource_id) +if str(conversation.user_id) != str(user.id) and not user.is_superuser: + raise ValueError("Only the conversation owner can create share links") + +# Option 2: Viewer-based (most permissive) +# If user can fetch the resource, they can share it +# (validation happens in _validate_resource_exists) + +# Option 3: Role-based (Keycloak) +if not await keycloak.has_permission(user.id, resource_id, "share"): + raise ValueError("User lacks share permission for this resource") +``` + +--- + +### 3. 
Tailscale Network Validation (`share_service.py:293-308`) + +**Location**: `ShareService._validate_tailscale_access()` + +**Current State**: Always returns True (allows all) + +**Decision Point**: How should we verify requests are from your Tailscale network? + +```python +async def _validate_tailscale_access(self, request_ip: Optional[str]) -> bool: + """Validate request is from Tailscale network. + + TODO: DECISION POINT - Implement Tailscale validation + Options: + 1. Check IP ranges (Tailscale CGNAT 100.64.0.0/10) + 2. Validate via Tailscale API + 3. Trust X-Forwarded-For from Tailscale reverse proxy + """ +``` + +**Options**: +1. **IP Range Check**: Verify IP is in Tailscale CGNAT range (100.64.0.0/10) +2. **Tailscale API**: Call Tailscale API to verify device membership +3. **Reverse Proxy Headers**: Trust `X-Tailscale-User` header from Tailscale Serve +4. **Mutual TLS**: Validate client certificates + +**Trade-offs**: +- IP range check is fast but can be spoofed if not behind Tailscale +- API validation is authoritative but adds latency +- Header trust is fast but requires secure reverse proxy setup +- mTLS is most secure but complex to set up + +**Recommended Implementation**: +```python +# Option 1: IP Range Check (simple, fast) +import ipaddress + +if not request_ip: + return False + +ip = ipaddress.ip_address(request_ip) +tailscale_range = ipaddress.ip_network("100.64.0.0/10") +return ip in tailscale_range + +# Option 3: Header Trust (requires Tailscale Serve) +def get_tailscale_user(request: Request) -> Optional[str]: + return request.headers.get("X-Tailscale-User") + +if share_token.tailscale_only and not get_tailscale_user(request): + return False, "Access restricted to Tailscale network" +``` + +--- + +### 4. 
Keycloak FGA Integration (`share_service.py:310-330`) + +**Location**: `ShareService._register_with_keycloak()` and `_unregister_from_keycloak()` + +**Current State**: Stub methods with debug logging + +**Decision Point**: How should share tokens integrate with Keycloak Fine-Grained Authorization? + +```python +async def _register_with_keycloak(self, share_token: ShareToken): + """Register share token with Keycloak FGA. + + TODO: Implement Keycloak FGA registration + This should: + 1. Create Keycloak resource for the shared item + 2. Create Keycloak authorization policies + 3. Store keycloak_policy_id and keycloak_resource_id on share_token + """ +``` + +**Implementation Steps**: +1. Create Keycloak resource: + ```python + resource = await keycloak.create_resource( + name=f"{share_token.resource_type}:{share_token.resource_id}", + type=share_token.resource_type, + owner=str(share_token.created_by) + ) + share_token.keycloak_resource_id = resource["_id"] + ``` + +2. Create authorization policies: + ```python + for policy in share_token.policies: + kc_policy = await keycloak.create_policy( + name=f"share-{share_token.token}", + resources=[resource["_id"]], + scopes=[policy.action], + logic="POSITIVE", + decision_strategy="UNANIMOUS" + ) + share_token.keycloak_policy_id = kc_policy["id"] + ``` + +3. Grant permissions to anonymous users (if `require_auth=False`): + ```python + if not share_token.require_auth: + await keycloak.create_permission( + name=f"anon-access-{share_token.token}", + policy=kc_policy["id"], + resources=[resource["_id"]], + decision_strategy="AFFIRMATIVE" + ) + ``` + +**Libraries to Consider**: +- `python-keycloak` - Official Python client +- `httpx` - Direct REST API calls to Keycloak + +--- + +### 5. Base URL Configuration (`share.py:32` and `share_service.py:26`) + +**Location**: `get_share_service()` in `share.py` + +**Current State**: Hardcoded to `http://localhost:3000` + +**Decision Point**: How should the frontend URL be configured? 
+ +```python +def get_share_service(db: AsyncIOMotorDatabase = Depends(get_database)) -> ShareService: + # TODO: Get base_url from settings + base_url = "http://localhost:3000" + return ShareService(db=db, base_url=base_url) +``` + +**Options**: +1. **Environment Variable**: `FRONTEND_URL` in `.env` +2. **Settings File**: Add to `config/config.defaults.yaml` +3. **Auto-detect**: Use request.base_url from FastAPI +4. **Per-environment**: Different URLs for dev/prod + +**Recommended Implementation**: +```python +from src.config.omegaconf_settings import get_settings + +async def get_share_service( + db: AsyncIOMotorDatabase = Depends(get_database) +) -> ShareService: + settings = get_settings() + base_url = await settings.get( + "network.frontend_url", + default="http://localhost:3000" + ) + return ShareService(db=db, base_url=base_url) +``` + +--- + +## ๐Ÿ“š Usage Examples + +### Creating a Share Link (Frontend) + +```tsx +import ShareDialog from '@/components/ShareDialog' +import { useShare } from '@/hooks/useShare' +import { Share2 } from 'lucide-react' + +function ConversationView({ conversationId }: { conversationId: string }) { + const shareProps = useShare({ + resourceType: 'conversation', + resourceId: conversationId + }) + + return ( +
+      <button onClick={shareProps.openShareDialog} className="btn btn-primary">
+        <Share2 className="h-4 w-4" /> Share
+      </button>
+      <ShareDialog {...shareProps} />
+ ) +} +``` + +### Accessing a Shared Resource (API) + +```bash +# Public access (no auth required) +curl https://ushadow.example.com/api/share/550e8400-e29b-41d4-a716-446655440000 + +# Response +{ + "share_token": { + "token": "550e8400-e29b-41d4-a716-446655440000", + "share_url": "https://ushadow.example.com/share/550e8400-...", + "permissions": ["read"], + "expires_at": "2026-02-08T14:35:00Z", + "view_count": 1 + }, + "resource": { + "type": "conversation", + "id": "conv_123", + "data": "Placeholder for conversation:conv_123" + } +} +``` + +### Revoking a Share Link + +```typescript +// From ShareDialog component +const revokeShareMutation = useMutation({ + mutationFn: async (token: string) => { + const response = await fetch(`/api/share/${token}`, { + method: 'DELETE', + credentials: 'include', + }) + if (!response.ok) throw new Error('Failed to revoke') + } +}) + +await revokeShareMutation.mutateAsync(shareToken) +``` + +--- + +## ๐Ÿ” Security Features + +### Built-in Protections + +1. **Expiration**: Tokens can have TTL (expires_at) +2. **View Limits**: Tokens can have max_views +3. **Authentication**: `require_auth` flag enforces login +4. **Network Restriction**: `tailscale_only` limits to your private network +5. **Email Allowlist**: `allowed_emails` restricts to specific users +6. **Audit Logging**: Every access is logged with timestamp, user/IP, and metadata + +### Audit Trail Example + +```json +{ + "timestamp": "2026-02-01T15:30:00Z", + "user_identifier": "friend@example.com", + "action": "view", + "view_count": 3, + "metadata": { + "ip": "100.64.0.5", + "user_agent": "Mozilla/5.0..." 
+ } +} +``` + +--- + +## ๐Ÿงช Testing + +### Manual Testing Checklist + +- [ ] Create share link with expiration +- [ ] Create share link with view limit +- [ ] Create Tailscale-only share +- [ ] Create auth-required share +- [ ] Copy share link to clipboard +- [ ] Access share link (anonymous) +- [ ] Access share link (authenticated) +- [ ] Revoke share link +- [ ] View audit logs +- [ ] Share link expires correctly +- [ ] View limit enforced + +### API Testing + +```bash +# 1. Create share token +curl -X POST http://localhost:8080/api/share/create \ + -H "Content-Type: application/json" \ + -H "Cookie: ushadow_auth=YOUR_TOKEN" \ + -d '{ + "resource_type": "conversation", + "resource_id": "test_conv_123", + "permissions": ["read"], + "expires_in_days": 7, + "require_auth": false, + "tailscale_only": false + }' + +# 2. Access share token (public) +curl http://localhost:8080/api/share/SHARE_TOKEN_UUID + +# 3. List shares for resource +curl http://localhost:8080/api/share/resource/conversation/test_conv_123 \ + -H "Cookie: ushadow_auth=YOUR_TOKEN" + +# 4. Revoke share +curl -X DELETE http://localhost:8080/api/share/SHARE_TOKEN_UUID \ + -H "Cookie: ushadow_auth=YOUR_TOKEN" +``` + +--- + +## ๐Ÿ“‹ Next Steps + +1. **Implement Decision Points** (above) + - Resource validation + - Authorization checks + - Tailscale validation + - Keycloak integration + - Base URL configuration + +2. **Update Chronicle Integration** + - Modify conversation routes to support share token access + - See section below for guidance + +3. **Frontend Integration** + - Add share button to Chronicle conversation UI + - Import and use ShareDialog component + +4. **Production Configuration** + - Set `FRONTEND_URL` environment variable + - Configure Keycloak if using FGA + - Set up Tailscale Serve if using network restriction + +--- + +## ๐Ÿ”ง Chronicle Integration Guide + +To allow shared conversations to be accessed via share tokens, you'll need to modify the Chronicle conversation routes. 
+ +**File**: `chronicle/backends/advanced/src/advanced_omi_backend/routers/modules/conversation_routes.py` + +**Current State**: +```python +@router.get("/conversations/{conversation_id}") +async def get_conversation( + conversation_id: str, + current_user: User = Depends(current_active_user) +): + # Check ownership + if not current_user.is_superuser and conversation.user_id != str(current_user.id): + raise HTTPException(403) +``` + +**Required Changes**: + +1. Add optional share token parameter: +```python +from typing import Optional, Union +from fastapi import Query + +@router.get("/conversations/{conversation_id}") +async def get_conversation( + conversation_id: str, + share_token: Optional[str] = Query(None), # Add this + current_user: Optional[User] = Depends(get_optional_current_user), # Make optional +): +``` + +2. Add share token validation: +```python +# If share token provided, validate it +if share_token: + share_service = ShareService(db=db, base_url=BASE_URL) + is_valid, token_obj, reason = await share_service.validate_share_access( + token=share_token, + user_email=current_user.email if current_user else None, + request_ip=request.client.host if request.client else None + ) + + if not is_valid: + raise HTTPException(403, detail=reason) + + # Verify token is for this conversation + if token_obj.resource_id != conversation_id: + raise HTTPException(403, detail="Share token not valid for this conversation") + + # Record access + user_identifier = current_user.email if current_user else request.client.host + await share_service.record_share_access( + share_token=token_obj, + user_identifier=user_identifier, + action="view", + metadata={"user_agent": request.headers.get("user-agent")} + ) + + # Skip ownership check - share token grants access +else: + # Original ownership check + if not current_user: + raise HTTPException(401, detail="Authentication required") + + if not current_user.is_superuser and conversation.user_id != str(current_user.id): + raise 
HTTPException(403, detail="Access denied") +``` + +--- + +## ๐Ÿ“Š Database Schema + +### ShareToken Collection + +```python +{ + "_id": ObjectId("..."), + "token": "550e8400-e29b-41d4-a716-446655440000", # UUID, indexed + "resource_type": "conversation", # Indexed + "resource_id": "conv_123", # Indexed + "created_by": ObjectId("..."), # User who created + "policies": [ + { + "resource": "conversation:conv_123", + "action": "read", + "effect": "allow" + } + ], + "permissions": ["read"], + "require_auth": false, + "tailscale_only": false, + "allowed_emails": [], + "expires_at": ISODate("2026-02-08T14:35:00Z"), + "max_views": null, + "view_count": 5, + "last_accessed_at": ISODate("2026-02-01T15:30:00Z"), + "last_accessed_by": "friend@example.com", + "access_log": [ + { + "timestamp": ISODate("2026-02-01T15:30:00Z"), + "user_identifier": "friend@example.com", + "action": "view", + "view_count": 5, + "metadata": { + "ip": "100.64.0.5", + "user_agent": "Mozilla/5.0..." + } + } + ], + "keycloak_policy_id": null, + "keycloak_resource_id": null, + "created_at": ISODate("2026-02-01T14:35:00Z"), + "updated_at": ISODate("2026-02-01T15:30:00Z") +} +``` + +### Indexes + +- `token` (unique) +- `resource_type` +- `resource_id` +- `created_by` +- `expires_at` +- Compound: `(resource_type, resource_id)` + +--- + +## ๐ŸŽ“ Architecture Decisions + +### Why Keycloak-Compatible from Day One? + +The share token system uses `KeycloakPolicy` structures even though Keycloak isn't integrated yet because: + +1. **Future-proof**: When Keycloak FGA is added, migration is trivial +2. **Standards-based**: Follows OAuth2/UMA patterns +3. **Mycelia-compatible**: Matches existing policy structure in Mycelia +4. **Flexible**: Supports both simple permissions and complex policies + +### Why Separate from User Authentication? + +Share tokens are independent of the user auth system because: + +1. **Anonymous sharing**: Users without accounts can access shares +2. 
**Revocation**: Revoking a share doesn't affect user permissions +3. **Audit trail**: Clear separation between user actions and share access +4. **Expiration**: Shares can expire independently of user sessions + +--- + +## ๐Ÿ› Troubleshooting + +### "Database not initialized" Error + +**Cause**: FastAPI app.state.db not set + +**Fix**: Ensure `main.py` lifespan sets `app.state.db = db` + +### Share Links Not Working + +**Cause**: Router not registered + +**Fix**: Verify `app.include_router(share.router)` in `main.py` + +### "Share token not found" + +**Cause**: Token not in database or expired + +**Debug**: +```python +# In MongoDB shell +db.share_tokens.find({ token: "YOUR_TOKEN_UUID" }) + +# Check expiration +db.share_tokens.find({ + token: "YOUR_TOKEN_UUID", + expires_at: { $gt: new Date() } +}) +``` + +### Frontend Can't Fetch Shares + +**Cause**: CORS or auth cookies + +**Fix**: Check middleware setup in `main.py`: +```python +# CORS must allow credentials +app.add_middleware( + CORSMiddleware, + allow_credentials=True, + allow_origins=["http://localhost:3000"], +) +``` + +--- + +## ๐Ÿ“ Summary + +You now have a complete sharing system with: +- โœ… Backend models, service, and API +- โœ… Frontend UI and hooks +- โœ… Audit logging +- โœ… Keycloak-ready architecture +- ๐Ÿ“‹ Clear decision points for customization + +The system is ready to use once you implement the 5 decision points marked with TODO comments. Start with resource validation and authorization, then add Tailscale/Keycloak integration as needed for your security requirements. 
diff --git a/chronicle b/chronicle new file mode 160000 index 00000000..c170a02d --- /dev/null +++ b/chronicle @@ -0,0 +1 @@ +Subproject commit c170a02d291e9962fc938becbaf65cc81497e060 diff --git a/clear.sh b/clear.sh index 3c6435d1..a86185b5 100755 --- a/clear.sh +++ b/clear.sh @@ -18,7 +18,7 @@ echo "โš ๏ธ WARNING: This will:" echo " - Remove ALL admin users from the database" echo " - Delete config/SECRETS/secrets.yaml (all API keys and credentials)" echo " - Delete config/config.overrides.yaml (wizard state and service preferences)" -echo " - Allow you to run ./quick-start.sh for a fresh setup" +echo " - Allow you to run ./dev.sh or ./go.sh for a fresh setup" echo "" read -p "Are you sure? (yes/no): " -r echo "" @@ -89,7 +89,9 @@ echo "โœ… Admin reset complete!" echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" echo "" echo "๐Ÿš€ Next steps:" -echo " 1. Run ./start-dev.sh to regenerate secrets and setup" +echo " 1. Regenerate secrets and start:" +echo " โ€ข ./dev.sh - development (hot-reload for frontend/backend)" +echo " โ€ข ./go.sh - quick start (production build, opens browser)" echo " 2. Clear your browser cache (Cmd+Shift+R or hard refresh)" echo " 3. 
Log in with your new admin credentials" echo "" diff --git a/compose/backend.yml b/compose/backend.yml index ee7e3697..6e76dabe 100644 --- a/compose/backend.yml +++ b/compose/backend.yml @@ -28,6 +28,8 @@ services: - CONFIG_DIR=/config - MONGODB_DATABASE=${MONGODB_DATABASE:-ushadow} - CORS_ORIGINS=${CORS_ORIGINS:-http://localhost:5173,http://localhost:3000,http://localhost:${WEBUI_PORT}} + # Rich console width for logging (prevents log wrapping) + - COLUMNS=200 volumes: - ../ushadow/backend:/app - ../config:/config # Mount config directory (read-write for feature flags) diff --git a/compose/chronicle-compose.yaml b/compose/chronicle-compose.yaml index efa6e172..1bcbe810 100644 --- a/compose/chronicle-compose.yaml +++ b/compose/chronicle-compose.yaml @@ -1,4 +1,4 @@ -# Chronicle backend service definition with sidecar workers +# Chronicle backend service definition with separate workers # Environment variables are passed directly via docker compose subprocess env # No .env files needed - CapabilityResolver provides all values @@ -8,26 +8,20 @@ x-ushadow: # Services share namespace with main ushadow for unified auth (AUTH_SECRET_KEY) chronicle-backend: - display_name: "Chronicle" + display_name: "Chronicle api" description: "AI-powered voice journal and life logger with transcription and LLM analysis" - requires: [llm, transcription, audio_input] + requires: [llm, transcription] optional: [memory] # Uses memory if available, works without it - route_path: /chronicle # Tailscale Serve route - all /chronicle/* requests go here + # route_path: /chronicle # Tailscale Serve route - all /chronicle/* requests go here exposes: - name: audio_intake type: audio - path: /ws_pcm + path: /ws port: 8000 # Internal container port metadata: protocol: wyoming - formats: [pcm] - - name: audio_intake_opus - type: audio - path: /ws_omi - port: 8000 # Internal container port - metadata: - protocol: wyoming - formats: [opus] + formats: [pcm, opus] + codec_param: true # Clients specify 
codec via ?codec=pcm or ?codec=opus - name: http_api type: http path: / @@ -36,6 +30,10 @@ x-ushadow: type: health path: /health port: 8000 + chronicle-workers: + display_name: "Chronicle Workers" + description: "Background workers for Chronicle (transcription, memory, audio processing)" + requires: [llm, transcription] chronicle-webui: display_name: "Chronicle Web UI" description: "Web interface for Chronicle voice journal" @@ -43,20 +41,15 @@ x-ushadow: services: chronicle-backend: - image: ghcr.io/ushadow-io/chronicle/backend:latest + build: + context: ../chronicle/backends/advanced + dockerfile: Dockerfile + target: dev + # image: ghcr.io/ushadow-io/chronicle/backend:latest + image: chronicle-backend:latest container_name: ${COMPOSE_PROJECT_NAME:-ushadow}-chronicle-backend - # Sidecar mode: Run both workers and backend in same container - command: - - /bin/bash - - -c - - | - echo "๐Ÿš€ Starting Chronicle with sidecar workers..." - ./start-workers.sh & - sleep 3 - echo "๐ŸŒ Starting FastAPI backend..." 
- exec uv run --extra deepgram python src/advanced_omi_backend/main.py ports: - - "${CHRONICLE_PORT:-8080}:8000" + - "${CHRONICLE_BACKEND_PORT:-8090}:8000" environment: # Infrastructure connections (from CapabilityResolver or defaults) - MONGODB_URI=${MONGODB_URI:-mongodb://mongo:27017} @@ -75,7 +68,9 @@ services: - DEEPGRAM_API_KEY=${DEEPGRAM_API_KEY:-} - TRANSCRIPTION_PROVIDER=${TRANSCRIPTION_PROVIDER:-deepgram} # Memory capability (optional, from selected provider) + - MEMORY_PROVIDER=${MEMORY_PROVIDER:-openmemory_mcp} - MEMORY_SERVER_URL=${MEMORY_SERVER_URL:-} + - OPENMEMORY_USER_ID=${ADMIN_EMAIL:-admin@example.com} # Security (from settings) - AUTH_SECRET_KEY is required for JWT auth - AUTH_SECRET_KEY=${AUTH_SECRET_KEY} @@ -84,16 +79,17 @@ services: # CORS - CORS_ORIGINS=${CORS_ORIGINS:-*} + + volumes: # Data persistence - chronicle_audio:/app/audio_chunks - chronicle_data:/app/data - chronicle_debug:/app/debug_dir - # Model registry - defines available LLMs, embeddings, STT, TTS - # Use PROJECT_ROOT for absolute host paths (relative paths don't work from backend container) - - ${PROJECT_ROOT}/config/config.yml:/app/config/config.yml:ro - - ${PROJECT_ROOT}/config/defaults.yml:/app/config/defaults.yml:ro + # Config directory - contains config files, feature flags, secrets, etc. 
+      # Mount entire ushadow config directory to override built-in configs
+      - ${PROJECT_ROOT}/config:/app/config:ro
     networks:
       - ushadow-network
@@ -118,14 +114,68 @@ services:
         reservations:
           memory: 2G
+  chronicle-workers:
+    # image: ghcr.io/ushadow-io/chronicle/backend:latest
+    image: chronicle-workers:latest
+    build:
+      context: ../chronicle/backends/advanced
+      dockerfile: Dockerfile
+      target: prod
+    container_name: ${COMPOSE_PROJECT_NAME:-ushadow}-chronicle-workers
+    command: ["uv", "run", "python", "worker_orchestrator.py"]
+    environment:
+      # Infrastructure connections
+      - AUTH_SECRET_KEY=${AUTH_SECRET_KEY}
+      - ADMIN_PASSWORD=${ADMIN_PASSWORD:-}
+      - MONGODB_URI=${MONGODB_URI:-mongodb://mongo:27017}
+      - MONGODB_DATABASE=${MONGODB_DATABASE}
+      - REDIS_URL=${REDIS_URL:-redis://redis:6379/1}
+      - QDRANT_BASE_URL=${QDRANT_BASE_URL:-qdrant}
+      - QDRANT_PORT=${QDRANT_PORT:-6333}
+
+      # LLM capability
+      - OPENAI_API_KEY=${OPENAI_API_KEY}
+      - OPENAI_BASE_URL=${OPENAI_BASE_URL:-https://api.openai.com/v1}
+      - OPENAI_MODEL=${OPENAI_MODEL:-gpt-4o-mini}
+
+      # Transcription capability
+      - DEEPGRAM_API_KEY=${DEEPGRAM_API_KEY:-}
+      # - PARAKEET_ASR_URL=${PARAKEET_ASR_URL}
+      - TRANSCRIPTION_PROVIDER=${TRANSCRIPTION_PROVIDER:-deepgram}
+      - OPENMEMORY_USER_ID=${ADMIN_EMAIL:-admin@example.com}
+
+      # Worker orchestrator configuration
+      - WORKER_CHECK_INTERVAL=${WORKER_CHECK_INTERVAL:-10}
+      - MIN_RQ_WORKERS=${MIN_RQ_WORKERS:-6}
+      - WORKER_STARTUP_GRACE_PERIOD=${WORKER_STARTUP_GRACE_PERIOD:-30}
+      - WORKER_SHUTDOWN_TIMEOUT=${WORKER_SHUTDOWN_TIMEOUT:-30}
+
+    volumes:
+      # Data persistence (shared with backend)
+      - chronicle_audio:/app/audio_chunks
+      - chronicle_data:/app/data
+      - chronicle_debug:/app/debug_dir
+
+      # Config directory
+      - ${PROJECT_ROOT}/config:/app/config:ro
+
+    networks:
+      - ushadow-network
+
+    restart: unless-stopped
+
   chronicle-webui:
-    image: ghcr.io/ushadow-io/chronicle/webui:nodeps2
+    # image: ghcr.io/ushadow-io/chronicle/webui:nodeps2
+    image: chronicle-webui:latest
+
build:
+      context: ../chronicle/backends/advanced/webui
+      dockerfile: Dockerfile
     container_name: ${COMPOSE_PROJECT_NAME:-ushadow}-chronicle-webui
     ports:
       - "${CHRONICLE_WEBUI_PORT:-3080}:80"
     environment:
-      - VITE_BACKEND_URL=http://localhost:${CHRONICLE_PORT:-8080}
-      - BACKEND_URL=${CHRONICLE_BACKEND_URL:-http://chronicle-backend:8080}
+      - VITE_BACKEND_URL=http://localhost:${CHRONICLE_BACKEND_PORT:-8090}
+      - BACKEND_URL=${CHRONICLE_BACKEND_URL:-http://chronicle-backend:8000}
     networks:
       - ushadow-network
     depends_on:
diff --git a/compose/docker-compose.infra.yml b/compose/docker-compose.infra.yml
index 48078a77..d84970dd 100644
--- a/compose/docker-compose.infra.yml
+++ b/compose/docker-compose.infra.yml
@@ -94,7 +94,7 @@ services:
   postgres:
     image: postgres:16-alpine
     container_name: postgres
-    profiles: ["memory","metamcp","postgres"]
+    profiles: ["memory","metamcp","postgres","infra"]
     ports:
       - "5432:5432"
     environment:
@@ -125,7 +125,10 @@
       - "7474:7474"  # HTTP
       - "7687:7687"  # Bolt
     environment:
-      # - NEO4J_AUTH=${NEO4J_USERNAME:-neo4j}/${NEO4J_PASSWORD}
+      # Basic auth (JWT requires Neo4j Enterprise - use auth proxy for Community Edition)
+      - NEO4J_AUTH=${NEO4J_USERNAME:-neo4j}/${NEO4J_PASSWORD:-password}
+
+      # Plugins
       - NEO4J_PLUGINS=["apoc"]
     volumes:
       - neo4j_data:/data
@@ -141,6 +144,44 @@
       retries: 5
       start_period: 30s
+  keycloak:
+    image: quay.io/keycloak/keycloak:26.0
+    container_name: keycloak
+    profiles: ["infra"]
+    ports:
+      - "${KEYCLOAK_PORT:-8081}:8080"
+      - "${KEYCLOAK_MGMT_PORT:-9000}:9000"  # Management
+    environment:
+      - KEYCLOAK_ADMIN=${KEYCLOAK_ADMIN:-admin}
+      - KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN_PASSWORD:-admin}
+      - KC_DB=postgres
+      - KC_DB_URL=jdbc:postgresql://postgres:5432/ushadow
+      - KC_DB_USERNAME=ushadow
+      - KC_DB_PASSWORD=ushadow
+      - KC_HOSTNAME_STRICT=false
+      - KC_HOSTNAME_STRICT_HTTPS=false
+      - KC_HTTP_ENABLED=true
+      - KC_HEALTH_ENABLED=true
+    volumes:
+      - ../ushadow/frontend/keycloak-theme:/opt/keycloak/themes/ushadow:ro
+      -
../config/keycloak:/opt/keycloak/data/import:ro
+    command:
+      - start-dev
+      - --import-realm
+    depends_on:
+      postgres:
+        condition: service_healthy
+    networks:
+      - ushadow-network
+      - infra-network
+    restart: unless-stopped
+    healthcheck:
+      test: ["CMD-SHELL", "exec 3<>/dev/tcp/localhost/9000; echo -e 'GET /health/ready HTTP/1.1\\r\\nHost: localhost\\r\\nConnection: close\\r\\n\\r\\n' >&3; cat <&3 | grep -q UP"]
+      interval: 10s
+      timeout: 5s
+      retries: 5
+      start_period: 30s
+
   # tailscale:
   #   image: tailscale/tailscale:latest
   #   container_name: ushadow-tailscale
diff --git a/compose/openmemory-compose.yaml b/compose/openmemory-compose.yaml
index 8b6117b6..0a4e5634 100644
--- a/compose/openmemory-compose.yaml
+++ b/compose/openmemory-compose.yaml
@@ -22,16 +22,17 @@ services:
   mem0:
-    image: ghcr.io/ushadow-io/mem0-api:latest
+    image: ghcr.io/ushadow-io/u-mem0-api:v1.0.4
     container_name: ${COMPOSE_PROJECT_NAME:-ushadow}-mem0
     pull_policy: always
     # Requires qdrant from infra (started via infra_services in x-ushadow)
     ports:
       - "${OPENMEMORY_PORT:-8765}:8765"
     environment:
-      # SQLite for persistent storage (default, stored in mem0_data volume)
-      # To use PostgreSQL instead, uncomment:
-      # - DATABASE_URL=postgresql://ushadow:ushadow@postgres:5432/openmemory
+      # Database configuration
+      # SQLite (default, persisted in mem0_data volume): sqlite:////app/data/openmemory.db
+      # PostgreSQL (when supported): postgresql://user:password@host:port/database
+      - DATABASE_URL=${OPENMEMORY_DATABASE_URL:-sqlite:////app/data/openmemory.db}

       # Qdrant connection (from CapabilityResolver or defaults)
       - QDRANT_HOST=${QDRANT_HOST:-qdrant}
@@ -44,17 +45,16 @@
       - OPENAI_MODEL=${OPENAI_MODEL:-gpt-4o-mini}

       # Neo4j (for graph memory) - FEATURE FLAG: uncomment to enable graph mode
-      - NEO4J_URI=${NEO4J_URI:-bolt://neo4j:7687}
-      - NEO4J_USER=${NEO4J_USER:-neo4j}
+      - NEO4J_URL=${NEO4J_URL:-bolt://neo4j:7687}
+      - NEO4J_USERNAME=${NEO4J_USERNAME:-neo4j}
       - NEO4J_PASSWORD=${NEO4J_PASSWORD:-password}
       - NEO4J_DB=${NEO4J_DB:-neo4j}
-      - USER=${USER:-user@example.com}
     volumes:
       - mem0_data:/app/data
networks: - ushadow-network healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8765/health"] + test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8765/api/v1/config/'); exit(0)"] interval: 10s timeout: 5s retries: 5 @@ -62,13 +62,14 @@ services: restart: unless-stopped mem0-ui: - image: ghcr.io/ushadow-io/u-mem0-ui:latest + image: ghcr.io/ushadow-io/u-mem0-ui:v1.0.4 container_name: ${COMPOSE_PROJECT_NAME:-ushadow}-mem0-ui ports: - "3002:3000" environment: - VITE_API_URL=http://localhost:${OPENMEMORY_PORT:-8765} - API_URL=http://mem0:8765 + - NEXT_PUBLIC_USER_ID=${ADMIN_EMAIL:-admin@example.com} networks: - ushadow-network depends_on: diff --git a/config/config.defaults.yaml b/config/config.defaults.yaml index 7cba8e18..4a03b3a6 100644 --- a/config/config.defaults.yaml +++ b/config/config.defaults.yaml @@ -17,6 +17,16 @@ auth: admin_email: admin@example.com admin_name: admin +# Keycloak OAuth Configuration +keycloak: + enabled: true + url: http://keycloak:8080 # Internal Docker URL + public_url: http://localhost:8081 # External browser URL + realm: ushadow + backend_client_id: ushadow-backend + frontend_client_id: ushadow-frontend + admin_user: admin + # Speech Detection Settings speech_detection: min_words: 5 diff --git a/config/config.yml b/config/config.yml index a5c9eb17..08be3cef 100644 --- a/config/config.yml +++ b/config/config.yml @@ -2,190 +2,16 @@ defaults: llm: openai-llm embedding: openai-embed stt: stt-deepgram + stt_stream: stt-deepgram-stream tts: tts-http vector_store: vs-qdrant -models: -- name: emberfang-llm - description: Emberfang One LLM - model_type: llm - model_provider: openai - model_name: gpt-oss-20b-f16 - model_url: http://192.168.1.166:8084/v1 - api_key: '1234' - model_params: - temperature: 0.2 - max_tokens: 2000 - model_output: json -- name: emberfang-embed - description: Emberfang embeddings (nomic-embed-text) - model_type: embedding - model_provider: openai - model_name: 
nomic-embed-text-v1.5 - model_url: http://192.168.1.166:8084/v1 - api_key: '1234' - embedding_dimensions: 768 - model_output: vector -- name: local-llm - description: Local Ollama LLM - model_type: llm - model_provider: ollama - api_family: openai - model_name: llama3.1:latest - model_url: http://localhost:11434/v1 - api_key: ${OPENAI_API_KEY:-ollama} - model_params: - temperature: 0.2 - max_tokens: 2000 - model_output: json -- name: local-embed - description: Local embeddings via Ollama nomic-embed-text - model_type: embedding - model_provider: ollama - api_family: openai - model_name: nomic-embed-text:latest - model_url: http://localhost:11434/v1 - api_key: ${OPENAI_API_KEY:-ollama} - embedding_dimensions: 768 - model_output: vector -- name: openai-llm - description: OpenAI GPT-4o-mini - model_type: llm - model_provider: openai - api_family: openai - model_name: gpt-4o-mini - model_url: https://api.openai.com/v1 - api_key: ${OPENAI_API_KEY:-} - model_params: - temperature: 0.2 - max_tokens: 2000 - model_output: json -- name: openai-embed - description: OpenAI text-embedding-3-small - model_type: embedding - model_provider: openai - api_family: openai - model_name: text-embedding-3-small - model_url: https://api.openai.com/v1 - api_key: ${OPENAI_API_KEY:-} - embedding_dimensions: 1536 - model_output: vector -- name: groq-llm - description: Groq LLM via OpenAI-compatible API - model_type: llm - model_provider: groq - api_family: openai - model_name: llama-3.1-70b-versatile - model_url: https://api.groq.com/openai/v1 - api_key: ${GROQ_API_KEY:-} - model_params: - temperature: 0.2 - max_tokens: 2000 - model_output: json -- name: vs-qdrant - description: Qdrant vector database - model_type: vector_store - model_provider: qdrant - api_family: qdrant - model_url: http://${QDRANT_BASE_URL:-qdrant}:${QDRANT_PORT:-6333} - model_params: - host: ${QDRANT_BASE_URL:-qdrant} - port: ${QDRANT_PORT:-6333} - collection_name: omi_memories -- name: stt-parakeet-batch - description: 
Parakeet NeMo ASR (batch) - model_type: stt - model_provider: parakeet - api_family: http - model_url: http://172.17.0.1:8767 - api_key: '' - operations: - stt_transcribe: - method: POST - path: /transcribe - content_type: multipart/form-data - response: - type: json - extract: - text: text - words: words - segments: segments -- name: stt-deepgram - description: Deepgram Nova 3 (batch) - model_type: stt - model_provider: deepgram - api_family: http - model_url: https://api.deepgram.com/v1 - api_key: ${DEEPGRAM_API_KEY:-} - operations: - stt_transcribe: - method: POST - path: /listen - headers: - Authorization: Token ${DEEPGRAM_API_KEY:-} - Content-Type: audio/raw - query: - model: nova-3 - language: multi - smart_format: 'true' - punctuate: 'true' - diarize: false - encoding: linear16 - sample_rate: 16000 - channels: '1' - response: - type: json - extract: - text: results.channels[0].alternatives[0].transcript - words: results.channels[0].alternatives[0].words - segments: results.channels[0].alternatives[0].paragraphs.paragraphs -- name: tts-http - description: Generic JSON TTS endpoint - model_type: tts - model_provider: custom - api_family: http - model_url: http://localhost:9000 - operations: - tts_synthesize: - method: POST - path: /synthesize - headers: - Content-Type: application/json - response: - type: json -- name: stt-parakeet-stream - description: Parakeet streaming transcription over WebSocket - model_type: stt_stream - model_provider: parakeet - api_family: websocket - model_url: ws://localhost:9001/stream - operations: - start: - message: - type: transcribe - config: - vad_enabled: true - vad_silence_ms: 1000 - time_interval_seconds: 30 - return_interim_results: true - min_audio_seconds: 0.5 - chunk_header: - message: - type: audio_chunk - rate: 16000 - width: 2 - channels: 1 - end: - message: - type: stop - expect: - interim_type: interim_result - final_type: final_result - extract: - text: text - words: words - segments: segments + +# Enable 
provider segments from Deepgram diarization +misc_settings: + use_provider_segments: true + memory: - provider: chronicle + provider: openmemory_mcp timeout_seconds: 1200 extraction: enabled: true @@ -195,9 +21,9 @@ memory: ' openmemory_mcp: - server_url: http://localhost:8765 + server_url: ${oc.env:MEMORY_SERVER_URL,http://localhost:8765} client_name: chronicle - user_id: default + user_id: ${oc.env:OPENMEMORY_USER_ID,default} timeout: 30 mycelia: api_url: http://localhost:5173 diff --git a/config/defaults.yml b/config/defaults.yml index e286d518..46ce632b 100644 --- a/config/defaults.yml +++ b/config/defaults.yml @@ -1,54 +1,99 @@ -# Default model registry configuration -# These provide fallback defaults when config.yml is missing or incomplete -# Priority: config.yml > environment variables > defaults.yml +# Chronicle Default Configuration +# This file provides sensible defaults for all configuration options. +# User overrides in config.yml take precedence over these defaults. defaults: llm: openai-llm embedding: openai-embed stt: stt-deepgram + stt_stream: stt-deepgram-stream + tts: tts-http vector_store: vs-qdrant models: - # OpenAI LLM (default) + # =========================== + # LLM Models + # =========================== - name: openai-llm description: OpenAI GPT-4o-mini model_type: llm model_provider: openai api_family: openai - model_name: ${OPENAI_MODEL:-gpt-4o-mini} - model_url: ${OPENAI_BASE_URL:-https://api.openai.com/v1} - api_key: ${OPENAI_API_KEY:-} + model_name: gpt-4o-mini + model_url: https://api.openai.com/v1 + api_key: ${oc.env:OPENAI_API_KEY,''} model_params: temperature: 0.2 max_tokens: 2000 model_output: json - # OpenAI Embeddings (default) + - name: local-llm + description: Local Ollama LLM + model_type: llm + model_provider: ollama + api_family: openai + model_name: llama3.1:latest + model_url: http://localhost:11434/v1 + api_key: ${oc.env:OPENAI_API_KEY,ollama} + model_params: + temperature: 0.2 + max_tokens: 2000 + model_output: json + 
+ - name: groq-llm + description: Groq LLM via OpenAI-compatible API + model_type: llm + model_provider: groq + api_family: openai + model_name: llama-3.1-70b-versatile + model_url: https://api.groq.com/openai/v1 + api_key: ${oc.env:GROQ_API_KEY,''} + model_params: + temperature: 0.2 + max_tokens: 2000 + model_output: json + + # =========================== + # Embedding Models + # =========================== - name: openai-embed description: OpenAI text-embedding-3-small model_type: embedding model_provider: openai api_family: openai model_name: text-embedding-3-small - model_url: ${OPENAI_BASE_URL:-https://api.openai.com/v1} - api_key: ${OPENAI_API_KEY:-} + model_url: https://api.openai.com/v1 + api_key: ${oc.env:OPENAI_API_KEY,''} embedding_dimensions: 1536 model_output: vector - # Deepgram STT (default) + - name: local-embed + description: Local embeddings via Ollama nomic-embed-text + model_type: embedding + model_provider: ollama + api_family: openai + model_name: nomic-embed-text:latest + model_url: http://localhost:11434/v1 + api_key: ${oc.env:OPENAI_API_KEY,ollama} + embedding_dimensions: 768 + model_output: vector + + # =========================== + # Speech-to-Text Models + # =========================== - name: stt-deepgram description: Deepgram Nova 3 (batch) model_type: stt model_provider: deepgram api_family: http model_url: https://api.deepgram.com/v1 - api_key: ${DEEPGRAM_API_KEY:-} + api_key: ${oc.env:DEEPGRAM_API_KEY,''} operations: stt_transcribe: method: POST path: /listen headers: - Authorization: Token ${DEEPGRAM_API_KEY:-} + Authorization: Token ${oc.env:DEEPGRAM_API_KEY,''} Content-Type: audio/raw query: model: nova-3 @@ -56,41 +101,259 @@ models: smart_format: 'true' punctuate: 'true' diarize: 'true' - utterances: 'true' encoding: linear16 - sample_rate: '16000' + sample_rate: 16000 channels: '1' response: type: json extract: + text: results.channels[0].alternatives[0].transcript + words: results.channels[0].alternatives[0].words + segments: 
results.channels[0].alternatives[0].paragraphs.paragraphs + + - name: stt-parakeet-batch + description: Parakeet NeMo ASR (batch) + model_type: stt + model_provider: parakeet + api_family: http + model_url: http://${oc.env:PARAKEET_ASR_URL,172.17.0.1:8767} + api_key: '' + operations: + stt_transcribe: + method: POST + path: /transcribe + content_type: multipart/form-data + response: + type: json + extract: + text: text + words: words + segments: segments + + # =========================== + # Text-to-Speech Models + # =========================== + - name: tts-http + description: Generic JSON TTS endpoint + model_type: tts + model_provider: custom + api_family: http + model_url: http://localhost:9000 + operations: + tts_synthesize: + method: POST + path: /synthesize + headers: + Content-Type: application/json + response: + type: json + + # =========================== + # Streaming STT Models + # =========================== + - name: stt-deepgram-stream + description: Deepgram Nova 3 streaming transcription over WebSocket + model_type: stt_stream + model_provider: deepgram + api_family: websocket + model_url: wss://api.deepgram.com/v1/listen + api_key: ${oc.env:DEEPGRAM_API_KEY,''} + operations: + query: + model: nova-3 + language: multi + smart_format: 'true' + punctuate: 'true' + encoding: linear16 + sample_rate: 16000 + channels: '1' + end: + message: + type: CloseStream + expect: + interim_type: Results + final_type: Results + extract: text: results.channels[0].alternatives[0].transcript words: results.channels[0].alternatives[0].words segments: results.utterances - # Qdrant Vector Store (default) + - name: stt-parakeet-stream + description: Parakeet streaming transcription over WebSocket + model_type: stt_stream + model_provider: parakeet + api_family: websocket + model_url: ws://localhost:9001/stream + operations: + start: + message: + type: transcribe + config: + vad_enabled: true + vad_silence_ms: 1000 + time_interval_seconds: 30 + return_interim_results: true 
+ min_audio_seconds: 0.5 + chunk_header: + message: + type: audio_chunk + rate: 16000 + width: 2 + channels: 1 + end: + message: + type: stop + expect: + interim_type: interim_result + final_type: final_result + extract: + text: text + words: words + segments: segments + + # =========================== + # Vector Store + # =========================== - name: vs-qdrant description: Qdrant vector database model_type: vector_store model_provider: qdrant api_family: qdrant - model_url: http://${QDRANT_BASE_URL:-qdrant}:${QDRANT_PORT:-6333} + model_url: http://${oc.env:QDRANT_BASE_URL,qdrant}:${oc.env:QDRANT_PORT,6333} model_params: - host: ${QDRANT_BASE_URL:-qdrant} - port: ${QDRANT_PORT:-6333} + host: ${oc.env:QDRANT_BASE_URL,qdrant} + port: ${oc.env:QDRANT_PORT,6333} collection_name: omi_memories +# =========================== +# Memory Configuration +# =========================== memory: provider: chronicle timeout_seconds: 1200 extraction: enabled: true - prompt: 'Extract important information from this conversation and return a JSON object with an array named "facts". Include personal preferences, plans, names, dates, locations, numbers, and key details. Keep items concise and useful. + prompt: | + Extract important information from this conversation and return a JSON object with an array named "facts". + Include personal preferences, plans, names, dates, locations, numbers, and key details. + Keep items concise and useful. 
+ + # OpenMemory MCP provider settings (used when provider: openmemory_mcp) + openmemory_mcp: + server_url: http://localhost:8765 + client_name: chronicle + user_id: default + timeout: 30 - ' + # Mycelia provider settings (used when provider: mycelia) + mycelia: + api_url: http://localhost:5173 + timeout: 30 + # Obsidian Neo4j provider settings (legacy) + obsidian: + enabled: false + neo4j_host: neo4j-mem0 + timeout: 30 + +# =========================== +# Speaker Recognition +# =========================== speaker_recognition: - enabled: false + # Enable/disable speaker recognition (overrides DISABLE_SPEAKER_RECOGNITION env var) + enabled: true + # Service URL (defaults to SPEAKER_SERVICE_URL env var if not specified) service_url: null + # Request timeout in seconds timeout: 60 -chat: {} + # Hugging Face token for PyAnnote models (secret loaded from .env) + hf_token: ${oc.env:HF_TOKEN,''} + + # Speaker identification threshold + similarity_threshold: 0.15 + + # Diarization chunking configuration (speaker service self-managed chunking) + # Maximum audio duration (seconds) for single PyAnnote call + # Files longer than this will be chunked automatically by the speaker service + max_diarize_duration: 60 + # Overlap (seconds) between chunks for speaker continuity + diarize_chunk_overlap: 5.0 + # Backend API URL for fetching audio segments (used by speaker service) + backend_api_url: http://host.docker.internal:8000 + + # Optional: Deepgram API key for wrapper service + deepgram_api_key: ${oc.env:DEEPGRAM_API_KEY,''} + +# =========================== +# Chat Configuration +# =========================== +chat: + system_prompt: | + You are a helpful AI assistant with access to the user's conversation history and memories. + Provide clear, concise, and accurate responses based on the context available to you. 
+ +# =========================== +# Backend Configuration +# =========================== +backend: + # Authentication settings (secrets loaded from .env) + auth: + secret_key: ${oc.env:AUTH_SECRET_KEY,''} + admin_email: ${oc.env:ADMIN_EMAIL,''} + admin_password: ${oc.env:ADMIN_PASSWORD,''} + + # LLM provider configuration + llm: + provider: openai # or ollama + api_key: ${oc.env:OPENAI_API_KEY,''} + base_url: https://api.openai.com/v1 + model: gpt-4o-mini + timeout: 60 + + # Audio processing settings + audio: + # When enabled, always persist audio even if no speech is detected + # This creates conversations for all audio sessions regardless of speech content + always_persist_enabled: false + + # Transcription provider configuration + transcription: + provider: deepgram # or parakeet + api_key: ${oc.env:DEEPGRAM_API_KEY,''} + base_url: https://api.deepgram.com + # Fallback to provider segments when speaker service unavailable + # When true: Use segments from transcription provider (e.g., mock provider in tests) + # When false: Expect speaker service to create segments via diarization (default production behavior) + use_provider_segments: false + + # Diarization settings + diarization: + diarization_source: pyannote + similarity_threshold: 0.15 + min_duration: 0.5 + collar: 2.0 + min_duration_off: 1.5 + min_speakers: 2 + max_speakers: 6 + + # Cleanup settings for soft-deleted conversations + cleanup: + auto_cleanup_enabled: false + retention_days: 30 + + # Speech detection thresholds + speech_detection: + min_words: ${oc.decode:${oc.env:SPEECH_DETECTION_MIN_WORDS,10}} # Minimum words to create conversation + min_confidence: ${oc.decode:${oc.env:SPEECH_DETECTION_MIN_CONFIDENCE,0.7}} # Word confidence threshold + min_duration: ${oc.decode:${oc.env:SPEECH_DETECTION_MIN_DURATION,10.0}} # Minimum speech duration in seconds + + # Conversation stop conditions + conversation_stop: + transcription_buffer_seconds: 120 # Periodic transcription interval (2 minutes) + 
speech_inactivity_threshold: 60 # Speech gap threshold for closure (1 minute) + + # Audio storage paths + audio_storage: + audio_base_path: /app/data + audio_chunks_path: /app/data/audio_chunks diff --git a/config/feature_flags.yaml b/config/feature_flags.yaml index 3844a874..4a2f4193 100644 --- a/config/feature_flags.yaml +++ b/config/feature_flags.yaml @@ -76,7 +76,8 @@ flags: # Timeline - visualize memories on an interactive timeline timeline: enabled: true - description: "Timeline - Visualize memories with time ranges on Gantt charts and D3 timelines" + description: "Timeline - Visualize memories with time ranges on Gantt charts and + D3 timelines" type: release # ServiceConfigs Management - Service instance deployment and wiring @@ -89,7 +90,22 @@ flags: # Service Configs - Show custom service instance configurations service_configs: enabled: false - description: "Show custom service config instances in the Services tab (multi-instance per template)" + description: "Show custom service config instances in the Services tab (multi-instance + per template)" + type: release + + # Split Services View - Organize services into API/Workers and UI tabs + split_services: + enabled: true + description: "Split services into API & Workers and UI Services tabs with automatic + worker grouping" + type: release + + # Legacy Services Page - Old Docker service configuration page + legacy_services_page: + enabled: false + description: "Legacy Services page (replaced by Instances page) - Docker service + configuration" type: release # Add your feature flags here following this format: diff --git a/config/keycloak/realm-export.json b/config/keycloak/realm-export.json new file mode 100644 index 00000000..8599a511 --- /dev/null +++ b/config/keycloak/realm-export.json @@ -0,0 +1,197 @@ +{ + "realm": "ushadow", + "enabled": true, + "displayName": "Ushadow", + "displayNameHtml": "
Ushadow
", + "loginTheme": "ushadow", + "accountTheme": "keycloak", + "adminTheme": "keycloak", + "emailTheme": "keycloak", + "sslRequired": "none", + "registrationAllowed": true, + "registrationEmailAsUsername": true, + "rememberMe": true, + "verifyEmail": false, + "loginWithEmailAllowed": true, + "duplicateEmailsAllowed": false, + "resetPasswordAllowed": true, + "editUsernameAllowed": false, + "bruteForceProtected": true, + "permanentLockout": false, + "maxFailureWaitSeconds": 900, + "minimumQuickLoginWaitSeconds": 60, + "waitIncrementSeconds": 60, + "quickLoginCheckMilliSeconds": 1000, + "maxDeltaTimeSeconds": 43200, + "failureFactor": 5, + "accessTokenLifespan": 300, + "accessTokenLifespanForImplicitFlow": 900, + "ssoSessionIdleTimeout": 1800, + "ssoSessionMaxLifespan": 36000, + "offlineSessionIdleTimeout": 2592000, + "accessCodeLifespan": 60, + "accessCodeLifespanUserAction": 300, + "accessCodeLifespanLogin": 1800, + "actionTokenGeneratedByAdminLifespan": 43200, + "actionTokenGeneratedByUserLifespan": 300, + "oauth2DeviceCodeLifespan": 600, + "oauth2DevicePollingInterval": 5, + "clientScopes": [ + { + "name": "openid", + "description": "OpenID Connect scope for id_token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "name": "sub", + "protocol": "openid-connect", + "protocolMapper": "oidc-sub-mapper", + "consentRequired": false, + "config": {} + } + ] + }, + { + "name": "profile", + "description": "OpenID Connect built-in scope: profile", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "name": "username", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "username", + "id.token.claim": "true", + "access.token.claim": 
"true", + "claim.name": "preferred_username", + "jsonType.label": "String" + } + }, + { + "name": "given name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "firstName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "given_name", + "jsonType.label": "String" + } + }, + { + "name": "family name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "lastName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "family_name", + "jsonType.label": "String" + } + } + ] + }, + { + "name": "email", + "description": "OpenID Connect built-in scope: email", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "name": "email", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "email", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email", + "jsonType.label": "String" + } + }, + { + "name": "email verified", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "emailVerified", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email_verified", + "jsonType.label": "boolean" + } + } + ] + } + ], + "defaultDefaultClientScopes": ["openid", "profile", "email"], + "clients": [ + { + "clientId": "ushadow-frontend", + "name": "Ushadow Frontend", + "description": "Ushadow web application frontend", + "enabled": true, + "publicClient": true, + "protocol": 
"openid-connect", + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "authorizationServicesEnabled": false, + "fullScopeAllowed": true, + "redirectUris": [ + "http://localhost:3000/oauth/callback" + + ], + "webOrigins": [ + "http://localhost:3000" + + ], + "attributes": { + "pkce.code.challenge.method": "S256", + "post.logout.redirect.uris": "http://localhost:3000/" + } + } + ], + "users": [], + "roles": { + "realm": [ + { + "name": "user", + "description": "Standard user role", + "composite": false, + "clientRole": false + }, + { + "name": "admin", + "description": "Administrator role", + "composite": false, + "clientRole": false + } + ] + } +} diff --git a/config/service_configs.yaml b/config/service_configs.yaml new file mode 100644 index 00000000..d126b6b8 --- /dev/null +++ b/config/service_configs.yaml @@ -0,0 +1,7 @@ +instances: + chronicle-backend-ushadow--leader-: + template_id: chronicle-backend + name: chronicle-backend (ushadow (Leader)) + description: Docker deployment to ushadow (Leader) + created_at: '2026-02-03T00:39:13.236265+00:00' + updated_at: '2026-02-03T00:39:13.236265+00:00' diff --git a/config/tailscale-serve.json b/config/tailscale-serve.json deleted file mode 100644 index 7463ba25..00000000 --- a/config/tailscale-serve.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "version": "alpha0", - "TCP": { - "443": { - "HTTPS": true - } - }, - "Web": { - "gold.spangled-kettle.ts.net:443": { - "Handlers": { - "/auth": { - "Proxy": "http://ushadow-gold-backend:8000/auth" - }, - "/api": { - "Proxy": "http://ushadow-gold-backend:8000/api" - }, - "/": { - "Proxy": "http://ushadow-gold-webui:5173" - } - } - } - } -} \ No newline at end of file diff --git a/config/wiring.yaml b/config/wiring.yaml index 3a138c22..eb7b5ce1 100644 --- a/config/wiring.yaml +++ b/config/wiring.yaml @@ -35,3 +35,26 @@ wiring: source_capability: transcription target_config_id: 
chronicle-backend-ushadow-purple--leader- target_capability: transcription +- id: a6167961 + source_config_id: openai + source_capability: llm + target_config_id: chronicle-backend + target_capability: llm +- id: 1dd92eb0 + source_config_id: deepgram + source_capability: transcription + target_config_id: chronicle-backend + target_capability: transcription +- id: 08e43d57 + source_config_id: openai + source_capability: llm + target_config_id: mycelia-backend + target_capability: llm +- id: ecef1236 + source_config_id: whisper-local + source_capability: transcription + target_config_id: mycelia-backend + target_capability: transcription diff --git a/docker-compose.yml b/docker-compose.yml index 08f9023a..66f6015a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,8 +5,8 @@ # # Recommended workflow: # ./go.sh # First time (handles everything) -#. ./quick-start.sh # Setup infra + app and options -# make up # Daily use (auto-starts infra if needed) +# ./dev.sh # Development (hot-reload) +# make up # Daily use (auto-starts infra if needed) # # Direct docker compose (manual): # docker compose -f docker-compose.infra.yml up -d # Start infra first @@ -14,7 +14,7 @@ # docker compose down # Stop app only # # Environment: -# Uses .env.default (auto-generated by ./quick-start.sh) +# Uses .env (auto-generated by ./go.sh or ./dev.sh) name: ushadow diff --git a/docs/BUILDING_IMAGES.md b/docs/BUILDING_IMAGES.md new file mode 100644 index 00000000..3b55a633 --- /dev/null +++ b/docs/BUILDING_IMAGES.md @@ -0,0 +1,200 @@ +# Building and Pushing Images to GHCR + +This guide explains how to build and push Chronicle and Mycelia Docker images to GitHub Container Registry (ghcr.io). + +## Prerequisites + +### 1. Docker Buildx + +Ensure you have Docker with buildx support: +```bash +docker buildx version +``` + +### 2. 
GitHub Container Registry Access + +Login to GHCR with a Personal Access Token (PAT): + +```bash +# Create a PAT at https://github.com/settings/tokens +# Required scopes: write:packages, read:packages + +echo $GITHUB_TOKEN | docker login ghcr.io -u YOUR_GITHUB_USERNAME --password-stdin +``` + +## Quick Commands + +### Build and Push Chronicle + +```bash +# Build and push with default tag (latest) +make chronicle-push + +# Build and push with specific tag +make chronicle-push TAG=v1.0.0 +``` + +**This builds:** +- `ghcr.io/ushadow-io/chronicle-backend:latest` (or your TAG) +- `ghcr.io/ushadow-io/chronicle-webui:latest` (or your TAG) + +**Platforms:** +- linux/amd64 +- linux/arm64 + +### Build and Push Mycelia + +```bash +# Build and push with default tag (latest) +make mycelia-push + +# Build and push with specific tag +make mycelia-push TAG=v2.0.0 +``` + +**This builds:** +- `ghcr.io/ushadow-io/mycelia-backend:latest` (or your TAG) + +**Platforms:** +- linux/amd64 +- linux/arm64 + +## What Happens Under the Hood + +The Makefile targets use `scripts/build-and-push.sh` which: + +1. **Creates a buildx builder** (if needed): `ushadow-builder` +2. **Builds multi-arch images** for AMD64 and ARM64 +3. **Pushes to ghcr.io/ushadow-io** registry +4. 
**Tags with your specified version** + +### Chronicle Build Details + +```bash +# Backend +Context: chronicle/backends/advanced/ +Dockerfile: chronicle/backends/advanced/Dockerfile +Image: ghcr.io/ushadow-io/chronicle-backend:TAG + +# WebUI +Context: chronicle/backends/advanced/webui/ +Dockerfile: chronicle/backends/advanced/webui/Dockerfile +Image: ghcr.io/ushadow-io/chronicle-webui:TAG +``` + +### Mycelia Build Details + +```bash +# Backend (context is mycelia root) +Context: mycelia/ +Dockerfile: mycelia/backend/Dockerfile +Image: ghcr.io/ushadow-io/mycelia-backend:TAG +``` + +Note: Mycelia's Dockerfile is at `mycelia/backend/Dockerfile` but the build context is `mycelia/` because it needs to copy from multiple subdirectories (`./backend`, `./myceliasdk`, etc.). + +## Advanced Usage + +### Using the Build Script Directly + +If you need more control, use the underlying script: + +```bash +# Chronicle backend +./scripts/build-and-push.sh chronicle/backends/advanced latest chronicle-backend + +# Chronicle webui +./scripts/build-and-push.sh chronicle/backends/advanced/webui latest chronicle-webui + +# Mycelia backend (from mycelia directory) +cd mycelia +../scripts/build-and-push.sh . 
latest mycelia-backend +``` + +### Building Without Pushing + +For local testing without pushing to GHCR: + +```bash +# Chronicle backend (local only) +docker buildx build \ + --platform linux/amd64,linux/arm64 \ + --tag chronicle-backend:test \ + chronicle/backends/advanced + +# Load for local use (single platform) +docker buildx build \ + --platform linux/amd64 \ + --tag chronicle-backend:test \ + --load \ + chronicle/backends/advanced +``` + +## Troubleshooting + +### Builder Not Found + +```bash +# Create the buildx builder manually +docker buildx create --name ushadow-builder --driver docker-container --bootstrap +docker buildx use ushadow-builder +``` + +### Authentication Errors + +```bash +# Re-login to GHCR +docker logout ghcr.io +echo $GITHUB_TOKEN | docker login ghcr.io -u YOUR_GITHUB_USERNAME --password-stdin +``` + +### Build Failures + +Check the Dockerfile exists: +```bash +ls -la chronicle/backends/advanced/Dockerfile +ls -la chronicle/backends/advanced/webui/Dockerfile +ls -la mycelia/backend/Dockerfile +``` + +## CI/CD Integration + +These same commands can be used in GitHub Actions: + +```yaml +- name: Login to GHCR + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + +- name: Build and Push Chronicle + run: make chronicle-push TAG=${{ github.ref_name }} +``` + +## Image Visibility + +By default, images pushed to ghcr.io are private. To make them public: + +1. Go to https://github.com/orgs/ushadow-io/packages +2. Find your package (chronicle-backend, mycelia-backend, etc.) +3. Click "Package settings" +4. Scroll to "Change package visibility" +5. 
Choose "Public" + +## Pulling Images + +After pushing, others can pull: + +```bash +docker pull ghcr.io/ushadow-io/chronicle-backend:latest +docker pull ghcr.io/ushadow-io/chronicle-webui:latest +docker pull ghcr.io/ushadow-io/mycelia-backend:latest +``` + +## Related Commands + +- `make chronicle-build-local` - Build Chronicle locally without pushing +- `make chronicle-dev` - Build and run Chronicle locally for development +- See `make help` for all available commands diff --git a/docs/KEYCLOAK_THEMING_GUIDE.md b/docs/KEYCLOAK_THEMING_GUIDE.md new file mode 100644 index 00000000..a86506de --- /dev/null +++ b/docs/KEYCLOAK_THEMING_GUIDE.md @@ -0,0 +1,204 @@ +# Keycloak Theming Guide + +This guide explains how the Ushadow custom theme for Keycloak login/registration pages works. + +## โœ… Current Status + +The Ushadow theme is **fully configured and active**: +- โœ… Theme files mounted in Keycloak container +- โœ… CSS customized with Ushadow brand colors +- โœ… Realm configured to use the theme +- โœ… Dark theme matching main app design + +## Theme Structure + +``` +config/keycloak/themes/ushadow/ +โ”œโ”€โ”€ theme.properties # Theme configuration +โ””โ”€โ”€ login/ + โ”œโ”€โ”€ theme.properties # Login-specific config + โ””โ”€โ”€ resources/ + โ”œโ”€โ”€ css/ + โ”‚ โ””โ”€โ”€ login.css # Custom CSS (main styling) + โ””โ”€โ”€ img/ + โ”œโ”€โ”€ logo.png # Ushadow logo (80x80px) + โ””โ”€โ”€ README.md +``` + +## How It Works + +### 1. Theme Mounting +The theme is mounted into the Keycloak container via docker-compose: + +```yaml +# In compose/docker-compose.infra.yml +keycloak: + volumes: + - ../config/keycloak/themes:/opt/keycloak/themes:ro +``` + +### 2. Theme Configuration +The `login/theme.properties` file tells Keycloak: +- Inherit from the base `keycloak` theme +- Override styles with our custom `css/login.css` + +### 3. 
CSS Customization +The `login.css` file uses your design system colors: +- **Primary Green**: `#4ade80` (buttons, focus states) +- **Accent Purple**: `#a855f7` (social login, accents) +- **Dark Backgrounds**: Zinc-900/800/700 palette +- **Text Colors**: Zinc-100/400/500 for hierarchy + +### 4. Realm Assignment +The realm configuration points to the theme: + +```json +{ + "loginTheme": "ushadow", + "accountTheme": "keycloak", + "emailTheme": "keycloak" +} +``` + +## Design System Integration + +The theme matches your main app's design system: + +### Color Variables +```css +:root { + /* Primary Color - Bright Blue */ + --ushadow-primary: #3B82F6; /* Blue-500 - Buttons */ + + /* Accent Colors - Logo colors */ + --ushadow-green: #4ade80; /* Green-400 - Register link */ + --ushadow-purple: #a855f7; /* Purple-500 - Logo */ + + /* Dark Theme Backgrounds */ + --ushadow-bg-page: #0a0a0a; /* Almost black */ + --ushadow-bg-card: #1a1a1a; /* Card background */ + --ushadow-bg-input: #0f0f0f; /* Input fields */ + + /* Text Colors */ + --ushadow-text-primary: #ffffff; /* Pure white */ + --ushadow-text-secondary: #71717a; /* Zinc-500 */ + + /* Link Colors */ + --ushadow-link-blue: #60a5fa; /* Blue-400 - "Forgot Password?" */ + --ushadow-link-green: #4ade80; /* Green-400 - "Register" */ +} +``` + +### UI Elements +- **Inputs**: Very dark backgrounds (#0f0f0f) with blue focus rings +- **Primary Button**: Bright blue (#3B82F6) with white text and hover effects +- **Social Buttons**: Dark backgrounds with subtle borders +- **Cards**: Dark (#1a1a1a) background with minimal borders +- **Logo**: Square format (64x64) with rounded corners and subtle glow +- **Links**: Blue "Forgot Password?" and green "Register" links +- **Checkbox**: Blue accent color for "Remember me" +- **Background**: Geometric grid pattern overlay + +## Applying the Theme + +### For New Environments +When Keycloak starts with `--import-realm`, it automatically uses the theme specified in `realm-export.json`. 
+ +### For Existing Keycloak Instances +Run the theme application script: + +```bash +./scripts/apply_keycloak_theme.sh +``` + +Or manually via Keycloak Admin UI: +1. Log into Keycloak Admin Console +2. Navigate to: Realm Settings โ†’ Themes +3. Set "Login theme" to "ushadow" +4. Save + +## Customization + +### Updating Colors +Edit `config/keycloak/themes/ushadow/login/resources/css/login.css`: + +1. **Update CSS Variables** (lines 24-53) +2. **Restart Keycloak** to load changes: + ```bash + docker compose -f compose/docker-compose.infra.yml restart keycloak + ``` + +### Changing the Logo +Replace `config/keycloak/themes/ushadow/login/resources/img/logo.png`: + +- **Recommended Size**: 80x80px (square) +- **Format**: PNG with transparent background +- **Restart Keycloak** after replacing + +### Adding Custom Templates +To customize the HTML (not just CSS): + +1. Create `login/` directory with FreeMarker templates +2. Copy templates from base theme to override +3. Modify as needed +4. Restart Keycloak + +## Troubleshooting + +### Theme Not Showing +1. **Check theme is mounted**: + ```bash + docker compose -f compose/docker-compose.infra.yml exec keycloak ls -la /opt/keycloak/themes/ushadow + ``` + +2. **Verify realm configuration**: + ```bash + curl -s http://localhost:8081/admin/realms/ushadow \ + -H "Authorization: Bearer $TOKEN" | grep loginTheme + ``` + +3. 
**Check Keycloak logs**: + ```bash + docker compose -f compose/docker-compose.infra.yml logs keycloak | grep -i theme + ``` + +### CSS Changes Not Appearing +- **Browser cache**: Hard refresh (Cmd+Shift+R / Ctrl+Shift+R) +- **Keycloak restart**: Required after CSS changes +- **Theme cache**: Clear by restarting Keycloak + +### Wrong Theme Still Active +Re-apply the theme: +```bash +./scripts/apply_keycloak_theme.sh +``` + +## Testing + +Visit your Keycloak login page: +``` +http://localhost:8081/realms/ushadow/protocol/openid-connect/auth?client_id=ushadow-frontend&redirect_uri=http://localhost:3010/oauth/callback&response_type=code&scope=openid +``` + +You should see: +- โœ… Very dark background with geometric pattern +- โœ… Ushadow logo (green/purple U) at top +- โœ… Bright blue primary button +- โœ… Very dark input fields +- โœ… Blue "Forgot Password?" and green "Register" links +- โœ… Blue checkbox accent color +- โœ… Consistent styling matching the main login page + +## Related Files + +- **Theme CSS**: `config/keycloak/themes/ushadow/login/resources/css/login.css` +- **Theme Config**: `config/keycloak/themes/ushadow/login/theme.properties` +- **Realm Config**: `config/keycloak/realm-export.json` +- **Docker Config**: `compose/docker-compose.infra.yml` +- **Apply Script**: `scripts/apply_keycloak_theme.sh` + +## Resources + +- [Keycloak Theming Guide](https://www.keycloak.org/docs/latest/server_development/#_themes) +- [PatternFly CSS Classes](https://www.patternfly.org/) (Base theme framework) +- [FreeMarker Templates](https://freemarker.apache.org/) (Template engine) diff --git a/docs/KEYCLOAK_URL_CONFIGURATION.md b/docs/KEYCLOAK_URL_CONFIGURATION.md new file mode 100644 index 00000000..755a78ef --- /dev/null +++ b/docs/KEYCLOAK_URL_CONFIGURATION.md @@ -0,0 +1,214 @@ +# Keycloak URL Configuration + +This guide explains how to configure Keycloak to accept OAuth redirects from different URLs (localhost ports, Tailscale domains, production domains, etc.). 
+ +## ๐ŸŽฏ Three Configuration Methods + +### **Option 1: Manual Configuration** (Quick Testing) + +Best for: Quick testing, single environment + +1. Access Keycloak Admin Console: + ```bash + open http://localhost:8081 + ``` + +2. Login with admin credentials from `.env`: + - Username: `KEYCLOAK_ADMIN` (default: `admin`) + - Password: `KEYCLOAK_ADMIN_PASSWORD` (default: `admin`) + +3. Navigate to **Clients** โ†’ **ushadow-frontend** โ†’ **Settings** + +4. Add redirect URIs to **Valid redirect URIs**: + ``` + http://localhost:3000/oauth/callback + http://localhost:3010/oauth/callback + http://localhost:3020/oauth/callback + https://*.ts.net/oauth/callback + https://yourdomain.com/oauth/callback + ``` + +5. Add post-logout URIs to **Valid post logout redirect URIs**: + ``` + http://localhost:3000/* + http://localhost:3010/* + http://localhost:3020/* + https://*.ts.net/* + https://yourdomain.com/* + ``` + +6. Click **Save** + +**Pros**: Immediate, no code changes +**Cons**: Manual, lost on container restart, doesn't scale + +--- + +### **Option 2: Automatic Registration** (Recommended) โœ… + +Best for: Multi-worktree development, dynamic environments + +The backend automatically registers its redirect URIs on startup using the Keycloak Admin API. + +#### How It Works + +1. **Automatic on Backend Startup**: When the backend starts, it: + - Detects the current `PORT_OFFSET` environment variable + - Calculates the frontend port (3000 + PORT_OFFSET) + - Registers `http://localhost:{port}/oauth/callback` with Keycloak + - Also registers Tailscale hostname if `TAILSCALE_HOSTNAME` is set + +2. **Environment Variables**: + ```bash + # In your .env file + PORT_OFFSET=10 # Frontend runs on 3010 + TAILSCALE_HOSTNAME=myapp.ts.net # Optional: Tailscale domain + FRONTEND_URL=https://app.example.com # Optional: Custom domain + KEYCLOAK_AUTO_REGISTER=true # Enable auto-registration (default) + ``` + +3. 
**Multi-Worktree Example**: + ```bash + # Worktree 1: ushadow (PORT_OFFSET=10) + # โ†’ Registers http://localhost:3010/oauth/callback + + # Worktree 2: ushadow-orange (PORT_OFFSET=20) + # โ†’ Registers http://localhost:3020/oauth/callback + + # Each environment auto-registers its own URIs! + ``` + +#### Manual Script Registration + +You can also manually register URIs using the included script: + +```bash +# Register a specific redirect URI +python scripts/register_keycloak_redirects.py http://localhost:3010/oauth/callback + +# Register Tailscale domain +python scripts/register_keycloak_redirects.py https://myapp.ts.net/oauth/callback + +# Register production domain +python scripts/register_keycloak_redirects.py https://app.example.com/oauth/callback +``` + +**Pros**: Automatic, scales to any number of environments, persists across container restarts +**Cons**: Requires backend to be running, needs Keycloak admin credentials + +--- + +### **Option 3: API-Based Configuration** (For Advanced Use Cases) + +Best for: Production deployments, infrastructure-as-code + +Use the Keycloak Admin API endpoints directly: + +```bash +# Get current client configuration +curl http://localhost:8000/api/keycloak/clients/ushadow-frontend/config + +# Enable PKCE for the client (security best practice) +curl -X POST http://localhost:8000/api/keycloak/clients/ushadow-frontend/enable-pkce +``` + +You can also use the `KeycloakAdminClient` service in Python: + +```python +from src.services.keycloak_admin import get_keycloak_admin + +admin_client = get_keycloak_admin() + +# Add redirect URIs +await admin_client.update_client_redirect_uris( + client_id="ushadow-frontend", + redirect_uris=[ + "http://localhost:3010/oauth/callback", + "https://app.example.com/oauth/callback" + ], + merge=True # Merge with existing URIs +) + +# Add post-logout redirect URIs +await admin_client.update_post_logout_redirect_uris( + client_id="ushadow-frontend", + post_logout_redirect_uris=[ + 
"http://localhost:3010", + "https://app.example.com" + ], + merge=True +) +``` + +**Pros**: Programmatic, can be integrated into deployment pipelines +**Cons**: Requires code, more complex + +--- + +## ๐Ÿ”ง Configuration Reference + +### Environment Variables + +| Variable | Default | Description | +|----------|---------|-------------| +| `PORT_OFFSET` | `10` | Port offset for frontend (3000 + offset) | +| `FRONTEND_URL` | - | Custom frontend URL for production | +| `TAILSCALE_HOSTNAME` | - | Tailscale hostname (e.g., `myapp.ts.net`) | +| `KEYCLOAK_AUTO_REGISTER` | `true` | Enable automatic redirect URI registration | +| `KEYCLOAK_URL` | `http://localhost:8081` | Keycloak URL (internal) | +| `KEYCLOAK_ADMIN` | `admin` | Keycloak admin username | +| `KEYCLOAK_ADMIN_PASSWORD` | `admin` | Keycloak admin password | + +### Redirect URI Patterns + +| Pattern | Purpose | Example | +|---------|---------|---------| +| `http://localhost:{port}/oauth/callback` | Local development | `http://localhost:3010/oauth/callback` | +| `https://*.ts.net/oauth/callback` | Tailscale domains (wildcard) | `https://myapp.ts.net/oauth/callback` | +| `https://yourdomain.com/oauth/callback` | Production domain | `https://app.example.com/oauth/callback` | + +### Post-Logout Redirect URI Patterns + +| Pattern | Purpose | Example | +|---------|---------|---------| +| `http://localhost:{port}/*` | Local development | `http://localhost:3010/` | +| `https://*.ts.net/*` | Tailscale domains (wildcard) | `https://myapp.ts.net/` | +| `https://yourdomain.com/*` | Production domain | `https://app.example.com/` | + +--- + +## ๐Ÿšจ Troubleshooting + +### "Invalid redirect_uri" Error + +**Cause**: The redirect URI is not registered in Keycloak + +**Solutions**: +1. Check backend logs for auto-registration status +2. Manually add the URI using Option 1 (Admin Console) +3. Run the registration script: `python scripts/register_keycloak_redirects.py ` +4. 
Verify `KEYCLOAK_AUTO_REGISTER=true` in your `.env` + +### Auto-Registration Not Working + +**Check**: +1. Keycloak is running: `docker ps | grep keycloak` +2. Admin credentials are correct in `.env` +3. Backend logs show registration attempt +4. Keycloak is accessible from backend: `docker exec -it ushadow-backend curl http://keycloak:8080` + +**Workaround**: Use manual registration (Option 1) while debugging + +### Multi-Worktree Conflicts + +**Issue**: Multiple worktrees trying to register URIs simultaneously + +**Solution**: Auto-registration merges URIs by default, so this should not cause conflicts. Each worktree adds its own URI to the shared list. + +--- + +## ๐Ÿ“š Related Documentation + +- [Keycloak OAuth Implementation](./KEYCLOAK_OAUTH.md) (TODO) +- [Multi-Worktree Setup](./MULTI_WORKTREE.md) (TODO) +- [Keycloak Admin API](https://www.keycloak.org/docs-api/latest/rest-api/) diff --git a/mycelia b/mycelia new file mode 160000 index 00000000..9586a3c3 --- /dev/null +++ b/mycelia @@ -0,0 +1 @@ +Subproject commit 9586a3c332becdee1050069b9a7efe3507ae05e2 diff --git a/openmemory b/openmemory new file mode 160000 index 00000000..8c092aae --- /dev/null +++ b/openmemory @@ -0,0 +1 @@ +Subproject commit 8c092aaefa4567d3b55d57890a5ed4fe079dd738 diff --git a/pixi.lock b/pixi.lock index f1667233..d92426bf 100644 --- a/pixi.lock +++ b/pixi.lock @@ -7,10 +7,20 @@ environments: pypi-prerelease-mode: if-necessary-or-explicit packages: osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.12.1-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_8.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.34.6-hc919400_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2026.1.4-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.16.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.9-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-75.1-hfee45f7_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libabseil-20250512.1-cxx17_hd41c47c_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.2.0-hc919400_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.2.0-hc919400_1.conda @@ -24,17 +34,43 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.51.2-h1b79a29_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libuv-1.51.0-h6caf38d_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h5e97a16_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/nodejs-25.2.1-h5230ea7_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.6.0-h5503f6c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.52-pyha770c72_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/prompt_toolkit-3.0.52-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.12.12-h18782d2_1_cpython.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.3-h46df422_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-14.3.1-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rust-1.92.0-h4ff7c5d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-aarch64-apple-darwin-1.92.0-hf6ec828_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h892fb3f_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/uv-0.9.28-h9b11cc2_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstd-1.5.7-hbf9d68e_6.conda packages: +- conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.12.1-pyhcf101f3_0.conda + sha256: eb0c4e2b24f1fbefaf96ce6c992c6bd64340bc3c06add4d7415ab69222b201da + md5: 11a2b8c732d215d977998ccd69a9d5e8 + depends: + - exceptiongroup >=1.0.2 + - idna >=2.8 + - python >=3.10 + - typing_extensions >=4.5 + - python + constrains: + - trio >=0.32.0 + - uvloop >=0.21 + license: MIT + license_family: MIT + size: 145175 + timestamp: 1767719033569 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_8.conda sha256: b456200636bd5fecb2bec63f7e0985ad2097cf1b83d60ce0b6968dffa6d02aa1 md5: 58fd217444c2a5701a44244faf518206 @@ -61,6 +97,92 @@ packages: license: ISC size: 146519 timestamp: 
1767500828366 +- conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2026.1.4-pyhd8ed1ab_0.conda + sha256: 110338066d194a715947808611b763857c15458f8b3b97197387356844af9450 + md5: eacc711330cd46939f66cd401ff9c44b + depends: + - python >=3.10 + license: ISC + size: 150969 + timestamp: 1767500900768 +- conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda + sha256: ee6cf346d017d954255bbcbdb424cddea4d14e4ed7e9813e429db1d795d01144 + md5: 8e662bd460bda79b1ea39194e3c4c9ab + depends: + - python >=3.10 + - typing_extensions >=4.6.0 + license: MIT and PSF-2.0 + size: 21333 + timestamp: 1763918099466 +- conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.16.0-pyhcf101f3_1.conda + sha256: 96cac6573fd35ae151f4d6979bab6fbc90cb6b1fb99054ba19eb075da9822fcb + md5: b8993c19b0c32a2f7b66cbb58ca27069 + depends: + - python >=3.10 + - typing_extensions + - python + license: MIT + license_family: MIT + size: 39069 + timestamp: 1767729720872 +- conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda + sha256: 84c64443368f84b600bfecc529a1194a3b14c3656ee2e832d15a20e0329b6da3 + md5: 164fc43f0b53b6e3a7bc7dce5e4f1dc9 + depends: + - python >=3.10 + - hyperframe >=6.1,<7 + - hpack >=4.1,<5 + - python + license: MIT + license_family: MIT + size: 95967 + timestamp: 1756364871835 +- conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda + sha256: 6ad78a180576c706aabeb5b4c8ceb97c0cb25f1e112d76495bff23e3779948ba + md5: 0a802cb9888dd14eeefc611f05c40b6e + depends: + - python >=3.9 + license: MIT + license_family: MIT + size: 30731 + timestamp: 1737618390337 +- conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.9-pyh29332c3_0.conda + sha256: 04d49cb3c42714ce533a8553986e1642d0549a05dc5cc48e0d43ff5be6679a5b + md5: 4f14640d58e2cc0aa0819d9d8ba125bb + depends: + - python >=3.9 + - h11 >=0.16 + - h2 >=3,<5 + - sniffio 1.* + - anyio >=4.0,<5.0 + - certifi + - python + license: BSD-3-Clause 
+ license_family: BSD + size: 49483 + timestamp: 1745602916758 +- conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda + sha256: cd0f1de3697b252df95f98383e9edb1d00386bfdd03fdf607fa42fe5fcb09950 + md5: d6989ead454181f4f9bc987d3dc4e285 + depends: + - anyio + - certifi + - httpcore 1.* + - idna + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + size: 63082 + timestamp: 1733663449209 +- conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda + sha256: 77af6f5fe8b62ca07d09ac60127a30d9069fdc3c68d6b256754d0ffb1f7779f8 + md5: 8e6923fc12f1fe8f8c4e5c9f343256ac + depends: + - python >=3.9 + license: MIT + license_family: MIT + size: 17397 + timestamp: 1737618427549 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-75.1-hfee45f7_0.conda sha256: 9ba12c93406f3df5ab0a43db8a4b4ef67a5871dfd401010fbe29b218b2cbe620 md5: 5eb22c1d7b3fc4abb50d92d621583137 @@ -70,6 +192,15 @@ packages: license_family: MIT size: 11857802 timestamp: 1720853997952 +- conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda + sha256: ae89d0299ada2a3162c2614a9d26557a92aa6a77120ce142f8e0109bbf0342b0 + md5: 53abe63df7e10a6ba605dc5f9f961d36 + depends: + - python >=3.10 + license: BSD-3-Clause + license_family: BSD + size: 50721 + timestamp: 1760286526795 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libabseil-20250512.1-cxx17_hd41c47c_0.conda sha256: 7f0ee9ae7fa2cf7ac92b0acf8047c8bac965389e48be61bf1d463e057af2ea6a md5: 360dbb413ee2c170a0a684a33c4fc6b8 @@ -202,6 +333,25 @@ packages: license_family: Other size: 46438 timestamp: 1727963202283 +- conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda + sha256: 7b1da4b5c40385791dbc3cc85ceea9fad5da680a27d5d3cb8bfaa185e304a89e + md5: 5b5203189eb668f042ac2b0826244964 + depends: + - mdurl >=0.1,<1 + - python >=3.10 + license: MIT + license_family: MIT + size: 64736 + timestamp: 1754951288511 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda + sha256: 78c1bbe1723449c52b7a9df1af2ee5f005209f67e40b6e1d3c7619127c43b1c7 + md5: 592132998493b3ff25fd7479396e8351 + depends: + - python >=3.9 + license: MIT + license_family: MIT + size: 14465 + timestamp: 1733255681319 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h5e97a16_3.conda sha256: 2827ada40e8d9ca69a153a45f7fd14f32b2ead7045d3bbb5d10964898fe65733 md5: 068d497125e4bf8a66bf707254fff5ae @@ -243,6 +393,36 @@ packages: license_family: Apache size: 3108371 timestamp: 1762839712322 +- conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.52-pyha770c72_0.conda + sha256: 4817651a276016f3838957bfdf963386438c70761e9faec7749d411635979bae + md5: edb16f14d920fb3faf17f5ce582942d6 + depends: + - python >=3.10 + - wcwidth + constrains: + - prompt_toolkit 3.0.52 + license: BSD-3-Clause + license_family: BSD + size: 273927 + timestamp: 1756321848365 +- conda: https://conda.anaconda.org/conda-forge/noarch/prompt_toolkit-3.0.52-hd8ed1ab_0.conda + sha256: e79922a360d7e620df978417dd033e66226e809961c3e659a193f978a75a9b0b + md5: 6d034d3a6093adbba7b24cb69c8c621e + depends: + - prompt-toolkit >=3.0.52,<3.0.53.0a0 + license: BSD-3-Clause + license_family: BSD + size: 7212 + timestamp: 1756321849562 +- conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda + sha256: 5577623b9f6685ece2697c6eb7511b4c9ac5fb607c9babc2646c811b428fd46a + md5: 6b6ece66ebcae2d5f326c77ef2c5a066 + depends: + - python >=3.9 + license: BSD-2-Clause + license_family: BSD + size: 889287 + timestamp: 1750615908735 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.12.12-h18782d2_1_cpython.conda build_number: 1 sha256: 626da9bb78459ce541407327d1e22ee673fd74e9103f1a0e0f4e3967ad0a23a7 @@ -275,6 +455,19 @@ packages: license_family: GPL size: 313930 timestamp: 1765813902568 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/rich-14.3.1-pyhcf101f3_0.conda + sha256: 8d9c9c52bb4d3684d467a6e31814d8c9fccdacc8c50eb1e3e5025e88d6d57cb4 + md5: 83d94f410444da5e2f96e5742b7a4973 + depends: + - markdown-it-py >=2.2.0 + - pygments >=2.13.0,<3.0.0 + - python >=3.10 + - typing_extensions >=4.0.0,<5.0.0 + - python + license: MIT + license_family: MIT + size: 208244 + timestamp: 1769302653091 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rust-1.92.0-h4ff7c5d_0.conda sha256: 7cc5407dc6d559ef90118931faa4063c282dfed0472be562eacb12bf09b096c9 md5: 0ea02a89903b4f23918ac8aa20500919 @@ -295,6 +488,15 @@ packages: license_family: MIT size: 34887424 timestamp: 1765820242072 +- conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_2.conda + sha256: dce518f45e24cd03f401cb0616917773159a210c19d601c5f2d4e0e5879d30ad + md5: 03fe290994c5e4ec17293cfb6bdce520 + depends: + - python >=3.10 + license: Apache-2.0 + license_family: Apache + size: 15698 + timestamp: 1762941572482 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h892fb3f_3.conda sha256: ad0c67cb03c163a109820dc9ecf77faf6ec7150e942d1e8bb13e5d39dc058ab7 md5: a73d54a5abba6543cb2f0af1bfbd6851 @@ -305,12 +507,42 @@ packages: license_family: BSD size: 3125484 timestamp: 1763055028377 +- conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda + sha256: 032271135bca55aeb156cee361c81350c6f3fb203f57d024d7e5a1fc9ef18731 + md5: 0caa1af407ecff61170c9437a808404d + depends: + - python >=3.10 + - python + license: PSF-2.0 + license_family: PSF + size: 51692 + timestamp: 1756220668932 - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda sha256: 1d30098909076af33a35017eed6f2953af1c769e273a0626a04722ac4acaba3c md5: ad659d0a2b3e47e38d829aa8cad2d610 license: LicenseRef-Public-Domain size: 119135 timestamp: 1767016325805 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/uv-0.9.28-h9b11cc2_0.conda + 
sha256: a318724fbe294f9564f45a27121358d465e7c7300cda3841efac82d24895f1ee + md5: a888f6d3d5dadf7917c1a9c286ea3bc3 + depends: + - __osx >=11.0 + - libcxx >=19 + constrains: + - __osx >=11.0 + license: Apache-2.0 OR MIT + size: 15777800 + timestamp: 1769721396484 +- conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.5.2-pyhd8ed1ab_0.conda + sha256: 8cd3605c84960bbd7626f80fdd19c46d44564cfdf87c12e5c3d71f2ea01adfbb + md5: 76f0a1179bd0324c03a5d7032b7b73b9 + depends: + - python >=3.10 + license: MIT + license_family: MIT + size: 69057 + timestamp: 1769769550636 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstd-1.5.7-hbf9d68e_6.conda sha256: 9485ba49e8f47d2b597dd399e88f4802e100851b27c21d7525625b0b4025a5d9 md5: ab136e4c34e97f34fb621d2592a393d8 diff --git a/pixi.toml b/pixi.toml index d0716817..664281d2 100644 --- a/pixi.toml +++ b/pixi.toml @@ -6,8 +6,22 @@ platforms = ["osx-arm64"] version = "0.1.0" [tasks] +# Install ushadow backend in editable mode +install-ushadow = "cd ushadow/backend && uv pip install -e . 
--python $CONDA_PREFIX/bin/python" + +# Run ush CLI tool +ush = { cmd = "python ush", depends-on = ["install-ushadow"] } [dependencies] python = "3.12.*" nodejs = ">=25.2.1,<25.3" rust = ">=1.92.0,<1.93" + +# Python packages for ush CLI tool +rich = ">=13.0.0" +httpx = ">=0.27.0" +prompt_toolkit = ">=3.0.0" +pyyaml = ">=6.0.0" + +# uv for fast Python package management +uv = ">=0.5.0" diff --git a/scripts/build-push-images.sh b/scripts/build-push-images.sh new file mode 100755 index 00000000..68e71a21 --- /dev/null +++ b/scripts/build-push-images.sh @@ -0,0 +1,172 @@ +#!/bin/bash +# Build and push multi-arch Docker images to GitHub Container Registry +# +# Usage: +# ./scripts/build-push-images.sh [tag] +# +# Examples: +# ./scripts/build-push-images.sh chronicle +# ./scripts/build-push-images.sh chronicle v1.0.0 +# ./scripts/build-push-images.sh mycelia latest + +set -e + +SERVICE="${1:-}" +TAG="${2:-latest}" +REGISTRY="ghcr.io/ushadow-io" +PLATFORMS="linux/amd64,linux/arm64" +BUILDER_NAME="ushadow-builder" + +# Colors +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +error() { + echo -e "${RED}ERROR: $1${NC}" >&2 + exit 1 +} + +info() { + echo -e "${GREEN}$1${NC}" +} + +warn() { + echo -e "${YELLOW}$1${NC}" +} + +# Ensure buildx builder exists +ensure_builder() { + if ! docker buildx inspect "$BUILDER_NAME" &>/dev/null; then + info "Creating buildx builder: ${BUILDER_NAME}" + docker buildx create --name "$BUILDER_NAME" --driver docker-container --bootstrap + fi + docker buildx use "$BUILDER_NAME" +} + +# Build and push an image +build_and_push() { + local context="$1" + local dockerfile="$2" + local image_name="$3" + local full_image="${REGISTRY}/${image_name}:${TAG}" + + if [[ ! -d "$context" ]]; then + error "Context directory not found: ${context}" + fi + + if [[ ! 
-f "$dockerfile" ]]; then + error "Dockerfile not found: ${dockerfile}" + fi + + info "---------------------------------------------" + info "Building ${image_name}" + info " Context: ${context}" + info " Dockerfile: ${dockerfile}" + info " Image: ${full_image}" + info " Platforms: ${PLATFORMS}" + info "---------------------------------------------" + + docker buildx build \ + --platform "$PLATFORMS" \ + --tag "$full_image" \ + --file "$dockerfile" \ + --push \ + "$context" + + info "โœ… Pushed: ${full_image}" + echo "" +} + +# Main script +case "$SERVICE" in + chronicle) + info "=============================================" + info "Building Chronicle (tag: ${TAG})" + info "=============================================" + ensure_builder + + # Build backend + build_and_push \ + "chronicle/backends/advanced" \ + "chronicle/backends/advanced/Dockerfile" \ + "chronicle-backend" + + # Build workers (same Dockerfile as backend, different tag) + build_and_push \ + "chronicle/backends/advanced" \ + "chronicle/backends/advanced/Dockerfile" \ + "chronicle-workers" + + # Build webui + build_and_push \ + "chronicle/backends/advanced/webui" \ + "chronicle/backends/advanced/webui/Dockerfile" \ + "chronicle-webui" + + info "=============================================" + info "Chronicle images pushed successfully!" + info " ${REGISTRY}/chronicle-backend:${TAG}" + info " ${REGISTRY}/chronicle-workers:${TAG}" + info " ${REGISTRY}/chronicle-webui:${TAG}" + info "=============================================" + ;; + + mycelia) + info "=============================================" + info "Building Mycelia (tag: ${TAG})" + info "=============================================" + ensure_builder + + # Build backend (context is mycelia root, Dockerfile is in backend/) + build_and_push \ + "mycelia" \ + "mycelia/backend/Dockerfile" \ + "mycelia-backend" + + info "=============================================" + info "Mycelia images pushed successfully!" 
+ info " ${REGISTRY}/mycelia-backend:${TAG}" + info "=============================================" + ;; + + openmemory) + info "=============================================" + info "Building OpenMemory (tag: ${TAG})" + info "=============================================" + ensure_builder + + # Build server + build_and_push \ + "openmemory/server" \ + "openmemory/server/Dockerfile" \ + "openmemory-server" + + info "=============================================" + info "OpenMemory images pushed successfully!" + info " ${REGISTRY}/openmemory-server:${TAG}" + info "=============================================" + ;; + + *) + echo "Usage: $0 <service> [tag]" + echo "" + echo "Available services:" + echo " chronicle - Build Chronicle backend + workers + webui" + echo " mycelia - Build Mycelia backend" + echo " openmemory - Build OpenMemory server" + echo "" + echo "Examples:" + echo " $0 chronicle" + echo " $0 chronicle v1.0.0" + echo " $0 mycelia latest" + echo " $0 openmemory v2.0.0" + echo "" + echo "Prerequisites:" + echo " 1. Docker with buildx support" + echo " 2. Login to GHCR:" + echo " echo \$GITHUB_TOKEN | docker login ghcr.io -u USERNAME --password-stdin" + exit 1 + ;; +esac diff --git a/scripts/register_keycloak_redirects.py b/scripts/register_keycloak_redirects.py new file mode 100755 index 00000000..c698f455 --- /dev/null +++ b/scripts/register_keycloak_redirects.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python3 +""" +Register Redirect URIs with Keycloak + +This script adds redirect URIs to the ushadow-frontend Keycloak client. +Use this to register new URLs (localhost ports, Tailscale domains, etc.)
+ +Usage: + python scripts/register_keycloak_redirects.py http://localhost:3010/oauth/callback + python scripts/register_keycloak_redirects.py https://myapp.ts.net/oauth/callback +""" + +import asyncio +import sys +import os + +# Add backend to Python path +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "ushadow", "backend")) + +from src.services.keycloak_admin import KeycloakAdminClient + + +async def main(): + if len(sys.argv) < 2: + print("Usage: python register_keycloak_redirects.py <redirect_uri>") + print("Example: python register_keycloak_redirects.py http://localhost:3010/oauth/callback") + sys.exit(1) + + redirect_uri = sys.argv[1] + + # Get Keycloak config from environment + keycloak_url = os.getenv("KEYCLOAK_URL", "http://localhost:8081") + realm = os.getenv("KEYCLOAK_REALM", "ushadow") + admin_user = os.getenv("KEYCLOAK_ADMIN", "admin") + admin_password = os.getenv("KEYCLOAK_ADMIN_PASSWORD", "admin") + client_id = "ushadow-frontend" + + print(f"๐Ÿ” Registering redirect URI with Keycloak") + print(f" Keycloak: {keycloak_url}") + print(f" Realm: {realm}") + print(f" Client: {client_id}") + print(f" Redirect URI: {redirect_uri}") + print() + + # Create admin client + admin_client = KeycloakAdminClient( + keycloak_url=keycloak_url, + realm=realm, + admin_user=admin_user, + admin_password=admin_password, + ) + + # Register the redirect URI (merges with existing) + success = await admin_client.register_redirect_uri(client_id, redirect_uri) + + if success: + print("โœ… Success! 
Redirect URI registered.") + + # Also register as post-logout redirect URI + base_url = redirect_uri.replace("/oauth/callback", "") + logout_success = await admin_client.update_post_logout_redirect_uris( + client_id, + [base_url, base_url + "/"], + merge=True + ) + + if logout_success: + print(f"โœ… Post-logout redirect URIs also registered: {base_url}") + + print() + print("You can now use OAuth login from this URL!") + else: + print("โŒ Failed to register redirect URI") + print("Check that:") + print(" 1. Keycloak is running") + print(" 2. Admin credentials are correct") + print(" 3. The realm and client exist") + sys.exit(1) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/scripts/setup-repo.sh b/scripts/setup-repo.sh new file mode 100755 index 00000000..e6d396b7 --- /dev/null +++ b/scripts/setup-repo.sh @@ -0,0 +1,26 @@ +#!/bin/bash +# One-time setup script for new clones + +set -e + +echo "๐Ÿš€ Setting up repository..." + +# Configure git to use committed hooks +echo "๐Ÿ“Œ Configuring git hooks..." +git config core.hooksPath .githooks + +# Initialize submodules (non-recursive to avoid nested submodules) +echo "๐Ÿ“ฆ Initializing submodules..." +git submodule update --init + +# Run post-checkout hook to configure sparse checkout +echo "๐Ÿ”ง Configuring sparse checkout..." +./.githooks/post-checkout + +echo "" +echo "โœ… Setup complete!" +echo "" +echo "Next steps:" +echo " - Chronicle and Mycelia are now configured with sparse checkout" +echo " - extras/mycelia and friend/ directories are excluded (prevents circular deps)" +echo " - Git hooks will automatically maintain this configuration" diff --git a/scripts/sync-env.py b/scripts/sync-env.py new file mode 100755 index 00000000..2b5a8b2c --- /dev/null +++ b/scripts/sync-env.py @@ -0,0 +1,268 @@ +#!/usr/bin/env python3 +""" +Sync .env with .env.example + +Finds missing variables in .env that exist in .env.example and optionally +appends them with their default values. 
+ +Usage: + uv run scripts/sync-env.py # Show diff only + uv run scripts/sync-env.py --apply # Apply missing variables + uv run scripts/sync-env.py --dry-run # Show what would be added +""" + +import argparse +import re +import sys +from pathlib import Path + + +def parse_env_file(path: Path) -> tuple[dict[str, str], dict[str, str], list[str]]: + """ + Parse .env file and return: + - active_vars: dict of VAR=value (uncommented) + - commented_vars: dict of VAR=value (commented, for reference) + - lines: original lines for context preservation + """ + active_vars = {} + commented_vars = {} + lines = [] + + if not path.exists(): + return active_vars, commented_vars, lines + + content = path.read_text() + lines = content.splitlines() + + for line in lines: + stripped = line.strip() + + # Skip empty lines and section headers + if not stripped or stripped.startswith("# ="): + continue + + # Commented variable (# VAR=value) + match = re.match(r"^#\s*([A-Z][A-Z0-9_]*)=(.*)$", stripped) + if match: + var_name, value = match.groups() + commented_vars[var_name] = value.split("#")[0].strip() # Remove inline comments + continue + + # Active variable (VAR=value) + match = re.match(r"^([A-Z][A-Z0-9_]*)=(.*)$", stripped) + if match: + var_name, value = match.groups() + active_vars[var_name] = value.split("#")[0].strip() + continue + + return active_vars, commented_vars, lines + + +def get_section_for_var(example_lines: list[str], var_name: str) -> str | None: + """Find the section header for a variable in .env.example.""" + current_section = None + + for line in example_lines: + stripped = line.strip() + if stripped.startswith("# =") and stripped.endswith("="): + # This is a section separator, next non-empty comment is section name + continue + elif stripped.startswith("# ") and not stripped.startswith("# ="): + # Potential section name or comment + text = stripped[2:].strip() + if text.isupper() or (text.endswith(":") and len(text) < 50): + current_section = text + elif 
re.match(rf"^#?\s*{re.escape(var_name)}=", stripped): + return current_section + + return None + + +def extract_missing_blocks( + example_lines: list[str], + env_active: dict[str, str], + env_commented: dict[str, str], +) -> list[tuple[str, list[str]]]: + """ + Extract blocks of missing variables from .env.example, preserving context. + Returns list of (section_name, lines) tuples. + """ + all_env_vars = set(env_active.keys()) | set(env_commented.keys()) + missing_blocks = [] + current_section = None + current_block_lines = [] + in_missing_block = False + + for i, line in enumerate(example_lines): + stripped = line.strip() + + # Section header detection + if stripped.startswith("# ="): + # Save previous block if we were in one + if in_missing_block and current_block_lines: + missing_blocks.append((current_section, current_block_lines.copy())) + current_block_lines = [] + in_missing_block = False + continue + + # Section name (line after ===) + if stripped.startswith("# ") and not "=" in stripped: + text = stripped[2:].strip() + if text.isupper() or text.endswith(":"): + if in_missing_block and current_block_lines: + missing_blocks.append((current_section, current_block_lines.copy())) + current_block_lines = [] + in_missing_block = False + current_section = text + continue + + # Check if this line has a variable + var_match = re.match(r"^#?\s*([A-Z][A-Z0-9_]*)=", stripped) + if var_match: + var_name = var_match.group(1) + if var_name not in all_env_vars: + # This variable is missing + if not in_missing_block: + in_missing_block = True + # Add section header if starting new block + if current_section and not any( + current_section == s for s, _ in missing_blocks + ): + current_block_lines.append(f"\n# {'=' * 42}") + current_block_lines.append(f"# {current_section}") + current_block_lines.append(f"# {'=' * 42}") + current_block_lines.append(line) + else: + # Variable exists, end block if we were in one + if in_missing_block and current_block_lines: + 
missing_blocks.append((current_section, current_block_lines.copy())) + current_block_lines = [] + in_missing_block = False + elif in_missing_block and stripped.startswith("#"): + # Comment line within a missing block - include it + current_block_lines.append(line) + + # Don't forget the last block + if in_missing_block and current_block_lines: + missing_blocks.append((current_section, current_block_lines.copy())) + + return missing_blocks + + +def main(): + parser = argparse.ArgumentParser( + description="Sync .env with .env.example", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" +Examples: + uv run scripts/sync-env.py # Show missing variables + uv run scripts/sync-env.py --apply # Add missing variables to .env + uv run scripts/sync-env.py --dry-run # Show what would be added + """, + ) + parser.add_argument( + "--apply", + action="store_true", + help="Apply missing variables to .env", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Show what would be added (without modifying .env)", + ) + parser.add_argument( + "--example", + type=Path, + default=Path(".env.example"), + help="Path to .env.example (default: .env.example)", + ) + parser.add_argument( + "--env", + type=Path, + default=Path(".env"), + help="Path to .env (default: .env)", + ) + args = parser.parse_args() + + # Find project root (where .env.example is) + script_dir = Path(__file__).parent + project_root = script_dir.parent + + example_path = project_root / args.example if not args.example.is_absolute() else args.example + env_path = project_root / args.env if not args.env.is_absolute() else args.env + + if not example_path.exists(): + print(f"โŒ {example_path} not found") + sys.exit(1) + + if not env_path.exists(): + print(f"โŒ {env_path} not found") + print(f" Run: cp {example_path} {env_path}") + sys.exit(1) + + # Parse both files + example_active, example_commented, example_lines = parse_env_file(example_path) + env_active, env_commented, env_lines = 
parse_env_file(env_path) + + all_example_vars = set(example_active.keys()) | set(example_commented.keys()) + all_env_vars = set(env_active.keys()) | set(env_commented.keys()) + + missing_vars = all_example_vars - all_env_vars + extra_vars = all_env_vars - all_example_vars + + # Summary + print(f"๐Ÿ“‹ Environment Sync Check") + print(f" Example: {example_path}") + print(f" Env: {env_path}") + print() + + if not missing_vars: + print("โœ… .env is in sync with .env.example") + if extra_vars: + print(f"\n๐Ÿ“ Extra variables in .env (not in .env.example):") + for var in sorted(extra_vars): + print(f" - {var}") + return + + print(f"โš ๏ธ Missing {len(missing_vars)} variable(s) in .env:") + for var in sorted(missing_vars): + if var in example_active: + print(f" + {var}={example_active[var]}") + else: + print(f" + # {var}={example_commented[var]} (commented)") + + if extra_vars: + print(f"\n๐Ÿ“ Extra variables in .env (not in .env.example):") + for var in sorted(extra_vars): + print(f" - {var}") + + # Extract missing blocks with context + missing_blocks = extract_missing_blocks(example_lines, env_active, env_commented) + + if args.dry_run or args.apply: + print("\n" + "=" * 50) + print("Lines to be added to .env:") + print("=" * 50) + + lines_to_add = [] + for section, lines in missing_blocks: + for line in lines: + lines_to_add.append(line) + print(line) + + if args.apply: + # Append to .env + with open(env_path, "a") as f: + f.write("\n") # Ensure newline before new content + for line in lines_to_add: + f.write(line + "\n") + print("\nโœ… Added missing variables to .env") + else: + print(f"\n๐Ÿ’ก Run with --apply to add these to {env_path}") + else: + print(f"\n๐Ÿ’ก Run with --dry-run to see what would be added") + print(f" Run with --apply to add missing variables to .env") + + +if __name__ == "__main__": + main() diff --git a/setup/README.md b/setup/README.md index 15ef135b..90d2766c 100644 --- a/setup/README.md +++ b/setup/README.md @@ -82,7 +82,7 @@ $ python3 
setup/setuputils.py get-redis-marker 2 ### Integration Used by: -- `quick-start.sh` - Interactive environment setup +- `go.sh` / `dev.sh` - Environment setup and startup - Multi-worktree configurations - Port and database isolation - CI/CD pipelines - Environment validation diff --git a/setup/run.py b/setup/run.py index 56546a6f..8bcb74aa 100644 --- a/setup/run.py +++ b/setup/run.py @@ -267,6 +267,14 @@ def generate_env_file(env_name: str, port_offset: int, env_file: Path, secrets_f # Development mode DEV_MODE={'true' if dev_mode else 'false'} + +# ========================================== +# KEYCLOAK SSO CONFIGURATION +# ========================================== +KEYCLOAK_ADMIN=admin +KEYCLOAK_ADMIN_PASSWORD=admin +KEYCLOAK_PORT=8081 +KEYCLOAK_MGMT_PORT=9000 """ env_file.write_text(env_content) diff --git a/share-gateway/Dockerfile b/share-gateway/Dockerfile new file mode 100644 index 00000000..38c45be3 --- /dev/null +++ b/share-gateway/Dockerfile @@ -0,0 +1,16 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install dependencies +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application +COPY main.py models.py ./ + +# Expose port +EXPOSE 8000 + +# Run gateway +CMD ["python", "main.py"] diff --git a/share-gateway/README.md b/share-gateway/README.md new file mode 100644 index 00000000..49ab2498 --- /dev/null +++ b/share-gateway/README.md @@ -0,0 +1,159 @@ +# Share Gateway + +Public-facing proxy for accessing ushadow shared resources. + +## Purpose + +This service allows **external users** (not on your Tailscale network) to access shared conversations via share links, while keeping your main ushadow instance completely private. 
+ +## Architecture + +``` +Public Internet + โ†“ +Share Gateway (this service, on public VPS) + โ†“ (via Tailscale) +Your Private Tailnet + โ””โ”€โ”€ ushadow backend +``` + +## Security Model + +- **Only exposes** `/c/{token}` endpoint +- **Validates** share tokens before proxying +- **Rate limited** to 10 requests/minute per IP +- **Audit logs** all access +- **No direct access** to your ushadow APIs +- **Tailscale-secured** connection to backend + +## Deployment + +### Option 1: Public VPS (DigitalOcean, Linode, AWS, etc.) + +1. Create a $5/month VPS +2. Install Tailscale: + ```bash + curl -fsSL https://tailscale.com/install.sh | sh + tailscale up + ``` + +3. Clone this directory to the VPS: + ```bash + scp -r share-gateway/ user@your-vps:/opt/share-gateway + ``` + +4. Configure environment: + ```bash + cat > /opt/share-gateway/.env < bool: + """Check if token has expired.""" + if self.expires_at is None: + return False + return datetime.utcnow() > self.expires_at + + def is_view_limit_exceeded(self) -> bool: + """Check if view limit exceeded.""" + if self.max_views is None: + return False + return self.view_count >= self.max_views + + +class ShareTokenResponse(BaseModel): + """API response model.""" + + token: str + share_url: str + resource_type: str + resource_id: str + permissions: List[str] + expires_at: Optional[datetime] = None + max_views: Optional[int] = None + view_count: int + require_auth: bool + tailscale_only: bool + created_at: datetime diff --git a/share-gateway/requirements.txt b/share-gateway/requirements.txt new file mode 100644 index 00000000..0469a905 --- /dev/null +++ b/share-gateway/requirements.txt @@ -0,0 +1,7 @@ +fastapi==0.115.0 +uvicorn==0.31.1 +httpx==0.27.2 +motor==3.6.0 +pymongo==4.10.1 +pydantic==2.9.2 +slowapi==0.1.9 # Rate limiting diff --git a/ushadow/backend/main.py b/ushadow/backend/main.py index 548b79ad..0e2ef3d7 100644 --- a/ushadow/backend/main.py +++ b/ushadow/backend/main.py @@ -19,11 +19,12 @@ from motor.motor_asyncio 
import AsyncIOMotorClient from src.models.user import User # Beanie document model +from src.models.share import ShareToken # Beanie document model from src.routers import health, wizard, chronicle, auth, feature_flags from src.routers import services, deployments, providers, service_configs, chat from src.routers import kubernetes, tailscale, unodes, docker, sse -from src.routers import github_import, audio_relay +from src.routers import github_import, audio_relay, memories, share, keycloak_admin, dashboard from src.routers import settings as settings_api from src.middleware import setup_middleware from src.services.unode_manager import init_unode_manager, get_unode_manager @@ -122,7 +123,7 @@ def send_telemetry(): app.state.db = db # Initialize Beanie ODM with document models - await init_beanie(database=db, document_models=[User]) + await init_beanie(database=db, document_models=[User, ShareToken]) logger.info("โœ“ Beanie ODM initialized") # Create admin user if explicitly configured in secrets.yaml @@ -148,6 +149,13 @@ def send_telemetry(): # Start background task for stale u-node checking stale_check_task = asyncio.create_task(check_stale_unodes_task()) + # Register current environment with Keycloak (non-blocking) + try: + from src.services.keycloak_startup import register_current_environment + await register_current_environment() + except Exception as e: + logger.warning(f"Keycloak auto-registration failed (non-critical): {e}") + yield # Cleanup @@ -187,6 +195,10 @@ def send_telemetry(): app.include_router(sse.router, prefix="/api/sse", tags=["sse"]) app.include_router(github_import.router, prefix="/api/github-import", tags=["github-import"]) app.include_router(audio_relay.router, tags=["audio"]) +app.include_router(memories.router, tags=["memories"]) +app.include_router(share.router, tags=["sharing"]) +app.include_router(keycloak_admin.router, prefix="/api/keycloak", tags=["keycloak-admin"]) +app.include_router(dashboard.router, prefix="/api/dashboard", 
tags=["dashboard"]) # Setup MCP server for LLM tool access setup_mcp_server(app) diff --git a/ushadow/backend/pyproject.toml b/ushadow/backend/pyproject.toml index 087e58b3..3e928a0e 100644 --- a/ushadow/backend/pyproject.toml +++ b/ushadow/backend/pyproject.toml @@ -22,6 +22,7 @@ dependencies = [ "pymongo>=4.9.0,<4.10", # Motor 3.6.0 requires pymongo<4.10 "motor>=3.6.0", "redis>=5.2.0", + "neo4j>=5.26.0", # Neo4j driver with bearer_auth support # Authentication & Security "fastapi-users[beanie]>=14.0.1", diff --git a/ushadow/backend/src/backend_index.py b/ushadow/backend/src/backend_index.py new file mode 100644 index 00000000..dea41755 --- /dev/null +++ b/ushadow/backend/src/backend_index.py @@ -0,0 +1,442 @@ +""" +Backend Method and Class Index for Agent Discovery. + +This is a STATIC REFERENCE FILE for documentation purposes only. +It is NOT a runtime registry like ComposeRegistry or ProviderRegistry. + +Purpose: +- Help AI agents discover existing backend code before creating new methods +- Provide quick lookup of available services, managers, and utilities +- Reduce code duplication by making existing functionality visible + +Usage: + # Before creating new code, agents should: + cat src/backend_index.py # Read this index + grep -rn "method_name" src/ # Search for existing implementations + cat src/ARCHITECTURE.md # Understand layer rules + +Note: This file should be updated when new services/utilities are added. 
+""" + +from typing import Dict, List, Any + +# ============================================================================= +# MANAGER INDEX (External System Interfaces) +# ============================================================================= + +MANAGER_INDEX: Dict[str, Dict[str, Any]] = { + "docker": { + "class": "DockerManager", + "module": "src.services.docker_manager", + "purpose": "Docker container lifecycle and service management", + "key_methods": [ + "initialize() -> bool", + "is_available() -> bool", + "validate_service_name(service_name: str) -> tuple[bool, str]", + "get_container_status(service_name: str) -> ServiceStatus", + "start_service(service_name: str) -> ActionResult", + "stop_service(service_name: str) -> ActionResult", + "get_service_logs(service_name: str) -> LogResult", + "get_service_info(service_name: str) -> Optional[ServiceInfo]", + "check_port_conflict(service_name: str) -> Optional[PortConflict]", + ], + "use_when": "Managing Docker containers, checking service status, handling port conflicts", + "dependencies": ["docker client", "compose files"], + "line_count": 1537, + }, + "kubernetes": { + "class": "KubernetesManager", + "module": "src.services.kubernetes_manager", + "purpose": "Kubernetes cluster and deployment management", + "key_methods": [ + "initialize()", + "add_cluster(name: str, kubeconfig: str) -> KubernetesCluster", + "list_clusters() -> List[KubernetesCluster]", + "get_cluster(cluster_id: str) -> Optional[KubernetesCluster]", + "remove_cluster(cluster_id: str) -> bool", + "deploy_service(cluster_id: str, service_config: ServiceConfig) -> DeploymentResult", + "list_pods(cluster_id: str, namespace: str) -> List[Dict]", + "get_pod_logs(cluster_id: str, pod_name: str, namespace: str) -> str", + "scale_deployment(cluster_id: str, deployment_name: str, replicas: int)", + "ensure_namespace_exists(cluster_id: str, namespace: str)", + ], + "use_when": "Deploying to Kubernetes, managing clusters, querying pod status", + 
"dependencies": ["kubernetes client", "kubeconfig"], + "line_count": 1505, + }, + "unode": { + "class": "UNodeManager", + "module": "src.services.unode_manager", + "purpose": "Distributed cluster node management and orchestration", + "key_methods": [ + "initialize()", + "create_join_token(role: UNodeRole, permissions: List[str]) -> str", + "get_bootstrap_script_bash(token: str) -> str", + "get_bootstrap_script_powershell(token: str) -> str", + "validate_token(token: str) -> Tuple[bool, Optional[JoinToken], str]", + "register_unode(registration: UNodeRegistration) -> UNode", + "process_heartbeat(heartbeat: UNodeHeartbeat) -> bool", + "get_unode(hostname: str) -> Optional[UNode]", + "list_unodes(role: Optional[UNodeRole]) -> List[UNode]", + "upgrade_unode(hostname: str, version: str) -> bool", + ], + "use_when": "Managing cluster nodes, generating join scripts, handling node registration", + "dependencies": ["MongoDB", "Tailscale"], + "line_count": 1670, + "notes": "Large file - consider splitting if adding major features", + }, + "tailscale": { + "class": "TailscaleManager", + "module": "src.services.tailscale_manager", + "purpose": "Tailscale mesh networking configuration and status", + "key_methods": [ + "get_container_name() -> str", + "get_container_status() -> ContainerStatus", + "start_container() -> Dict[str, Any]", + "stop_container() -> Dict[str, Any]", + "clear_auth() -> Dict[str, Any]", + "exec_command(command: str) -> Tuple[int, str, str]", + "get_status() -> TailscaleStatus", + "check_authentication() -> bool", + "configure_serve(ports: List[int])", + ], + "use_when": "Configuring Tailscale, checking network status, managing VPN", + "dependencies": ["Docker", "Tailscale container"], + "line_count": 1024, + }, +} + +# ============================================================================= +# BUSINESS SERVICE INDEX (Orchestration & Workflows) +# ============================================================================= + +SERVICE_INDEX: Dict[str, 
Dict[str, Any]] = { + "service_orchestrator": { + "class": "ServiceOrchestrator", + "module": "src.services.service_orchestrator", + "purpose": "Coordinate service lifecycle across platforms (Docker/K8s)", + "key_methods": [ + "get_service_summary(service_name: str) -> ServiceSummary", + "start_service(service_name: str, platform: str) -> ActionResult", + "stop_service(service_name: str, platform: str) -> ActionResult", + "get_logs(service_name: str, platform: str) -> LogResult", + "check_health(service_name: str) -> HealthStatus", + ], + "use_when": "High-level service operations, multi-platform coordination", + "dependencies": ["DockerManager", "KubernetesManager"], + "line_count": 942, + }, + "deployment_manager": { + "class": "DeploymentManager", + "module": "src.services.deployment_manager", + "purpose": "Multi-platform deployment strategy and execution", + "key_methods": [ + "deploy(service_config: ServiceConfig, target: DeploymentTarget) -> DeploymentResult", + "list_deployments(platform: Optional[str]) -> List[Deployment]", + "get_deployment_status(deployment_id: str) -> DeploymentStatus", + "rollback_deployment(deployment_id: str) -> bool", + ], + "use_when": "Deploying services, managing deployment lifecycle", + "dependencies": ["deployment_platforms", "service configs"], + "line_count": 1124, + }, + "service_config_manager": { + "class": "ServiceConfigManager", + "module": "src.services.service_config_manager", + "purpose": "Service configuration CRUD and validation", + "key_methods": [ + "get_service_config(service_name: str) -> Optional[ServiceConfig]", + "list_service_configs() -> List[ServiceConfig]", + "create_service_config(config: ServiceConfig) -> ServiceConfig", + "update_service_config(service_name: str, updates: Dict) -> ServiceConfig", + "delete_service_config(service_name: str) -> bool", + "validate_config(config: ServiceConfig) -> ValidationResult", + ], + "use_when": "Managing service configurations, validating service definitions", + 
"dependencies": ["SettingsStore", "YAML files"], + "line_count": 890, + }, +} + +# ============================================================================= +# REGISTRY INDEX (In-Memory Lookups - Runtime Registries) +# ============================================================================= + +REGISTRY_INDEX: Dict[str, Dict[str, Any]] = { + "compose_registry": { + "class": "ComposeServiceRegistry", + "module": "src.services.compose_registry", + "purpose": "Runtime registry of available Docker Compose services", + "key_methods": [ + "reload_from_compose_files()", + "get_service(service_name: str) -> Optional[ComposeService]", + "list_services() -> List[ComposeService]", + "filter_by_capability(capability: str) -> List[ComposeService]", + ], + "use_when": "Discovering available compose services, querying service capabilities", + "note": "This IS a runtime registry (loads from compose files at startup)", + }, + "provider_registry": { + "class": "ProviderRegistry", + "module": "src.services.provider_registry", + "purpose": "Runtime registry of LLM and service providers", + "key_methods": [ + "get_provider(provider_id: str) -> Optional[Provider]", + "list_providers() -> List[Provider]", + "register_provider(provider: Provider)", + ], + "use_when": "Accessing provider definitions, listing available providers", + "note": "This IS a runtime registry (dynamic provider collection)", + }, +} + +# ============================================================================= +# STORE INDEX (Data Persistence) +# ============================================================================= + +STORE_INDEX: Dict[str, Dict[str, Any]] = { + "settings_store": { + "class": "SettingsStore", + "module": "src.config.store", + "purpose": "Persist and retrieve application settings (YAML files)", + "key_methods": [ + "get(key: str, default: Any) -> Any", + "set(key: str, value: Any) -> None", + "delete(key: str) -> bool", + "save() -> None", + "reload() -> None", + ], + "use_when": 
"Reading/writing application configuration to disk", + "dependencies": ["YAML files in config directory"], + }, + "secret_store": { + "class": "SecretStore", + "module": "src.config.secret_store", + "purpose": "Secure storage and retrieval of sensitive values", + "key_methods": [ + "get_secret(key: str) -> Optional[str]", + "set_secret(key: str, value: str) -> None", + "delete_secret(key: str) -> bool", + ], + "use_when": "Managing API keys, passwords, and other secrets", + "dependencies": ["Encrypted storage backend"], + }, +} + +# ============================================================================= +# UTILITY INDEX (Pure Functions, Stateless Helpers) +# ============================================================================= + +UTILITY_INDEX: Dict[str, Dict[str, Any]] = { + "settings": { + "functions": [ + "get_settings() -> Settings", + "infer_value_type(value: str) -> str", + "infer_setting_type(name: str) -> str", + "categorize_setting(name: str) -> str", + "mask_secret_value(value: str, path: str) -> str", + ], + "module": "src.config.omegaconf_settings", + "purpose": "Access OmegaConf settings, type inference, secret masking", + "use_when": "Reading configuration, inferring types, displaying masked secrets", + }, + "secrets": { + "functions": [ + "get_auth_secret_key() -> str", + "is_secret_key(name: str) -> bool", + "mask_value(value: str) -> str", + "mask_if_secret(name: str, value: str) -> str", + "mask_dict_secrets(data: dict) -> dict", + ], + "module": "src.config.secrets", + "purpose": "Secret key management and value masking", + "use_when": "Accessing auth secrets, masking sensitive data for logs/UI", + }, + "logging": { + "functions": [ + "setup_logging(level: str) -> None", + "get_logger(name: str) -> logging.Logger", + ], + "module": "src.utils.logging", + "purpose": "Centralized logging configuration", + "use_when": "Setting up logging for modules", + }, + "version": { + "functions": [ + "get_version() -> str", + "get_git_commit() -> 
Optional[str]", + ], + "module": "src.utils.version", + "purpose": "Application version and build information", + "use_when": "Displaying version info, tracking deployments", + }, + "tailscale_serve": { + "functions": [ + "get_tailscale_status() -> Dict[str, Any]", + "is_tailscale_connected() -> bool", + ], + "module": "src.utils.tailscale_serve", + "purpose": "Quick Tailscale connection status checks", + "use_when": "Checking Tailscale availability without manager overhead", + }, +} + +# ============================================================================= +# COMMON METHOD PATTERNS (Cross-Service) +# ============================================================================= + +METHOD_PATTERNS = """ +Before creating new methods with these names, check if they already exist: + +get_status() / get_container_status(): + - services/docker_manager.py:DockerManager.get_container_status() + - services/tailscale_manager.py:TailscaleManager.get_container_status() + - services/deployment_platforms.py:DockerPlatform.get_status() + - services/deployment_platforms.py:K8sPlatform.get_status() + +deploy() / deploy_service(): + - services/deployment_manager.py:DeploymentManager.deploy() + - services/kubernetes_manager.py:KubernetesManager.deploy_service() + - services/deployment_platforms.py:*Platform.deploy() + +get_logs() / get_service_logs(): + - services/docker_manager.py:DockerManager.get_service_logs() + - services/kubernetes_manager.py:KubernetesManager.get_pod_logs() + - services/service_orchestrator.py:ServiceOrchestrator.get_logs() + +list_*() methods: + - services/kubernetes_manager.py:KubernetesManager.list_clusters() + - services/kubernetes_manager.py:KubernetesManager.list_pods() + - services/unode_manager.py:UNodeManager.list_unodes() + - services/service_config_manager.py:ServiceConfigManager.list_service_configs() + +start_* / stop_* methods: + - services/docker_manager.py:DockerManager.start_service() / stop_service() + - 
services/tailscale_manager.py:TailscaleManager.start_container() / stop_container() + - services/service_orchestrator.py:ServiceOrchestrator.start_service() / stop_service() + +RECOMMENDATION: +If creating similar functionality, either: +1. Extend existing method if same service +2. Use existing method from another service via composition +3. Create new method only if genuinely different behavior needed +""" + +# ============================================================================= +# LAYER ARCHITECTURE REFERENCE +# ============================================================================= + +LAYER_RULES = """ +Follow strict layer separation: + +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Router โ”‚ HTTP Layer: Parse requests, call services, return responses +โ”‚ โ”‚ - Max 30 lines per endpoint +โ”‚ โ”‚ - Raise HTTPException for errors +โ”‚ โ”‚ - Use Depends() for services +โ”‚ โ”‚ - Return Pydantic models +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + โ”‚ + โ–ผ +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Service โ”‚ Business Logic: Orchestrate, validate, coordinate +โ”‚ โ”‚ - Return data (not HTTP responses) +โ”‚ โ”‚ - Raise domain exceptions (ValueError, RuntimeError) +โ”‚ โ”‚ - Coordinate multiple managers/stores +โ”‚ โ”‚ - Max 800 lines per file +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + โ”‚ + โ–ผ +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Store/Mgr โ”‚ Data/External: Persist data, call external APIs +โ”‚ โ”‚ - Direct DB/file/API access +โ”‚ โ”‚ - No business logic +โ”‚ โ”‚ - Return domain objects +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + +NEVER SKIP LAYERS unless documented exception in ARCHITECTURE.md +""" + +# ============================================================================= +# FILE SIZE WARNINGS (Ruff Enforced) +# ============================================================================= + +FILE_SIZE_LIMITS = { + "routers": { + "max_lines": 500, + "action": "Split by resource domain (e.g., 
tailscale_setup.py, tailscale_status.py)", + "violations": ["routers/tailscale.py (1522 lines)", "routers/github_import.py (1130 lines)"], + }, + "services": { + "max_lines": 800, + "action": "Extract helper services or use composition pattern", + "violations": ["services/unode_manager.py (1670 lines)", "services/docker_manager.py (1537 lines)"], + }, + "utils": { + "max_lines": 300, + "action": "Split into focused utility modules", + "violations": ["config/yaml_parser.py (591 lines)"], + }, +} + +# ============================================================================= +# USAGE EXAMPLES +# ============================================================================= + +USAGE_EXAMPLES = """ +# Example 1: Check if method exists before creating +$ grep -rn "async def get_status" src/services/ +services/docker_manager.py:145: async def get_container_status(...) +services/tailscale_manager.py:89: async def get_container_status(...) +โ†’ Method exists! Reuse it instead of creating new one. 
+ +# Example 2: Find which manager handles Docker +$ cat src/backend_index.py | grep -A 5 '"docker"' +โ†’ Shows DockerManager with all available methods + +# Example 3: Check layer placement +$ cat src/ARCHITECTURE.md +โ†’ Confirms routers should NOT have business logic + +# Example 4: Find utility for masking secrets +$ grep -A 3 '"secrets"' src/backend_index.py +โ†’ Shows mask_value() in src.config.secrets +""" + +# ============================================================================= +# MAINTENANCE NOTES +# ============================================================================= + +MAINTENANCE = """ +This file should be updated when: +- New managers/services are created +- Major methods are added to existing services +- Service responsibilities change significantly +- Files are split due to size violations + +Update frequency: Monthly or when major features are added + +Last updated: 2025-01-23 (Initial creation for backend excellence initiative) +""" + +if __name__ == "__main__": + # When run directly, print helpful summary + print("=" * 80) + print("BACKEND INDEX - Quick Reference") + print("=" * 80) + print(f"\nManagers: {len(MANAGER_INDEX)} available") + for name, info in MANAGER_INDEX.items(): + print(f" - {info['class']:30s} ({info['line_count']:4d} lines) - {info['purpose']}") + + print(f"\nBusiness Services: {len(SERVICE_INDEX)} available") + for name, info in SERVICE_INDEX.items(): + print(f" - {info['class']:30s} ({info.get('line_count', 0):4d} lines) - {info['purpose']}") + + print(f"\nUtilities: {len(UTILITY_INDEX)} available") + for name, info in UTILITY_INDEX.items(): + print(f" - {name:30s} - {info['purpose']}") + + print("\n" + "=" * 80) + print("Use: grep -A 10 'manager_name' backend_index.py") + print(" Read: BACKEND_QUICK_REF.md for detailed patterns") + print("=" * 80) diff --git a/ushadow/backend/src/config/keycloak_settings.py b/ushadow/backend/src/config/keycloak_settings.py new file mode 100644 index 00000000..0633cd84 --- 
/dev/null +++ b/ushadow/backend/src/config/keycloak_settings.py @@ -0,0 +1,52 @@ +"""Keycloak configuration settings. + +This module provides configuration for Keycloak integration using OmegaConf. +All sensitive values (passwords, client secrets) are stored in secrets.yaml. +""" + +from src.config import get_settings_store as get_settings + +def get_keycloak_config() -> dict: + """Get Keycloak configuration from OmegaConf settings. + + Returns: + dict with keys: + - enabled: bool + - url: str (internal Docker URL) + - public_url: str (external browser URL) + - realm: str + - backend_client_id: str + - backend_client_secret: str (from secrets.yaml) + - frontend_client_id: str + - admin_user: str + - admin_password: str (from secrets.yaml) + """ + settings = get_settings() + + # Public configuration (from config.defaults.yaml) + config = { + "enabled": settings.get_sync("keycloak.enabled", False), + "url": settings.get_sync("keycloak.url", "http://keycloak:8080"), + "public_url": settings.get_sync("keycloak.public_url", "http://localhost:8080"), + "realm": settings.get_sync("keycloak.realm", "ushadow"), + "backend_client_id": settings.get_sync("keycloak.backend_client_id", "ushadow-backend"), + "frontend_client_id": settings.get_sync("keycloak.frontend_client_id", "ushadow-frontend"), + "admin_user": settings.get_sync("keycloak.admin_user", "admin"), + } + + # Secrets (from config/SECRETS/secrets.yaml) + config["backend_client_secret"] = settings.get_sync("keycloak.backend_client_secret") + config["admin_password"] = settings.get_sync("keycloak.admin_password") + + return config + + +def is_keycloak_enabled() -> bool: + """Check if Keycloak authentication is enabled. 
+ + This allows running both auth systems in parallel during migration: + - keycloak.enabled=false: Use existing fastapi-users auth + - keycloak.enabled=true: Use Keycloak (or hybrid mode) + """ + settings = get_settings() + return settings.get_sync("keycloak.enabled", False) diff --git a/ushadow/backend/src/database.py b/ushadow/backend/src/database.py new file mode 100644 index 00000000..00ff78d1 --- /dev/null +++ b/ushadow/backend/src/database.py @@ -0,0 +1,21 @@ +"""Database dependency injection helpers.""" + +from fastapi import Request +from motor.motor_asyncio import AsyncIOMotorDatabase + + +def get_database(request: Request) -> AsyncIOMotorDatabase: + """Get MongoDB database from FastAPI app state. + + Args: + request: FastAPI request object + + Returns: + MongoDB database instance + + Raises: + RuntimeError: If database not initialized + """ + if not hasattr(request.app.state, "db"): + raise RuntimeError("Database not initialized. Check lifespan events in main.py") + return request.app.state.db diff --git a/ushadow/backend/src/models/__init__.py b/ushadow/backend/src/models/__init__.py index 671ec0ea..5f20feb7 100644 --- a/ushadow/backend/src/models/__init__.py +++ b/ushadow/backend/src/models/__init__.py @@ -2,8 +2,24 @@ from .user import User, UserCreate, UserRead, UserUpdate, get_user_db from .provider import EnvMap, Capability, Provider, DockerConfig +from .share import ( + ShareToken, + ShareTokenCreate, + ShareTokenResponse, + ShareAccessLog, + KeycloakPolicy, + ResourceType, + SharePermission, +) __all__ = [ "User", "UserCreate", "UserRead", "UserUpdate", "get_user_db", "EnvMap", "Capability", "Provider", "DockerConfig", + "ShareToken", + "ShareTokenCreate", + "ShareTokenResponse", + "ShareAccessLog", + "KeycloakPolicy", + "ResourceType", + "SharePermission", ] diff --git a/ushadow/backend/src/models/dashboard.py b/ushadow/backend/src/models/dashboard.py new file mode 100644 index 00000000..e7c60b4c --- /dev/null +++ 
b/ushadow/backend/src/models/dashboard.py @@ -0,0 +1,52 @@ +"""Dashboard models for activity monitoring and statistics.""" + +from datetime import datetime +from enum import Enum +from typing import Any, Optional + +from pydantic import BaseModel, Field + + +class ActivityType(str, Enum): + """Types of system activities.""" + + CONVERSATION = "conversation" + MEMORY = "memory" + + +class ActivityEvent(BaseModel): + """A single activity event in the system.""" + + id: str = Field(..., description="Unique identifier for the activity") + type: ActivityType = Field(..., description="Type of activity") + title: str = Field(..., description="Human-readable title") + description: Optional[str] = Field(None, description="Detailed description") + timestamp: datetime = Field(..., description="When the activity occurred") + metadata: dict[str, Any] = Field( + default_factory=dict, description="Additional metadata" + ) + source: Optional[str] = Field( + None, description="Source service that generated this activity" + ) + + +class DashboardStats(BaseModel): + """Aggregated statistics for the dashboard.""" + + conversation_count: int = Field(0, description="Total number of conversations") + memory_count: int = Field(0, description="Total number of memories") + + +class DashboardData(BaseModel): + """Complete dashboard data including stats and recent activities.""" + + stats: DashboardStats = Field(..., description="Dashboard statistics") + recent_conversations: list[ActivityEvent] = Field( + default_factory=list, description="Recent conversation activities" + ) + recent_memories: list[ActivityEvent] = Field( + default_factory=list, description="Recent memory activities" + ) + last_updated: datetime = Field( + default_factory=datetime.utcnow, description="When this data was generated" + ) diff --git a/ushadow/backend/src/models/kubernetes.py b/ushadow/backend/src/models/kubernetes.py index 0b56077c..a6fee80e 100644 --- a/ushadow/backend/src/models/kubernetes.py +++ 
b/ushadow/backend/src/models/kubernetes.py @@ -36,6 +36,24 @@ class KubernetesCluster(BaseModel): # Labels for organization labels: Dict[str, str] = Field(default_factory=dict) + # Ingress configuration (cluster-wide defaults) + ingress_domain: Optional[str] = Field( + None, + description="Base domain for auto-generated ingress (e.g., 'shadow' for *.shadow)" + ) + ingress_class: str = Field( + "nginx", + description="Ingress controller class (nginx, traefik, etc.)" + ) + ingress_enabled_by_default: bool = Field( + False, + description="Auto-enable ingress for new deployments" + ) + tailscale_magicdns_enabled: bool = Field( + False, + description="Whether Tailscale MagicDNS is configured" + ) + @computed_field @property def deployment_target_id(self) -> str: @@ -130,6 +148,18 @@ class KubernetesClusterCreate(BaseModel): labels: Dict[str, str] = Field(default_factory=dict) +class KubernetesClusterUpdate(BaseModel): + """Request to update cluster configuration.""" + + name: Optional[str] = None + namespace: Optional[str] = None + labels: Optional[Dict[str, str]] = None + ingress_domain: Optional[str] = None + ingress_class: Optional[str] = None + ingress_enabled_by_default: Optional[bool] = None + tailscale_magicdns_enabled: Optional[bool] = None + + class KubernetesDeploymentSpec(BaseModel): """Kubernetes-specific deployment configuration.""" diff --git a/ushadow/backend/src/models/share.py b/ushadow/backend/src/models/share.py new file mode 100644 index 00000000..d2200779 --- /dev/null +++ b/ushadow/backend/src/models/share.py @@ -0,0 +1,289 @@ +"""Share token models for conversation and resource sharing. + +This module provides models for secure sharing of conversations and resources +with fine-grained access control compatible with Keycloak FGA policies. 
+""" + +import logging +from datetime import datetime +from enum import Enum +from typing import Any, Dict, List, Optional +from uuid import uuid4 + +from beanie import Document, Indexed, PydanticObjectId +from pydantic import BaseModel, Field + +logger = logging.getLogger(__name__) + + +class ResourceType(str, Enum): + """Types of resources that can be shared.""" + + CONVERSATION = "conversation" + MEMORY = "memory" + COLLECTION = "collection" + + +class SharePermission(str, Enum): + """Permission levels for shared resources.""" + + READ = "read" + WRITE = "write" + COMMENT = "comment" + DELETE = "delete" + ADMIN = "admin" + + +class KeycloakPolicy(BaseModel): + """Keycloak-compatible authorization policy. + + Matches Mycelia's policy structure: + {"resource": "conversation:123", "action": "read", "effect": "allow"} + """ + + resource: str = Field(..., description="Resource identifier (e.g., 'conversation:123')") + action: str = Field(..., description="Action/permission (read, write, delete)") + effect: str = Field(default="allow", description="Effect of policy (allow/deny)") + + model_config = {"extra": "forbid"} + + +class ShareToken(Document): + """Share token for secure resource sharing. + + Stores information about shared resources including Keycloak-compatible + policies for fine-grained access control. Supports both authenticated + and anonymous sharing with optional expiration and view limits. 
+ """ + + # Token identification + token: Indexed(str, unique=True) = Field( # type: ignore + default_factory=lambda: str(uuid4()), + description="Unique share token (UUID)", + ) + + # Resource identification + resource_type: str = Field(..., description="Type of shared resource") + resource_id: str = Field(..., description="ID of the shared resource") + + # Ownership + created_by: PydanticObjectId = Field(..., description="User who created the share") + + # Keycloak-compatible policies + policies: List[KeycloakPolicy] = Field( + default_factory=list, + description="Keycloak FGA policies for this share", + ) + + # Permissions (simplified view for API responses) + permissions: List[str] = Field( + default_factory=lambda: ["read"], + description="Simplified permission list (read, write, etc.)", + ) + + # Access control + require_auth: bool = Field( + default=False, + description="If True, user must authenticate to access share", + ) + tailscale_only: bool = Field( + default=False, + description="If True, only accessible from Tailscale network", + ) + allowed_emails: List[str] = Field( + default_factory=list, + description="If non-empty, only these emails can access (when require_auth=True)", + ) + + # Expiration and limits + expires_at: Optional[datetime] = Field( + default=None, + description="When this share expires (None = never)", + ) + max_views: Optional[int] = Field( + default=None, + description="Maximum number of views (None = unlimited)", + ) + view_count: int = Field(default=0, description="Number of times accessed") + + # Audit trail + last_accessed_at: Optional[datetime] = Field( + default=None, + description="Last time this share was accessed", + ) + last_accessed_by: Optional[str] = Field( + default=None, + description="Last user/IP that accessed this share", + ) + access_log: List[Dict[str, Any]] = Field( + default_factory=list, + description="Access audit log (timestamp, user/IP, action)", + ) + + # Timestamps + created_at: datetime = 
Field(default_factory=datetime.utcnow) + updated_at: datetime = Field(default_factory=datetime.utcnow) + + # Keycloak integration (populated when Keycloak is active) + keycloak_policy_id: Optional[str] = Field( + default=None, + description="Keycloak policy ID if registered with Keycloak FGA", + ) + keycloak_resource_id: Optional[str] = Field( + default=None, + description="Keycloak resource ID if registered", + ) + + class Settings: + """Beanie document settings.""" + + name = "share_tokens" + indexes = [ + "token", # Fast lookup by token + "resource_type", + "resource_id", + "created_by", + "expires_at", + [("resource_type", 1), ("resource_id", 1)], # Compound index + ] + + def is_expired(self) -> bool: + """Check if share token has expired.""" + if self.expires_at is None: + return False + return datetime.utcnow() > self.expires_at + + def is_view_limit_exceeded(self) -> bool: + """Check if view limit has been exceeded.""" + if self.max_views is None: + return False + return self.view_count >= self.max_views + + def can_access(self, user_email: Optional[str] = None) -> tuple[bool, str]: + """Check if access is allowed. + + Args: + user_email: Email of user trying to access (None for anonymous) + + Returns: + Tuple of (allowed: bool, reason: str) + """ + if self.is_expired(): + return False, "Share link has expired" + + if self.is_view_limit_exceeded(): + return False, "Share link view limit exceeded" + + if self.require_auth and user_email is None: + return False, "Authentication required" + + if self.allowed_emails and user_email not in self.allowed_emails: + return False, f"Access restricted to specific users" + + return True, "Access granted" + + def has_permission(self, permission: str) -> bool: + """Check if token grants specific permission.""" + return permission in self.permissions + + async def record_access( + self, + user_identifier: str, + action: str = "view", + metadata: Optional[Dict[str, Any]] = None, + ): + """Record access to shared resource. 
+ + Args: + user_identifier: Email or IP address of accessor + action: Action performed (view, edit, etc.) + metadata: Additional context (user agent, IP, etc.) + """ + self.view_count += 1 + self.last_accessed_at = datetime.utcnow() + self.last_accessed_by = user_identifier + self.updated_at = datetime.utcnow() + + # Add to audit log + log_entry = { + "timestamp": datetime.utcnow(), + "user_identifier": user_identifier, + "action": action, + "view_count": self.view_count, + } + if metadata: + log_entry["metadata"] = metadata + + self.access_log.append(log_entry) + await self.save() + + +class ShareTokenCreate(BaseModel): + """Request model for creating a share token.""" + + resource_type: ResourceType = Field(..., description="Type of resource to share") + resource_id: str = Field(..., min_length=1, description="ID of resource to share") + + permissions: List[SharePermission] = Field( + default=[SharePermission.READ], + description="Permissions to grant", + ) + + # Access control + require_auth: bool = Field( + default=False, + description="Require authentication to access", + ) + tailscale_only: bool = Field( + default=False, + description="Only accessible from Tailscale network", + ) + allowed_emails: List[str] = Field( + default_factory=list, + description="Restrict access to specific email addresses", + ) + + # Expiration + expires_in_days: Optional[int] = Field( + default=None, + ge=1, + le=365, + description="Number of days until expiration (None = never)", + ) + max_views: Optional[int] = Field( + default=None, + ge=1, + description="Maximum number of views (None = unlimited)", + ) + + model_config = {"extra": "forbid"} + + +class ShareTokenResponse(BaseModel): + """Response model for share token information.""" + + token: str + share_url: str + resource_type: str + resource_id: str + permissions: List[str] + expires_at: Optional[datetime] = None + max_views: Optional[int] = None + view_count: int + require_auth: bool + tailscale_only: bool + created_at: 
datetime + + model_config = {"extra": "forbid"} + + +class ShareAccessLog(BaseModel): + """Access log entry for share token.""" + + timestamp: datetime + user_identifier: str + action: str + view_count: int + metadata: Optional[Dict[str, Any]] = None + + model_config = {"extra": "forbid"} diff --git a/ushadow/backend/src/models/user.py b/ushadow/backend/src/models/user.py index 57b00eb4..c6138d92 100644 --- a/ushadow/backend/src/models/user.py +++ b/ushadow/backend/src/models/user.py @@ -75,6 +75,12 @@ class User(BeanieBaseUser, Document): created_at: datetime = Field(default_factory=datetime.utcnow) updated_at: datetime = Field(default_factory=datetime.utcnow) + # Keycloak integration field + keycloak_id: Optional[str] = Field( + default=None, + description="Keycloak user UUID (sub claim) for federated users" + ) + class Settings: name = "users" # MongoDB collection name email_collation = {"locale": "en", "strength": 2} # Case-insensitive email diff --git a/ushadow/backend/src/routers/audio_relay.py b/ushadow/backend/src/routers/audio_relay.py index b0f25635..770af6f8 100644 --- a/ushadow/backend/src/routers/audio_relay.py +++ b/ushadow/backend/src/routers/audio_relay.py @@ -2,8 +2,8 @@ Audio Relay Router - WebSocket relay to multiple destinations Accepts Wyoming protocol audio from mobile app and forwards to: -- Chronicle (/chronicle/ws_pcm) -- Mycelia (/mycelia/ws_pcm) +- Chronicle (/ws?codec=pcm) +- Mycelia (/ws?codec=pcm) - Any other configured endpoints Mobile connects once to /ws/audio/relay, server handles fanout. @@ -38,17 +38,18 @@ async def connect(self): try: import websockets - # Add token to URL - url_with_token = f"{self.url}?token={self.token}" + # Add token to URL (use & if URL already has query params) + separator = '&' if '?' in self.url else '?' 
+ url_with_token = f"{self.url}{separator}token={self.token}" # Detect endpoint type for logging - # Note: /ws/audio is unified endpoint that accepts both PCM and Opus - if "/ws_omi" in self.url: + # Note: /ws endpoint accepts codec via query parameter + if "codec=opus" in self.url: endpoint_type = "Opus" - elif "/ws_pcm" in self.url: + elif "codec=pcm" in self.url: endpoint_type = "PCM" - elif "/ws/audio" in self.url: - endpoint_type = "Unified (PCM/Opus)" + elif "/ws" in self.url: + endpoint_type = "Unified (codec via query param)" else: endpoint_type = "Unknown" logger.info(f"[AudioRelay:{self.name}] Connecting to {self.url} [{endpoint_type}]") @@ -191,11 +192,11 @@ async def audio_relay_websocket( Audio relay WebSocket endpoint. Query parameters: - - destinations: JSON array of {"name": "chronicle", "url": "ws://host/chronicle/ws_pcm"} + - destinations: JSON array of {"name": "chronicle", "url": "ws://host/ws?codec=pcm"} - token: JWT token for authenticating to destinations Example: - ws://localhost:8000/ws/audio/relay?destinations=[{"name":"chronicle","url":"ws://localhost:5001/chronicle/ws_pcm"},{"name":"mycelia","url":"ws://localhost:5173/ws_pcm"}]&token=YOUR_JWT + ws://localhost:8000/ws/audio/relay?destinations=[{"name":"chronicle","url":"ws://host/ws?codec=pcm"},{"name":"mycelia","url":"ws://host/ws?codec=pcm"}]&token=YOUR_JWT """ await websocket.accept() logger.info("[AudioRelay] Client connected") @@ -209,6 +210,22 @@ async def audio_relay_websocket( await websocket.close(code=1008, reason="Missing destinations or token parameter") return + # Bridge Keycloak token to service token for destinations + from src.services.token_bridge import bridge_to_service_token + service_token = await bridge_to_service_token( + token, + audiences=["ushadow", "chronicle", "mycelia"] + ) + + if not service_token: + logger.error("[AudioRelay] Token bridging failed") + await websocket.close(code=1008, reason="Authentication failed") + return + + logger.info("[AudioRelay] โœ“ 
Token bridged successfully") + # Use service token for downstream connections + token = service_token + destinations = json.loads(destinations_param) if not isinstance(destinations, list) or len(destinations) == 0: await websocket.close(code=1008, reason="destinations must be a non-empty array") @@ -217,13 +234,17 @@ async def audio_relay_websocket( logger.info(f"[AudioRelay] Destinations: {[d['name'] for d in destinations]}") # Log exact URLs received from client for debugging for dest in destinations: - # Note: /ws/audio is unified endpoint that accepts both PCM and Opus + # Detect endpoint type (check for old formats first, then new) if "/ws_omi" in dest['url']: - endpoint_type = "Opus" + endpoint_type = "Opus (LEGACY - use /ws?codec=opus)" elif "/ws_pcm" in dest['url']: + endpoint_type = "PCM (LEGACY - use /ws?codec=pcm)" + elif "codec=opus" in dest['url']: + endpoint_type = "Opus" + elif "codec=pcm" in dest['url']: endpoint_type = "PCM" - elif "/ws/audio" in dest['url']: - endpoint_type = "Unified (PCM/Opus)" + elif "/ws" in dest['url']: + endpoint_type = "Unified (missing codec parameter)" else: endpoint_type = "Unknown" logger.info(f"[AudioRelay] Client requested: {dest['name']} -> {dest['url']} [{endpoint_type}]") @@ -306,5 +327,5 @@ async def relay_status(): "destinations": "JSON array of destination configs", "token": "JWT token for destination authentication" }, - "example_url": 'ws://localhost:8000/ws/audio/relay?destinations=[{"name":"chronicle","url":"ws://host/chronicle/ws_pcm"}]&token=JWT' + "example_url": 'ws://localhost:8000/ws/audio/relay?destinations=[{"name":"chronicle","url":"ws://host/ws?codec=pcm"}]&token=JWT' } diff --git a/ushadow/backend/src/routers/auth.py b/ushadow/backend/src/routers/auth.py index 11e48ca7..425dd43e 100644 --- a/ushadow/backend/src/routers/auth.py +++ b/ushadow/backend/src/routers/auth.py @@ -315,6 +315,37 @@ async def create_initial_admin( @router.get("/me", response_model=UserRead) async def 
get_current_user_info(user: User = Depends(get_current_user)): """Get current authenticated user information.""" + from src.utils.auth_helpers import get_user_id, get_user_email, get_user_name + + # If user is a Keycloak dict, look up the MongoDB User record + if isinstance(user, dict): + from src.services.keycloak_user_sync import get_mongodb_user_id_for_keycloak_user + from src.models.user import User as UserModel + + # Get or create MongoDB User record + mongodb_user_id = await get_mongodb_user_id_for_keycloak_user( + keycloak_sub=user.get("sub"), + email=user.get("email"), + name=user.get("name") + ) + + # Fetch the User record + user_record = await UserModel.get(mongodb_user_id) + if not user_record: + raise HTTPException(status_code=404, detail="User record not found") + + return UserRead( + id=user_record.id, + email=user_record.email, + display_name=user_record.display_name, + is_active=user_record.is_active, + is_superuser=user_record.is_superuser, + is_verified=user_record.is_verified, + created_at=user_record.created_at, + updated_at=user_record.updated_at, + ) + + # Legacy User object return UserRead( id=user.id, email=user.email, @@ -358,7 +389,7 @@ async def logout( user: User = Depends(get_current_user), ): """Logout current user by clearing the auth cookie. - + Note: For bearer tokens, logout is handled client-side by discarding the token. This endpoint clears the HTTP-only cookie. 
""" @@ -367,6 +398,108 @@ async def logout( httponly=True, samesite="lax", ) + + +# Keycloak OAuth Token Exchange +class TokenExchangeRequest(BaseModel): + """Request for exchanging OAuth authorization code for tokens.""" + code: str = Field(..., description="Authorization code from Keycloak") + code_verifier: str = Field(..., description="PKCE code verifier") + redirect_uri: str = Field(..., description="Redirect URI used in authorization request") + + +class TokenExchangeResponse(BaseModel): + """Response containing OAuth tokens.""" + access_token: str + refresh_token: Optional[str] = None + id_token: Optional[str] = None + expires_in: Optional[int] = None + token_type: str = "Bearer" + + +@router.post("/token", response_model=TokenExchangeResponse) +async def exchange_code_for_tokens(request: TokenExchangeRequest): + """Exchange OAuth authorization code for access/refresh tokens. + + This endpoint implements the OAuth 2.0 Authorization Code Flow with PKCE. + It exchanges the authorization code received from Keycloak for actual tokens. + + Args: + request: Contains authorization code, PKCE verifier, and redirect URI + + Returns: + Access token, refresh token, and ID token from Keycloak + + Raises: + 400: If code exchange fails (invalid code, expired, etc.) 
+ 503: If Keycloak is unreachable + """ + import httpx + from src.config.keycloak_settings import get_keycloak_config + + try: + # Get Keycloak configuration + kc_config = get_keycloak_config() + + if not kc_config.get("enabled"): + raise HTTPException( + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + detail="Keycloak authentication is not enabled" + ) + + # Prepare token exchange request to Keycloak + token_url = f"{kc_config['url']}/realms/{kc_config['realm']}/protocol/openid-connect/token" + + token_data = { + "grant_type": "authorization_code", + "code": request.code, + "redirect_uri": request.redirect_uri, + "client_id": kc_config["frontend_client_id"], + "code_verifier": request.code_verifier, + } + + logger.info(f"[TOKEN-EXCHANGE] Exchanging code with Keycloak at {token_url}") + + # Make request to Keycloak + async with httpx.AsyncClient() as client: + response = await client.post( + token_url, + data=token_data, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + timeout=10.0 + ) + + if response.status_code != 200: + error_detail = response.text + logger.error(f"[TOKEN-EXCHANGE] Keycloak error: {error_detail}") + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Token exchange failed: {error_detail}" + ) + + tokens = response.json() + logger.info(f"[TOKEN-EXCHANGE] โœ“ Successfully exchanged code for tokens") + + return TokenExchangeResponse( + access_token=tokens["access_token"], + refresh_token=tokens.get("refresh_token"), + id_token=tokens.get("id_token"), + expires_in=tokens.get("expires_in"), + token_type=tokens.get("token_type", "Bearer") + ) + + except httpx.RequestError as e: + logger.error(f"[TOKEN-EXCHANGE] Failed to connect to Keycloak: {e}") + raise HTTPException( + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + detail="Cannot connect to Keycloak authentication server" + ) + except Exception as e: + logger.error(f"[TOKEN-EXCHANGE] Unexpected error: {e}", exc_info=True) + raise HTTPException( + 
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=str(e) + ) logger.info(f"User logged out: {user.email}") return {"message": "Successfully logged out"} diff --git a/ushadow/backend/src/routers/chat.py b/ushadow/backend/src/routers/chat.py index 59a6a8bb..fe184154 100644 --- a/ushadow/backend/src/routers/chat.py +++ b/ushadow/backend/src/routers/chat.py @@ -3,10 +3,12 @@ Provides a chat interface that: - Uses the selected LLM provider via LiteLLM -- Optionally enriches context with OpenMemory -- Streams responses using Server-Sent Events (SSE) +- Uses MCP-style tool calling for dynamic memory search +- Queries OpenMemory for user-specific context +- Streams responses using AI SDK data stream protocol -The streaming format is compatible with assistant-ui's data stream protocol. +The LLM can call the search_memories tool to fetch relevant context +from OpenMemory during the conversation. """ import json @@ -15,12 +17,13 @@ from typing import List, Optional, Dict, Any import httpx -from fastapi import APIRouter, HTTPException +from fastapi import APIRouter, HTTPException, Depends, Request from fastapi.responses import StreamingResponse from pydantic import BaseModel from src.services.llm_client import get_llm_client -from src.config import get_settings +from src.services.auth import get_current_user +from src.models.user import User logger = logging.getLogger(__name__) router = APIRouter() @@ -63,7 +66,8 @@ class ChatStatus(BaseModel): async def fetch_memory_context( query: str, user_id: str, - limit: int = 5 + limit: int = 5, + auth_header: Optional[str] = None ) -> List[str]: """ Fetch relevant memories from OpenMemory to enrich context. 
@@ -72,56 +76,71 @@ async def fetch_memory_context( query: The user's message to find relevant context for user_id: User identifier for memory lookup limit: Maximum number of memories to retrieve + auth_header: Authorization header to forward to mem0 Returns: List of relevant memory strings """ - settings = get_settings() - memory_url = await settings.get( - "infrastructure.openmemory_server_url", - "http://localhost:8765" - ) - try: + # Use proxy endpoint - call backend's internal port (8000) not external port + # This works regardless of deployment (Docker, K8s, etc.) + headers = {} + if auth_header: + headers["Authorization"] = auth_header + async with httpx.AsyncClient(timeout=5.0) as client: - # Search for relevant memories - response = await client.post( - f"{memory_url}/api/v1/memories/search", - json={ - "query": query, - "user_id": user_id, - "limit": limit - } - ) + # Query OpenMemory (mem0) using filter endpoint - should be the source of truth + url = "http://localhost:8000/api/services/mem0/proxy/api/v1/memories/filter" + body = { + "user_id": user_id, + "limit": 20, + } + logger.info(f"[CHAT] Fetching memories from OpenMemory filter: {url} with body: {body}, auth: {bool(headers.get('Authorization'))}") + + response = await client.post(url, json=body, headers=headers) + + logger.info(f"[CHAT] Memory fetch response status: {response.status_code}") if response.status_code == 200: data = response.json() - memories = data.get("results", []) - return [m.get("memory", m.get("content", "")) for m in memories if m] + items = data.get("items", []) + logger.info(f"[CHAT] Memory fetch returned {len(items)} total memories") + + memories = [] + for item in items: + # OpenMemory uses 'text' field for content + content = item.get("text") or item.get("content", "") + if content: + # Include category info if available for better context + categories = item.get("categories", []) + if categories: + content = f"[{', '.join(categories)}] {content}" + 
memories.append(content) + + logger.info(f"[CHAT] Retrieved {len(memories)} memories: {memories[:3]}") + return memories[:limit] + else: + logger.warning(f"[CHAT] Memory fetch failed with status {response.status_code}: {response.text[:200]}") except httpx.TimeoutException: - logger.warning("OpenMemory timeout - continuing without context") - except httpx.ConnectError: - logger.debug("OpenMemory not available - continuing without context") + logger.warning("[CHAT] OpenMemory timeout - continuing without context") + except httpx.ConnectError as e: + logger.warning(f"[CHAT] OpenMemory connection error: {e} - continuing without context") except Exception as e: - logger.warning(f"OpenMemory error: {e}") + logger.warning(f"[CHAT] OpenMemory error: {e}", exc_info=True) return [] async def check_memory_available() -> bool: - """Check if OpenMemory service is available.""" - settings = get_settings() - memory_url = await settings.get( - "infrastructure.openmemory_server_url", - "http://localhost:8765" - ) - + """Check if OpenMemory service is available by testing the proxy endpoint.""" try: - async with httpx.AsyncClient(timeout=2.0) as client: - response = await client.get(f"{memory_url}/health") + async with httpx.AsyncClient(timeout=3.0) as client: + # Use the DNS alias to check mem0 directly (same as proxy does internally) + response = await client.get("http://mem0:8765/api/v1/config/") return response.status_code == 200 - except Exception: + except Exception as e: + logger.debug(f"Could not check mem0 availability: {e}") return False @@ -181,15 +200,25 @@ async def get_chat_status() -> ChatStatus: @router.post("") -async def chat(request: ChatRequest): +async def chat( + chat_request: ChatRequest, + request: Request, + current_user: User = Depends(get_current_user) +): """ Chat endpoint with streaming response. Accepts messages and returns a streaming response compatible with assistant-ui's data stream protocol. 
+ + Uses MCP-style tool calling for memory access - the LLM can query + memories dynamically during the conversation. """ llm = get_llm_client() + # Extract auth header to forward to memory service + auth_header = request.headers.get("Authorization") + # Check if configured if not await llm.is_configured(): raise HTTPException( @@ -201,56 +230,119 @@ async def chat(request: ChatRequest): messages: List[Dict[str, str]] = [] # Add system message if provided - if request.system: - messages.append({"role": "system", "content": request.system}) - - # Fetch memory context if enabled - memory_context = [] - if request.use_memory and request.messages: - user_id = request.user_id or "default" - last_user_message = next( - (m.content for m in reversed(request.messages) if m.role == "user"), - None - ) - if last_user_message: - memory_context = await fetch_memory_context( - last_user_message, - user_id - ) - - # Add memory context as system message if available - if memory_context: - context_text = "\n\nRelevant context from memory:\n" + "\n".join( - f"- {mem}" for mem in memory_context - ) - if messages and messages[0]["role"] == "system": - messages[0]["content"] += context_text - else: - messages.insert(0, { - "role": "system", - "content": f"You are a helpful assistant.{context_text}" - }) + if chat_request.system: + messages.append({"role": "system", "content": chat_request.system}) + else: + messages.append({"role": "system", "content": "You are a helpful assistant with access to memory search."}) # Add conversation messages - for msg in request.messages: + for msg in chat_request.messages: messages.append({"role": msg.role, "content": msg.content}) + # Define memory search tool (MCP-style function calling) + tools = None + if chat_request.use_memory: + # Use authenticated user's email as user_id (same as memories router) + from src.utils.auth_helpers import get_user_email + user_id = chat_request.user_id or get_user_email(current_user) + tools = [{ + "type": 
"function", + "function": { + "name": "search_memories", + "description": "Search the user's stored memories and context. Use this to recall information about the user, their preferences, previous conversations, and relevant facts.", + "parameters": { + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "What to search for in memories" + }, + "limit": { + "type": "integer", + "description": "Maximum number of memories to return (default 5)", + "default": 5 + } + }, + "required": ["query"] + } + } + }] + async def generate(): """Stream response chunks.""" try: - async for chunk in llm.stream_completion( + # First pass - LLM may request tool calls + response = await llm.completion( messages=messages, - temperature=request.temperature, - max_tokens=request.max_tokens - ): - # Use AI SDK data stream format for text deltas - yield format_text_delta(chunk) + temperature=chat_request.temperature, + max_tokens=chat_request.max_tokens, + tools=tools if tools else None, + tool_choice="auto" if tools else None + ) + + # Check if LLM wants to call tools + if response.choices[0].message.tool_calls: + logger.info(f"[CHAT] LLM requested {len(response.choices[0].message.tool_calls)} tool calls") + + # Add assistant's tool call message to history + assistant_msg = response.choices[0].message + messages.append({ + "role": "assistant", + "content": assistant_msg.content, + "tool_calls": [ + { + "id": tc.id, + "type": "function", + "function": { + "name": tc.function.name, + "arguments": tc.function.arguments + } + } + for tc in assistant_msg.tool_calls + ] + }) + + # Execute tool calls + for tool_call in assistant_msg.tool_calls: + if tool_call.function.name == "search_memories": + args = json.loads(tool_call.function.arguments) + query = args.get("query", "") + limit = args.get("limit", 5) + + logger.info(f"[CHAT] Executing memory search: query='{query}', limit={limit}") + memories = await fetch_memory_context(query, user_id, limit=limit, 
auth_header=auth_header) + + # Format memories as readable text + memories_text = "\n".join([f"- {mem}" for mem in memories]) + + # Format tool result + tool_result = { + "role": "tool", + "tool_call_id": tool_call.id, + "name": "search_memories", + "content": f"Found {len(memories)} memories:\n{memories_text}" + } + messages.append(tool_result) + logger.info(f"[CHAT] Memory search returned {len(memories)} results: {memories[:2]}") + + # Second pass - LLM responds with tool results + async for chunk in llm.stream_completion( + messages=messages, + temperature=chat_request.temperature, + max_tokens=chat_request.max_tokens + ): + yield format_text_delta(chunk) + else: + # No tool calls - stream the original response + content = response.choices[0].message.content + if content: + yield format_text_delta(content) # Send finish message yield format_finish_message("stop") except Exception as e: - logger.error(f"Chat streaming error: {e}") + logger.error(f"Chat streaming error: {e}", exc_info=True) # Send error in stream error_msg = {"error": str(e)} yield f"e:{json.dumps(error_msg)}\n" @@ -267,15 +359,22 @@ async def generate(): @router.post("/simple") -async def chat_simple(request: ChatRequest) -> Dict[str, Any]: +async def chat_simple( + chat_request: ChatRequest, + request: Request, + current_user: User = Depends(get_current_user) +) -> Dict[str, Any]: """ - Non-streaming chat endpoint. + Non-streaming chat endpoint with MCP tool calling. Returns the complete response as JSON. Useful for testing or when streaming isn't needed. 
""" llm = get_llm_client() + # Extract auth header to forward to memory service + auth_header = request.headers.get("Authorization") + # Check if configured if not await llm.is_configured(): raise HTTPException( @@ -287,44 +386,106 @@ async def chat_simple(request: ChatRequest) -> Dict[str, Any]: messages: List[Dict[str, str]] = [] # Add system message if provided - if request.system: - messages.append({"role": "system", "content": request.system}) - - # Fetch memory context if enabled - if request.use_memory and request.messages: - user_id = request.user_id or "default" - last_user_message = next( - (m.content for m in reversed(request.messages) if m.role == "user"), - None - ) - if last_user_message: - memory_context = await fetch_memory_context( - last_user_message, - user_id - ) - if memory_context: - context_text = "\n\nRelevant context from memory:\n" + "\n".join( - f"- {mem}" for mem in memory_context - ) - if messages and messages[0]["role"] == "system": - messages[0]["content"] += context_text - else: - messages.insert(0, { - "role": "system", - "content": f"You are a helpful assistant.{context_text}" - }) + if chat_request.system: + messages.append({"role": "system", "content": chat_request.system}) + else: + messages.append({"role": "system", "content": "You are a helpful assistant with access to memory search."}) # Add conversation messages - for msg in request.messages: + for msg in chat_request.messages: messages.append({"role": msg.role, "content": msg.content}) + # Define memory search tool (MCP-style function calling) + tools = None + if chat_request.use_memory: + # Use authenticated user's email as user_id (same as memories router) + from src.utils.auth_helpers import get_user_email + user_id = chat_request.user_id or get_user_email(current_user) + tools = [{ + "type": "function", + "function": { + "name": "search_memories", + "description": "Search the user's stored memories and context. 
Use this to recall information about the user, their preferences, previous conversations, and relevant facts.", + "parameters": { + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "What to search for in memories" + }, + "limit": { + "type": "integer", + "description": "Maximum number of memories to return (default 5)", + "default": 5 + } + }, + "required": ["query"] + } + } + }] + try: + # First pass - LLM may request tool calls response = await llm.completion( messages=messages, - temperature=request.temperature, - max_tokens=request.max_tokens + temperature=chat_request.temperature, + max_tokens=chat_request.max_tokens, + tools=tools if tools else None, + tool_choice="auto" if tools else None ) + # Check if LLM wants to call tools + if response.choices[0].message.tool_calls: + logger.info(f"[CHAT] LLM requested {len(response.choices[0].message.tool_calls)} tool calls") + + # Add assistant's tool call message to history + assistant_msg = response.choices[0].message + messages.append({ + "role": "assistant", + "content": assistant_msg.content, + "tool_calls": [ + { + "id": tc.id, + "type": "function", + "function": { + "name": tc.function.name, + "arguments": tc.function.arguments + } + } + for tc in assistant_msg.tool_calls + ] + }) + + # Execute tool calls + for tool_call in assistant_msg.tool_calls: + if tool_call.function.name == "search_memories": + args = json.loads(tool_call.function.arguments) + query = args.get("query", "") + limit = args.get("limit", 5) + + logger.info(f"[CHAT] Executing memory search: query='{query}', limit={limit}") + memories = await fetch_memory_context(query, user_id, limit=limit, auth_header=auth_header) + + # Format memories as readable text + memories_text = "\n".join([f"- {mem}" for mem in memories]) + + # Format tool result + tool_result = { + "role": "tool", + "tool_call_id": tool_call.id, + "name": "search_memories", + "content": f"Found {len(memories)} memories:\n{memories_text}" + } + 
messages.append(tool_result) + logger.info(f"[CHAT] Memory search returned {len(memories)} results: {memories[:2]}") + + # Second pass - LLM responds with tool results + response = await llm.completion( + messages=messages, + temperature=chat_request.temperature, + max_tokens=chat_request.max_tokens + ) + # Extract the assistant message content = response.choices[0].message.content @@ -336,5 +497,5 @@ async def chat_simple(request: ChatRequest) -> Dict[str, Any]: } except Exception as e: - logger.error(f"Chat error: {e}") + logger.error(f"Chat error: {e}", exc_info=True) raise HTTPException(status_code=500, detail=str(e)) diff --git a/ushadow/backend/src/routers/dashboard.py b/ushadow/backend/src/routers/dashboard.py new file mode 100644 index 00000000..0a20138d --- /dev/null +++ b/ushadow/backend/src/routers/dashboard.py @@ -0,0 +1,45 @@ +""" +Dashboard API - Recent conversations and memories from Chronicle. + +Provides unified dashboard data showing recent system activity. +""" + +import logging + +from fastapi import APIRouter, Depends, HTTPException, Query + +from src.models.dashboard import DashboardData +from src.services.dashboard_service import DashboardService, get_dashboard_service + +logger = logging.getLogger(__name__) +router = APIRouter() + + +@router.get("/", response_model=DashboardData) +async def get_dashboard_data( + conversation_limit: int = Query(10, ge=1, le=50), + memory_limit: int = Query(10, ge=1, le=50), + service: DashboardService = Depends(get_dashboard_service), +) -> DashboardData: + """ + Get complete dashboard data. + + Fetches recent conversations and memories from Chronicle. 
+ + Args: + conversation_limit: Max conversations to return (1-50) + memory_limit: Max memories to return (1-50) + + Returns: + Dashboard data with stats and recent activities + """ + try: + return await service.get_dashboard_data( + conversation_limit=conversation_limit, + memory_limit=memory_limit, + ) + except Exception as e: + logger.error(f"Failed to fetch dashboard data: {e}") + raise HTTPException( + status_code=500, detail="Failed to fetch dashboard data" + ) diff --git a/ushadow/backend/src/routers/github_import.py b/ushadow/backend/src/routers/github_import.py index 55c1aa47..53eee3fc 100644 --- a/ushadow/backend/src/routers/github_import.py +++ b/ushadow/backend/src/routers/github_import.py @@ -330,9 +330,9 @@ def generate_compose_from_dockerhub( } }, 'networks': { - 'infra-network': { + 'ushadow-network': { 'external': True, - 'name': 'infra-network' + 'name': 'ushadow-network' } } } @@ -385,7 +385,7 @@ def generate_compose_from_dockerhub( compose_data['volumes'] = volume_definitions # Add network - service_config['networks'] = ['infra-network'] + service_config['networks'] = ['ushadow-network'] # Add extra_hosts for host.docker.internal service_config['extra_hosts'] = ['host.docker.internal:host-gateway'] diff --git a/ushadow/backend/src/routers/keycloak_admin.py b/ushadow/backend/src/routers/keycloak_admin.py new file mode 100644 index 00000000..1190d456 --- /dev/null +++ b/ushadow/backend/src/routers/keycloak_admin.py @@ -0,0 +1,145 @@ +""" +Keycloak Admin Router + +Admin endpoints for managing Keycloak configuration. 
+""" + +from fastapi import APIRouter, HTTPException +from pydantic import BaseModel +import logging + +from src.services.keycloak_admin import get_keycloak_admin + +router = APIRouter() +logger = logging.getLogger(__name__) + + +class ClientUpdateResponse(BaseModel): + """Response for client update operations""" + success: bool + message: str + client_id: str + + +@router.post("/clients/{client_id}/enable-pkce", response_model=ClientUpdateResponse) +async def enable_pkce_for_client(client_id: str): + """ + Enable PKCE (Proof Key for Code Exchange) for a Keycloak client. + + This updates the client configuration to require PKCE with S256 code challenge method. + PKCE is required for secure authentication in public clients (like SPAs). + + Args: + client_id: The Keycloak client ID (e.g., "ushadow-frontend") + + Returns: + Success status and message + """ + admin_client = get_keycloak_admin() + + try: + # Get current client configuration + client = await admin_client.get_client_by_client_id(client_id) + if not client: + raise HTTPException( + status_code=404, + detail=f"Client '{client_id}' not found in Keycloak" + ) + + client_uuid = client["id"] + logger.info(f"[KC-ADMIN] Enabling PKCE for client: {client_id} ({client_uuid})") + + # Update client attributes to require PKCE + import httpx + import os + + token = await admin_client._get_admin_token() + keycloak_url = os.getenv("KEYCLOAK_URL", "http://keycloak:8080") + realm = os.getenv("KEYCLOAK_REALM", "ushadow") + + # Get full client config first + async with httpx.AsyncClient() as http_client: + get_response = await http_client.get( + f"{keycloak_url}/admin/realms/{realm}/clients/{client_uuid}", + headers={"Authorization": f"Bearer {token}"}, + timeout=10.0 + ) + + if get_response.status_code != 200: + raise HTTPException( + status_code=500, + detail=f"Failed to get client config: {get_response.text}" + ) + + full_client_config = get_response.json() + + # Update attributes + if "attributes" not in 
full_client_config: + full_client_config["attributes"] = {} + + full_client_config["attributes"]["pkce.code.challenge.method"] = "S256" + + # Update client + update_response = await http_client.put( + f"{keycloak_url}/admin/realms/{realm}/clients/{client_uuid}", + headers={ + "Authorization": f"Bearer {token}", + "Content-Type": "application/json" + }, + json=full_client_config, + timeout=10.0 + ) + + if update_response.status_code != 204: + raise HTTPException( + status_code=500, + detail=f"Failed to update client: {update_response.text}" + ) + + logger.info(f"[KC-ADMIN] โœ“ PKCE enabled for client: {client_id}") + + return ClientUpdateResponse( + success=True, + message=f"PKCE (S256) enabled for client '{client_id}'", + client_id=client_id + ) + + except HTTPException: + raise + except Exception as e: + logger.error(f"[KC-ADMIN] Failed to enable PKCE: {e}", exc_info=True) + raise HTTPException( + status_code=500, + detail=f"Failed to enable PKCE: {str(e)}" + ) + + +@router.get("/clients/{client_id}/config") +async def get_client_config(client_id: str): + """ + Get Keycloak client configuration. 
+ + Args: + client_id: The Keycloak client ID + + Returns: + Client configuration including attributes + """ + admin_client = get_keycloak_admin() + + client = await admin_client.get_client_by_client_id(client_id) + if not client: + raise HTTPException( + status_code=404, + detail=f"Client '{client_id}' not found" + ) + + return { + "client_id": client.get("clientId"), + "id": client.get("id"), + "enabled": client.get("enabled"), + "publicClient": client.get("publicClient"), + "standardFlowEnabled": client.get("standardFlowEnabled"), + "attributes": client.get("attributes", {}), + "redirectUris": client.get("redirectUris", []), + } diff --git a/ushadow/backend/src/routers/kubernetes.py b/ushadow/backend/src/routers/kubernetes.py index 63414e1a..72735451 100644 --- a/ushadow/backend/src/routers/kubernetes.py +++ b/ushadow/backend/src/routers/kubernetes.py @@ -10,6 +10,7 @@ from src.models.kubernetes import ( KubernetesCluster, KubernetesClusterCreate, + KubernetesClusterUpdate, KubernetesDeploymentSpec, KubernetesNode, ) @@ -125,6 +126,33 @@ async def remove_cluster( return {"success": True, "message": f"Cluster {cluster_id} removed"} +@router.patch("/{cluster_id}", response_model=KubernetesCluster) +async def update_cluster( + cluster_id: str, + update: KubernetesClusterUpdate, + current_user: User = Depends(get_current_user) +): + """Update cluster configuration settings.""" + k8s_manager = await get_kubernetes_manager() + + # Build update dict with only provided fields + updates = {k: v for k, v in update.model_dump().items() if v is not None} + + if not updates: + # No fields to update + cluster = await k8s_manager.get_cluster(cluster_id) + if not cluster: + raise HTTPException(status_code=404, detail="Cluster not found") + return cluster + + updated_cluster = await k8s_manager.update_cluster(cluster_id, updates) + + if not updated_cluster: + raise HTTPException(status_code=404, detail="Cluster not found") + + return updated_cluster + + 
@router.get("/services/available") async def get_available_services( current_user: User = Depends(get_current_user) @@ -316,8 +344,33 @@ async def deploy_service_to_cluster( # TODO: Track deployment status in Deployment record, not ServiceConfig # ServiceConfig no longer tracks deployment state (removed in architecture refactor) - # Add node selector if node_name specified + # Auto-populate k8s_spec with cluster ingress defaults k8s_spec = request.k8s_spec or KubernetesDeploymentSpec() + + # Auto-configure ingress if cluster has ingress configured + if cluster.ingress_domain: + if k8s_spec.ingress is None: + # No ingress config from frontend - apply cluster defaults + if cluster.ingress_enabled_by_default: + # Auto-generate hostname + service_name = resolved_service.name.lower().replace(" ", "-").replace("_", "-") + hostname = f"{service_name}.{cluster.ingress_domain}" + + k8s_spec.ingress = { + "enabled": True, + "host": hostname, + "path": "/", + "ingressClassName": cluster.ingress_class + } + logger.info(f"โœ“ Auto-configured ingress: {hostname}") + elif k8s_spec.ingress.get("enabled") and not k8s_spec.ingress.get("host"): + # Frontend enabled ingress but no hostname - auto-generate + service_name = resolved_service.name.lower().replace(" ", "-").replace("_", "-") + k8s_spec.ingress["host"] = f"{service_name}.{cluster.ingress_domain}" + k8s_spec.ingress["ingressClassName"] = cluster.ingress_class + logger.info(f"โœ“ Auto-generated ingress hostname: {k8s_spec.ingress['host']}") + + # Add node selector if node_name specified if request.node_name: # Add node selector to ensure pod runs on specific node if not k8s_spec.labels: diff --git a/ushadow/backend/src/routers/memories.py b/ushadow/backend/src/routers/memories.py new file mode 100644 index 00000000..b30b68b4 --- /dev/null +++ b/ushadow/backend/src/routers/memories.py @@ -0,0 +1,385 @@ +""" +Unified memory routing layer for ushadow. 
+ +This module provides a single API for querying memories across different sources: +- OpenMemory (shared between Chronicle and Mycelia) +- Mycelia native memory system +- Chronicle native memory system (Qdrant) + +The routing is source-aware and queries the appropriate backend(s). +""" +import logging +from typing import List, Literal, Optional + +import httpx +from fastapi import APIRouter, HTTPException, Depends, Query + +from src.utils.auth_helpers import get_user_email +from pydantic import BaseModel + +from src.services.auth import get_current_user +from src.models.user import User + +logger = logging.getLogger(__name__) +router = APIRouter(prefix="/api/memories", tags=["memories"]) + +# Backend base URL for internal calls +BACKEND_BASE_URL = "http://localhost:8000" + + +class MemoryItem(BaseModel): + """Unified memory response format""" + id: str + content: str + created_at: str + metadata: dict + source: Literal["openmemory", "mycelia", "chronicle"] # Which system it came from + score: Optional[float] = None + + +class ConversationMemoriesResponse(BaseModel): + """Response for conversation memories query""" + conversation_id: str + conversation_source: Literal["chronicle", "mycelia"] + memories: List[MemoryItem] + count: int + sources_queried: List[str] # Which memory systems were checked + + +@router.get("/{memory_id}") +async def get_memory_by_id( + memory_id: str, + current_user: User = Depends(get_current_user) +) -> MemoryItem: + """ + Get a single memory by ID from any memory source. + + Searches across all available memory backends (OpenMemory, Chronicle, Mycelia) + and returns the first match found. 
+ + Args: + memory_id: The memory ID to retrieve + current_user: Authenticated user (from JWT) + + Returns: + Memory item with full details + + Access Control: + - Regular users: Only their own memories + - Admins: All memories + + Raises: + HTTPException: 404 if memory not found + """ + # Try each memory source in priority order + sources_tried = [] + + # 1. Try OpenMemory first (most common source) + try: + openmemory_url = f"{BACKEND_BASE_URL}/api/services/mem0/proxy" + logger.info(f"[MEMORIES] Querying OpenMemory for memory {memory_id}") + sources_tried.append("openmemory") + + async with httpx.AsyncClient() as client: + # Get specific memory by ID + response = await client.get( + f"{openmemory_url}/api/v1/memories/{memory_id}", + params={"user_id": get_user_email(current_user)} + ) + + if response.status_code == 200: + data = response.json() + # Validate access + metadata = data.get("metadata_", {}) + memory_user_email = metadata.get("chronicle_user_email") or metadata.get("user_email") + + if memory_user_email == get_user_email(current_user) or not memory_user_email: + logger.info(f"[MEMORIES] Found memory in OpenMemory") + # OpenMemory uses 'text' field for content + content = data.get("text") or data.get("content", "") + # Include categories in metadata if they exist + if "categories" in data and data["categories"]: + metadata["categories"] = data["categories"] + return MemoryItem( + id=str(data.get("id")), + content=content, + created_at=str(data.get("created_at", "")), + metadata=metadata, + source="openmemory", + score=None + ) + except Exception as e: + logger.error(f"[MEMORIES] OpenMemory query failed: {e}", exc_info=True) + + # 2. 
Try Chronicle native memory system + try: + chronicle_url = f"{BACKEND_BASE_URL}/api/services/chronicle-backend/proxy" + logger.info(f"[MEMORIES] Querying Chronicle for memory {memory_id}") + sources_tried.append("chronicle") + + async with httpx.AsyncClient() as client: + # Try Chronicle's memory endpoint if it exists + response = await client.get(f"{chronicle_url}/api/memories/{memory_id}") + + if response.status_code == 200: + data = response.json() + logger.info(f"[MEMORIES] Found memory in Chronicle") + return MemoryItem( + id=data.get("id"), + content=data.get("content"), + created_at=data.get("created_at"), + metadata=data.get("metadata", {}), + source="chronicle", + score=data.get("score") + ) + except Exception as e: + logger.error(f"[MEMORIES] Chronicle query failed: {e}", exc_info=True) + + # 3. Try Mycelia native memory system + try: + mycelia_url = f"{BACKEND_BASE_URL}/api/services/mycelia-backend/proxy" + logger.info(f"[MEMORIES] Querying Mycelia for memory {memory_id}") + sources_tried.append("mycelia") + + async with httpx.AsyncClient() as client: + response = await client.get(f"{mycelia_url}/api/memories/{memory_id}") + + if response.status_code == 200: + data = response.json() + logger.info(f"[MEMORIES] Found memory in Mycelia") + return MemoryItem( + id=data.get("id"), + content=data.get("content"), + created_at=data.get("created_at"), + metadata=data.get("metadata", {}), + source="mycelia", + score=data.get("score") + ) + except Exception as e: + logger.error(f"[MEMORIES] Mycelia query failed: {e}", exc_info=True) + + # Memory not found in any source + logger.warning(f"[MEMORIES] Memory {memory_id} not found in any source (tried: {sources_tried})") + raise HTTPException( + status_code=404, + detail=f"Memory {memory_id} not found (searched: {', '.join(sources_tried)})" + ) + + +@router.get("/by-conversation/{conversation_id}") +async def get_memories_by_conversation( + conversation_id: str, + conversation_source: Literal["chronicle", "mycelia"] = 
Query(..., description="Which backend has the conversation"), + current_user: User = Depends(get_current_user) +) -> ConversationMemoriesResponse: + """ + Get all memories associated with a conversation across all memory sources. + + This endpoint queries multiple memory backends and aggregates results: + 1. OpenMemory (if available) - checks source_id metadata + 2. Source-specific backend (Chronicle/Mycelia native) + + Args: + conversation_id: The conversation ID to query + conversation_source: Which backend has this conversation ("chronicle" or "mycelia") + current_user: Authenticated user (from JWT) + + Returns: + Aggregated memories from all sources with source attribution + + Access Control: + - Regular users: Only their own conversation memories + - Admins: All conversation memories + """ + all_memories = [] + sources_queried = [] + + # Strategy: Query all available memory sources and aggregate + + # 1. Try OpenMemory (shared memory system) + try: + # Use proxy URL - same method as frontend memoriesApi.getServerUrl() + openmemory_url = f"{BACKEND_BASE_URL}/api/services/mem0/proxy" + logger.info(f"[MEMORIES] Querying OpenMemory via proxy at: {openmemory_url}") + sources_queried.append("openmemory") + openmemory_memories = await _query_openmemory_by_source_id( + openmemory_url, + conversation_id, + get_user_email(current_user) # OpenMemory uses email as user_id + ) + logger.info(f"[MEMORIES] OpenMemory returned {len(openmemory_memories)} memories") + all_memories.extend(openmemory_memories) + except Exception as e: + # OpenMemory not available or query failed - continue with other sources + logger.error(f"[MEMORIES] OpenMemory query failed: {e}", exc_info=True) + + # 2. 
Query conversation-source-specific backend + if conversation_source == "chronicle": + sources_queried.append("chronicle") + try: + # Use proxy URL - same method as frontend + chronicle_url = f"{BACKEND_BASE_URL}/api/services/chronicle-backend/proxy" + logger.info(f"[MEMORIES] Querying Chronicle via proxy at: {chronicle_url}") + chronicle_memories = await _query_chronicle_memories( + chronicle_url, + conversation_id, + current_user + ) + all_memories.extend(chronicle_memories) + except Exception as e: + # Chronicle query failed + logger.error(f"[MEMORIES] Chronicle query failed: {e}", exc_info=True) + + elif conversation_source == "mycelia": + sources_queried.append("mycelia") + try: + # Use proxy URL - same method as frontend + mycelia_url = f"{BACKEND_BASE_URL}/api/services/mycelia-backend/proxy" + logger.info(f"[MEMORIES] Querying Mycelia via proxy at: {mycelia_url}") + mycelia_memories = await _query_mycelia_memories( + mycelia_url, + conversation_id, + current_user + ) + all_memories.extend(mycelia_memories) + except Exception as e: + # Mycelia query failed + logger.error(f"[MEMORIES] Mycelia query failed: {e}", exc_info=True) + + return ConversationMemoriesResponse( + conversation_id=conversation_id, + conversation_source=conversation_source, + memories=all_memories, + count=len(all_memories), + sources_queried=sources_queried + ) + + +async def _query_openmemory_by_source_id( + openmemory_url: str, + source_id: str, + user_email: str +) -> List[MemoryItem]: + """ + Query OpenMemory for memories with specific source_id in metadata. + + Access control: Validates chronicle_user_email in metadata matches current user. 
+ """ + memories = [] + + logger.info(f"[MEMORIES] _query_openmemory: url={openmemory_url}, source_id={source_id}, user={user_email}") + + async with httpx.AsyncClient() as client: + # Query all memories for user + query_url = f"{openmemory_url}/api/v1/memories/" + params = {"user_id": user_email, "limit": 100} + logger.info(f"[MEMORIES] Querying: {query_url} with params: {params}") + + response = await client.get(query_url, params=params) + logger.info(f"[MEMORIES] OpenMemory response status: {response.status_code}") + response.raise_for_status() + data = response.json() + logger.info(f"[MEMORIES] OpenMemory returned {len(data.get('items', []))} total memories") + + # Filter by source_id in metadata + if "items" in data: + for item in data["items"]: + metadata = item.get("metadata_", {}) + + # Check if this memory belongs to the conversation + if metadata.get("source_id") == source_id: + # Validate access (check chronicle_user_email or user_id) + memory_user_email = metadata.get("chronicle_user_email") or metadata.get("user_email") + if memory_user_email == user_email or not memory_user_email: + # OpenMemory uses 'text' field for content + content = item.get("text") or item.get("content", "") + # Include categories in metadata if they exist + if "categories" in item and item["categories"]: + metadata["categories"] = item["categories"] + memories.append(MemoryItem( + id=str(item.get("id")), + content=content, + created_at=str(item.get("created_at", "")), + metadata=metadata, + source="openmemory", + score=None + )) + + return memories + + +async def _query_chronicle_memories( + chronicle_url: str, + conversation_id: str, + current_user: User +) -> List[MemoryItem]: + """ + Query Chronicle native memory system (via conversation endpoint). + + Chronicle may use: + - Qdrant (native) + - OpenMemory (already queried above - will deduplicate) + + Auth is handled by the service proxy. 
+ """ + memories = [] + + async with httpx.AsyncClient() as client: + # Chronicle has /api/conversations/{id}/memories endpoint + # Proxy handles authentication forwarding + response = await client.get( + f"{chronicle_url}/api/conversations/{conversation_id}/memories" + ) + + if response.status_code == 200: + data = response.json() + for mem in data.get("memories", []): + memories.append(MemoryItem( + id=mem.get("id"), + content=mem.get("content"), + created_at=mem.get("created_at"), + metadata=mem.get("metadata", {}), + source="chronicle", + score=mem.get("score") + )) + + return memories + + +async def _query_mycelia_memories( + mycelia_url: str, + conversation_id: str, + current_user: User +) -> List[MemoryItem]: + """ + Query Mycelia native memory system. + + Mycelia may have its own memory endpoints or use OpenMemory. + + Auth is handled by the service proxy. + """ + memories = [] + + async with httpx.AsyncClient() as client: + # Try Mycelia's conversation memories endpoint if it exists + try: + response = await client.get( + f"{mycelia_url}/api/conversations/{conversation_id}/memories" + ) + + if response.status_code == 200: + data = response.json() + for mem in data.get("memories", []): + memories.append(MemoryItem( + id=mem.get("id"), + content=mem.get("content"), + created_at=mem.get("created_at"), + metadata=mem.get("metadata", {}), + source="mycelia", + score=mem.get("score") + )) + except: + # Mycelia might not have this endpoint yet + pass + + return memories diff --git a/ushadow/backend/src/routers/services.py b/ushadow/backend/src/routers/services.py index b6332c6b..99428c51 100644 --- a/ushadow/backend/src/routers/services.py +++ b/ushadow/backend/src/routers/services.py @@ -726,6 +726,20 @@ async def proxy_service_request( logger.info(f"[PROXY] Token payload: iss={payload.get('iss')}, aud={payload.get('aud')}, sub={payload.get('sub')}") except Exception as e: logger.debug(f"[PROXY] Could not decode token: {e}") + + # Bridge Keycloak tokens to 
service tokens for Chronicle + from src.services.token_bridge import bridge_to_service_token + token_without_bearer = auth_header.replace("Bearer ", "") + service_token = await bridge_to_service_token( + token_without_bearer, + audiences=["ushadow", "chronicle"] + ) + if service_token and service_token != token_without_bearer: + # Token was bridged (Keycloak โ†’ service token) + headers["authorization"] = f"Bearer {service_token}" + logger.info(f"[PROXY] โœ“ Bridged Keycloak token to service token") + else: + logger.debug(f"[PROXY] Token passed through (already a service token or bridging failed)") else: logger.warning(f"[PROXY] No Authorization header in request to {name}") diff --git a/ushadow/backend/src/routers/share.py b/ushadow/backend/src/routers/share.py new file mode 100644 index 00000000..332c4ed6 --- /dev/null +++ b/ushadow/backend/src/routers/share.py @@ -0,0 +1,322 @@ +"""Share API endpoints for conversation and resource sharing. + +Provides HTTP endpoints for creating, accessing, and managing share tokens. +Thin router layer that delegates to ShareService for business logic. +""" + +import logging +import os +from typing import List, Optional + +from fastapi import APIRouter, Depends, HTTPException, Request +from motor.motor_asyncio import AsyncIOMotorDatabase + +from ..database import get_database +from .tailscale import _read_config as read_tailscale_config +from ..models.share import ( + ShareAccessLog, + ShareToken, + ShareTokenCreate, + ShareTokenResponse, +) +from ..models.user import User +from ..services.auth import get_current_user, get_optional_current_user +from ..services.share_service import ShareService + +logger = logging.getLogger(__name__) + +router = APIRouter(prefix="/api/share", tags=["sharing"]) + + +def _get_share_base_url() -> str: + """Determine the base URL for share links. + + Strategy hierarchy: + 1. SHARE_BASE_URL environment variable (highest priority) + 2. 
SHARE_PUBLIC_GATEWAY environment variable (for external sharing) + 3. Tailscale hostname (for Tailnet-only sharing) + 4. Fallback to localhost (development only) + + Returns: + Base URL string (e.g., "https://ushadow.tail12345.ts.net" or "https://share.yourdomain.com") + """ + # Explicit override (highest priority) + if base_url := os.getenv("SHARE_BASE_URL"): + logger.info(f"Using explicit SHARE_BASE_URL: {base_url}") + return base_url.rstrip("/") + + # Public gateway for external sharing + if gateway_url := os.getenv("SHARE_PUBLIC_GATEWAY"): + logger.info(f"Using public gateway: {gateway_url}") + return gateway_url.rstrip("/") + + # Use Tailscale hostname (works with or without Funnel) + try: + config = read_tailscale_config() + if config and config.hostname: + tailscale_url = f"https://{config.hostname}" + logger.info(f"Using Tailscale hostname: {tailscale_url}") + return tailscale_url + except Exception as e: + logger.warning(f"Failed to read Tailscale config: {e}") + + # Fallback for development + logger.warning("Using localhost fallback - shares will only work locally!") + return "http://localhost:3000" + + +def get_share_service(db: AsyncIOMotorDatabase = Depends(get_database)) -> ShareService: + """Dependency injection for ShareService. + + Args: + db: MongoDB database (injected) + + Returns: + ShareService instance + """ + base_url = _get_share_base_url() + logger.info(f"Share service initialized with base_url: {base_url}") + return ShareService(db=db, base_url=base_url) + + +@router.post("/create", response_model=ShareTokenResponse, status_code=201) +async def create_share_token( + data: ShareTokenCreate, + current_user: User = Depends(get_current_user), + service: ShareService = Depends(get_share_service), +) -> ShareTokenResponse: + """Create a new share token for a resource. + + Requires authentication. User must have permission to share the resource. 
+ + Args: + data: Share token creation parameters + current_user: Authenticated user + service: Share service instance + + Returns: + Created share token with share URL + + Raises: + 400: If resource doesn't exist or user lacks permission + 401: If not authenticated + """ + try: + share_token = await service.create_share_token(data, current_user) + return service.to_response(share_token) + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + + +@router.get("/{token}", response_model=dict) +async def access_shared_resource( + token: str, + request: Request, + current_user: Optional[User] = Depends(get_optional_current_user), + service: ShareService = Depends(get_share_service), +) -> dict: + """Access a shared resource via share token. + + Public endpoint - does not require authentication unless share requires it. + Records access in audit log. + + Args: + token: Share token UUID + request: HTTP request (for IP address) + current_user: Optional authenticated user + service: Share service instance + + Returns: + Shared resource data with permissions + + Raises: + 403: If access denied (expired, limit exceeded, etc.) 
+        404: If share token not found
+    """
+    # Get user email if authenticated
+    from src.utils.auth_helpers import get_user_email
+    user_email = get_user_email(current_user) if current_user else None
+
+    # Get request IP for Tailscale validation
+    request_ip = request.client.host if request.client else None
+
+    # Validate access
+    is_valid, share_token, reason = await service.validate_share_access(
+        token=token,
+        user_email=user_email,
+        request_ip=request_ip,
+    )
+
+    if not is_valid:
+        if share_token is None:
+            raise HTTPException(status_code=404, detail="Share token not found")
+        raise HTTPException(status_code=403, detail=reason)
+
+    # Record access
+    user_identifier = user_email or request_ip or "anonymous"
+    metadata = {
+        "ip": request_ip,
+        "user_agent": request.headers.get("user-agent"),
+    }
+    await service.record_share_access(
+        share_token=share_token,
+        user_identifier=user_identifier,
+        action="view",
+        metadata=metadata,
+    )
+
+    # TODO: Fetch actual resource data from Chronicle/Mycelia
+    # For now, return share token info and placeholder resource
+    return {
+        "share_token": service.to_response(share_token).model_dump(),
+        "resource": {
+            "type": share_token.resource_type,
+            "id": share_token.resource_id,
+            # TODO: Add actual resource data here
+            "data": f"Placeholder for {share_token.resource_type}:{share_token.resource_id}",
+        },
+        "permissions": share_token.permissions,
+    }
+
+
+@router.delete("/{token}", status_code=204)
+async def revoke_share_token(
+    token: str,
+    current_user: User = Depends(get_current_user),
+    service: ShareService = Depends(get_share_service),
+):
+    """Revoke a share token.
+
+    Requires authentication. User must be the creator or admin. 
+ + Args: + token: Share token to revoke + current_user: Authenticated user + service: Share service instance + + Raises: + 403: If user lacks permission + 404: If share token not found + """ + try: + revoked = await service.revoke_share_token(token, current_user) + if not revoked: + raise HTTPException(status_code=404, detail="Share token not found") + except ValueError as e: + raise HTTPException(status_code=403, detail=str(e)) + + +@router.get("/resource/{resource_type}/{resource_id}", response_model=List[ShareTokenResponse]) +async def list_shares_for_resource( + resource_type: str, + resource_id: str, + current_user: User = Depends(get_current_user), + service: ShareService = Depends(get_share_service), +) -> List[ShareTokenResponse]: + """List all share tokens for a resource. + + Requires authentication. User must have access to the resource. + + Args: + resource_type: Type of resource (conversation, memory, etc.) + resource_id: ID of resource + current_user: Authenticated user + service: Share service instance + + Returns: + List of share tokens for the resource + """ + share_tokens = await service.list_shares_for_resource( + resource_type=resource_type, + resource_id=resource_id, + user=current_user, + ) + return [service.to_response(token) for token in share_tokens] + + +@router.get("/{token}/logs", response_model=List[ShareAccessLog]) +async def get_share_access_logs( + token: str, + current_user: User = Depends(get_current_user), + service: ShareService = Depends(get_share_service), +) -> List[ShareAccessLog]: + """Get access logs for a share token. + + Requires authentication. User must be creator or admin. 
+ + Args: + token: Share token + current_user: Authenticated user + service: Share service instance + + Returns: + List of access log entries + + Raises: + 403: If user lacks permission + 404: If share token not found + """ + try: + return await service.get_share_access_logs(token, current_user) + except ValueError as e: + if "not found" in str(e).lower(): + raise HTTPException(status_code=404, detail=str(e)) + raise HTTPException(status_code=403, detail=str(e)) + + +# Convenience endpoints for specific resource types + +@router.post("/conversations/{conversation_id}", response_model=ShareTokenResponse, status_code=201) +async def share_conversation( + conversation_id: str, + data: ShareTokenCreate, + current_user: User = Depends(get_current_user), + service: ShareService = Depends(get_share_service), +) -> ShareTokenResponse: + """Convenience endpoint for sharing a conversation. + + Automatically sets resource_type to 'conversation' and uses path parameter + for resource_id. Otherwise identical to POST /api/share/create. + + Args: + conversation_id: ID of conversation to share + data: Share token parameters (resource_type/resource_id will be overridden) + current_user: Authenticated user + service: Share service instance + + Returns: + Created share token with share URL + """ + # Override resource type and ID from path + data.resource_type = "conversation" + data.resource_id = conversation_id + + try: + share_token = await service.create_share_token(data, current_user) + return service.to_response(share_token) + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + + +@router.get("/conversations/{conversation_id}/shares", response_model=List[ShareTokenResponse]) +async def list_conversation_shares( + conversation_id: str, + current_user: User = Depends(get_current_user), + service: ShareService = Depends(get_share_service), +) -> List[ShareTokenResponse]: + """Convenience endpoint for listing shares of a conversation. 
+ + Args: + conversation_id: ID of conversation + current_user: Authenticated user + service: Share service instance + + Returns: + List of share tokens for the conversation + """ + share_tokens = await service.list_shares_for_resource( + resource_type="conversation", + resource_id=conversation_id, + user=current_user, + ) + return [service.to_response(token) for token in share_tokens] diff --git a/ushadow/backend/src/routers/tailscale.py b/ushadow/backend/src/routers/tailscale.py index eea0366a..71e45383 100644 --- a/ushadow/backend/src/routers/tailscale.py +++ b/ushadow/backend/src/routers/tailscale.py @@ -703,9 +703,11 @@ async def get_mobile_connection_qr( # Generate auth token for mobile app (valid for ushadow and chronicle) # Both services now share the same database (ushadow-blue) so user IDs match + from src.utils.auth_helpers import get_user_id, get_user_email + auth_token = generate_jwt_for_service( - user_id=str(current_user.id), - user_email=current_user.email, + user_id=get_user_id(current_user), + user_email=get_user_email(current_user), audiences=["ushadow", "chronicle"] ) @@ -989,24 +991,16 @@ async def start_tailscale_container( # Container doesn't exist - create it using Docker SDK logger.info(f"Creating Tailscale container '{container_name}' for environment '{env_name}'...") - # Ensure infra network exists + # Ensure ushadow-network exists try: - infra_network = _get_docker_client().networks.get("infra-network") + ushadow_network = _get_docker_client().networks.get("ushadow-network") + logger.info(f"Found ushadow-network") except docker.errors.NotFound: raise HTTPException( status_code=400, - detail="infra-network not found. Please start infrastructure first." + detail="ushadow-network not found. Please start infrastructure first." 
) - # Get environment's compose network if it exists - env_network_name = f"{env_name}_default" - env_network = None - try: - env_network = _get_docker_client().networks.get(env_network_name) - logger.info(f"Connecting to environment network: {env_network_name}") - except docker.errors.NotFound: - logger.debug(f"Environment network '{env_network_name}' not found - using infra-network only") - # Create volume if it doesn't exist (per-environment) try: _get_docker_client().volumes.get(volume_name) @@ -1044,19 +1038,11 @@ async def start_tailscale_container( f"{PROJECT_ROOT}/config": {"bind": "/config", "mode": "ro"}, }, cap_add=["NET_ADMIN", "NET_RAW"], - network="infra-network", + network="ushadow-network", # All app containers and infrastructure on this network restart_policy={"Name": "unless-stopped"}, command="sh -c 'tailscaled --tun=userspace-networking --statedir=/var/lib/tailscale & sleep infinity'" ) - # Connect to environment's compose network for routing to backend/frontend - if env_network: - try: - env_network.connect(container) - logger.info(f"Connected Tailscale container to environment network '{env_network_name}'") - except Exception as e: - logger.warning(f"Failed to connect to environment network: {e}") - logger.info(f"Tailscale container '{container_name}' created with hostname '{ts_hostname}': {container.id}") # Wait for tailscaled to be ready before returning diff --git a/ushadow/backend/src/routers/unodes.py b/ushadow/backend/src/routers/unodes.py index 51cbcdd0..0b41d0cc 100644 --- a/ushadow/backend/src/routers/unodes.py +++ b/ushadow/backend/src/routers/unodes.py @@ -551,8 +551,9 @@ async def create_join_token( Returns the token and a one-liner join command. 
""" unode_manager = await get_unode_manager() + from src.utils.auth_helpers import get_user_id response = await unode_manager.create_join_token( - user_id=current_user.id, + user_id=get_user_id(current_user), request=request ) diff --git a/ushadow/backend/src/services/auth.py b/ushadow/backend/src/services/auth.py index 11d2e8c7..1c54f203 100644 --- a/ushadow/backend/src/services/auth.py +++ b/ushadow/backend/src/services/auth.py @@ -240,7 +240,14 @@ async def read_token( ) # User dependencies for protecting endpoints -get_current_user = fastapi_users.current_user(active=True) +# Import hybrid auth dependency that accepts both legacy JWT and Keycloak tokens +from src.services.keycloak_auth import get_current_user_hybrid + +# Use hybrid authentication for all endpoints (supports both legacy and Keycloak) +get_current_user = get_current_user_hybrid + +# Legacy fastapi-users dependencies (kept for backwards compatibility if needed) +_legacy_get_current_user = fastapi_users.current_user(active=True) get_optional_current_user = fastapi_users.current_user(active=True, optional=True) get_current_superuser = fastapi_users.current_user(active=True, superuser=True) diff --git a/ushadow/backend/src/services/dashboard_service.py b/ushadow/backend/src/services/dashboard_service.py new file mode 100644 index 00000000..640bb434 --- /dev/null +++ b/ushadow/backend/src/services/dashboard_service.py @@ -0,0 +1,241 @@ +"""Dashboard service for aggregating Chronicle data. + +This service fetches recent conversations and memories from Chronicle +and provides a unified dashboard view. 
+""" + +import logging +from datetime import datetime +from typing import List, Optional + +import httpx + +from src.models.dashboard import ( + ActivityEvent, + ActivityType, + DashboardData, + DashboardStats, +) + +logger = logging.getLogger(__name__) + +# Chronicle service configuration +CHRONICLE_URL = "http://chronicle-backend:8000" +CHRONICLE_TIMEOUT = 5.0 + + +class DashboardService: + """ + Aggregates Chronicle data for the dashboard. + + Fetches recent conversations and memories, providing + statistics and activity feeds. + """ + + async def get_dashboard_data( + self, + conversation_limit: int = 10, + memory_limit: int = 10, + ) -> DashboardData: + """ + Get complete dashboard data. + + Args: + conversation_limit: Max number of recent conversations + memory_limit: Max number of recent memories + + Returns: + Complete dashboard data with stats and activities + """ + # Fetch data from Chronicle + conversations = await self._fetch_conversations(limit=conversation_limit) + memories = await self._fetch_memories(limit=memory_limit) + + # Convert to activity events + conversation_activities = self._conversations_to_activities(conversations) + memory_activities = self._memories_to_activities(memories) + + # Calculate stats + stats = DashboardStats( + conversation_count=len(conversation_activities), + memory_count=len(memory_activities), + ) + + return DashboardData( + stats=stats, + recent_conversations=conversation_activities, + recent_memories=memory_activities, + last_updated=datetime.utcnow(), + ) + + # ========================================================================= + # Chronicle data fetching + # ========================================================================= + + async def _fetch_conversations(self, limit: int = 10) -> List[dict]: + """ + Fetch recent conversations from Chronicle. 
+ + Args: + limit: Maximum number of conversations to fetch + + Returns: + List of conversation dicts from Chronicle API + """ + try: + async with httpx.AsyncClient(timeout=CHRONICLE_TIMEOUT) as client: + response = await client.get( + f"{CHRONICLE_URL}/api/conversations", + params={"page": 1, "limit": limit}, + ) + if response.status_code == 200: + data = response.json() + return data.get("items", []) + except Exception as e: + logger.warning(f"Failed to fetch conversations: {e}") + + return [] + + async def _fetch_memories(self, limit: int = 10) -> List[dict]: + """ + Fetch recent memories from Chronicle. + + Args: + limit: Maximum number of memories to fetch + + Returns: + List of memory dicts from Chronicle API + """ + try: + async with httpx.AsyncClient(timeout=CHRONICLE_TIMEOUT) as client: + response = await client.get( + f"{CHRONICLE_URL}/api/memories", + params={"limit": limit}, + ) + if response.status_code == 200: + data = response.json() + # Chronicle returns either a list or a dict with items + if isinstance(data, list): + return data + return data.get("items", []) + except Exception as e: + logger.warning(f"Failed to fetch memories: {e}") + + return [] + + # ========================================================================= + # Data transformation + # ========================================================================= + + def _conversations_to_activities( + self, conversations: List[dict] + ) -> List[ActivityEvent]: + """ + Convert Chronicle conversations to activity events. 
+ + Args: + conversations: Raw conversation data from Chronicle + + Returns: + List of ActivityEvent objects + """ + activities = [] + + for conv in conversations: + # Parse timestamp + timestamp = self._parse_timestamp( + conv.get("created_at") or conv.get("timestamp") + ) + + # Create activity event + activities.append( + ActivityEvent( + id=f"conv-{conv.get('id', 'unknown')}", + type=ActivityType.CONVERSATION, + title=conv.get("title") or "Untitled Conversation", + description=conv.get("summary"), + timestamp=timestamp, + metadata={ + "duration": conv.get("duration"), + "message_count": conv.get("message_count", 0), + }, + source="chronicle", + ) + ) + + return activities + + def _memories_to_activities(self, memories: List[dict]) -> List[ActivityEvent]: + """ + Convert Chronicle memories to activity events. + + Args: + memories: Raw memory data from Chronicle + + Returns: + List of ActivityEvent objects + """ + activities = [] + + for mem in memories: + timestamp = self._parse_timestamp(mem.get("timestamp")) + + # Truncate long content for title + content = mem.get("content", "") + title = content[:60] + "..." if len(content) > 60 else content + + activities.append( + ActivityEvent( + id=f"mem-{mem.get('id', 'unknown')}", + type=ActivityType.MEMORY, + title=title, + description=content if len(content) > 60 else None, + timestamp=timestamp, + metadata={ + "type": mem.get("type"), + "tags": mem.get("tags", []), + }, + source="chronicle", + ) + ) + + return activities + + # ========================================================================= + # Utilities + # ========================================================================= + + def _parse_timestamp(self, timestamp_str: Optional[str]) -> datetime: + """ + Parse timestamp string to datetime. 
+ + Args: + timestamp_str: ISO format timestamp string + + Returns: + Parsed datetime, or current time if parsing fails + """ + if not timestamp_str: + return datetime.utcnow() + + try: + # Try ISO format with timezone + return datetime.fromisoformat(timestamp_str.replace("Z", "+00:00")) + except Exception: + try: + # Try without timezone + return datetime.fromisoformat(timestamp_str) + except Exception: + logger.warning(f"Failed to parse timestamp: {timestamp_str}") + return datetime.utcnow() + + +# Dependency injection +async def get_dashboard_service() -> DashboardService: + """ + Provide DashboardService instance. + + Returns: + Configured DashboardService instance + """ + return DashboardService() diff --git a/ushadow/backend/src/services/deployment_backends.py b/ushadow/backend/src/services/deployment_backends.py index f1161b0e..9a4cc3de 100644 --- a/ushadow/backend/src/services/deployment_backends.py +++ b/ushadow/backend/src/services/deployment_backends.py @@ -172,8 +172,32 @@ async def _deploy_local( return deployment except docker.errors.ImageNotFound as e: - logger.error(f"Image not found: {resolved_service.image}") - raise ValueError(f"Docker image not found: {resolved_service.image}") + logger.warning(f"Image not found locally: {resolved_service.image}, attempting to pull...") + + try: + # Attempt to pull the image + logger.info(f"Pulling image: {resolved_service.image}") + docker_client.images.pull(resolved_service.image) + logger.info(f"โœ… Successfully pulled image: {resolved_service.image}") + + # Retry deployment after successful pull + logger.info(f"Retrying deployment after image pull...") + return await self._deploy_local( + unode, + resolved_service, + deployment_id, + container_name + ) + + except docker.errors.ImageNotFound as pull_error: + logger.error(f"Image not found in registry: {resolved_service.image}") + raise ValueError(f"Docker image not found: {resolved_service.image}. 
Image does not exist in registry.") + except docker.errors.APIError as pull_error: + logger.error(f"Failed to pull image: {pull_error}") + raise ValueError(f"Failed to pull Docker image {resolved_service.image}: {str(pull_error)}") + except Exception as pull_error: + logger.error(f"Error pulling image: {pull_error}") + raise ValueError(f"Failed to pull Docker image {resolved_service.image}: {str(pull_error)}") except docker.errors.APIError as e: logger.error(f"Docker API error: {e}") raise ValueError(f"Docker deployment failed: {str(e)}") diff --git a/ushadow/backend/src/services/deployment_manager.py b/ushadow/backend/src/services/deployment_manager.py index a969a11c..2eb3c28b 100644 --- a/ushadow/backend/src/services/deployment_manager.py +++ b/ushadow/backend/src/services/deployment_manager.py @@ -176,11 +176,15 @@ async def resolve_service_for_deployment( compose_registry = get_compose_registry() + logger.info(f"[DEBUG resolve_service_for_deployment] Called with service_id={service_id}, config_id={config_id}") + # Get service from compose registry service = compose_registry.get_service(service_id) if not service: raise ValueError(f"Service not found: {service_id}") + logger.info(f"[DEBUG resolve_service_for_deployment] Found service: service_id={service.service_id}, service_name={service.service_name}") + # Use new Settings API to resolve environment variables from src.config import get_settings settings = get_settings() @@ -350,12 +354,13 @@ async def resolve_service_for_deployment( if isinstance(networks, list): network = networks[0] if networks else None elif isinstance(networks, dict): - # Dict format: {"infra-network": null} - get first key + # Dict format: {"ushadow-network": null} - get first key network = list(networks.keys())[0] if networks else None else: network = None # Create ResolvedServiceDefinition + logger.info(f"[DEBUG resolve_service_for_deployment] Creating ResolvedServiceDefinition with service_id={service_id}, 
service_name={service.service_name}") resolved = ResolvedServiceDefinition( service_id=service_id, name=service.service_name, @@ -506,6 +511,8 @@ async def deploy_service( config_id: ServiceConfig ID or Template ID (required) - references config to use namespace: Optional K8s namespace (only used for K8s deployments) """ + logger.info(f"[DEBUG deploy_service] Called with service_id={service_id}, config_id={config_id}") + # Resolve service with all variables substituted try: resolved_service = await self.resolve_service_for_deployment( @@ -513,6 +520,7 @@ async def deploy_service( deploy_target=unode_hostname, config_id=config_id ) + logger.info(f"[DEBUG deploy_service] Resolved service has service_id={resolved_service.service_id}, name={resolved_service.name}") except ValueError as e: logger.error(f"Failed to resolve service {service_id}: {e}") raise ValueError(f"Service resolution failed: {e}") diff --git a/ushadow/backend/src/services/deployment_platforms.py b/ushadow/backend/src/services/deployment_platforms.py index 16cce74d..d231841c 100644 --- a/ushadow/backend/src/services/deployment_platforms.py +++ b/ushadow/backend/src/services/deployment_platforms.py @@ -173,19 +173,43 @@ async def _deploy_local( try: docker_client = docker.from_env() - # Parse ports to Docker format + # ===== PORT CONFIGURATION ===== + # Parse all port-related configuration in one place + logger.info(f"[PORT DEBUG] Starting port parsing for {resolved_service.service_id}") + logger.info(f"[PORT DEBUG] Input ports from resolved_service.ports: {resolved_service.ports}") + port_bindings = {} exposed_ports = {} + exposed_port = None # First host port for deployment tracking + for port_str in resolved_service.ports: + logger.info(f"[PORT DEBUG] Processing port_str: {port_str}") if ":" in port_str: host_port, container_port = port_str.split(":") port_key = f"{container_port}/tcp" port_bindings[port_key] = int(host_port) exposed_ports[port_key] = {} + + # Save first host port for deployment 
tracking + if exposed_port is None: + exposed_port = int(host_port) + + logger.info(f"[PORT DEBUG] Mapped: host={host_port} -> container={container_port} (key={port_key})") else: port_key = f"{port_str}/tcp" exposed_ports[port_key] = {} + # Save first port for deployment tracking + if exposed_port is None: + exposed_port = int(port_str) + + logger.info(f"[PORT DEBUG] Exposed only: {port_key}") + + logger.info(f"[PORT DEBUG] Final port_bindings: {port_bindings}") + logger.info(f"[PORT DEBUG] Final exposed_ports: {exposed_ports}") + logger.info(f"[PORT DEBUG] Tracking exposed_port: {exposed_port}") + # ===== END PORT CONFIGURATION ===== + # Create container with ushadow labels for stateless tracking from datetime import datetime, timezone labels = { @@ -214,50 +238,42 @@ async def _deploy_local( logger.info(f"Creating container {container_name} from image {resolved_service.image}") - # Add service name as network alias so Docker DNS works - # This allows containers to reach each other by service name (e.g., "mycelia-python-worker") - # We use the low-level API to properly set network aliases - networking_config = docker_client.api.create_networking_config({ - network: docker_client.api.create_endpoint_config( - aliases=[resolved_service.service_id] - ) - }) + # Use high-level API which handles port format better + # High-level API expects ports dict like: {'8000/tcp': 8090} for host port mapping + logger.info(f"[PORT DEBUG] Creating container with high-level API") + logger.info(f"[PORT DEBUG] ports (high-level format): {port_bindings}") - # Build host config for ports and restart policy - host_config = docker_client.api.create_host_config( - port_bindings=port_bindings, - restart_policy={"Name": resolved_service.restart_policy or "unless-stopped"}, - binds=resolved_service.volumes if resolved_service.volumes else None, - ) - - # Create container using low-level API (properly supports networking_config) - container_data = docker_client.api.create_container( + 
container = docker_client.containers.create( image=resolved_service.image, name=container_name, labels=labels, environment=resolved_service.environment, - host_config=host_config, command=resolved_service.command, - networking_config=networking_config, + ports=port_bindings, # High-level API takes port_bindings directly as 'ports' + volumes={v.split(':')[0]: {'bind': v.split(':')[1], 'mode': v.split(':')[2] if len(v.split(':')) > 2 else 'rw'} + for v in (resolved_service.volumes or [])}, + restart_policy={"Name": resolved_service.restart_policy or "unless-stopped"}, detach=True, ) + logger.info(f"[PORT DEBUG] Container created with ID: {container.id[:12]}") - # Get container object and start it - container = docker_client.containers.get(container_data['Id']) + # Connect to custom network with service name as alias BEFORE starting + # This allows containers to reach each other by service name (e.g., "mycelia-python-worker") + logger.info(f"[PORT DEBUG] Connecting container to network {network} with alias {resolved_service.service_id}") + network_obj = docker_client.networks.get(network) + network_obj.connect(container, aliases=[resolved_service.service_id]) + logger.info(f"[PORT DEBUG] Connected to network {network}") + + # Now start the container + logger.info(f"[PORT DEBUG] Starting container {container_name}...") container.start() + # Reload to get updated port info + container.reload() + logger.info(f"[PORT DEBUG] Container started. 
Ports mapping: {container.ports}") logger.info(f"Container {container_name} created and started: {container.id[:12]}") - # Extract exposed port - exposed_port = None - if resolved_service.ports: - first_port = resolved_service.ports[0] - if ":" in first_port: - exposed_port = int(first_port.split(":")[0]) - else: - exposed_port = int(first_port) - - # Build deployment object + # Build deployment object (exposed_port was extracted during port parsing above) hostname = target.identifier # Use standardized field (hostname for Docker targets) deployment = Deployment( id=deployment_id, @@ -284,8 +300,34 @@ async def _deploy_local( return deployment except docker.errors.ImageNotFound as e: - logger.error(f"Image not found: {resolved_service.image}") - raise ValueError(f"Docker image not found: {resolved_service.image}") + logger.warning(f"Image not found locally: {resolved_service.image}, attempting to pull...") + + try: + # Attempt to pull the image + logger.info(f"Pulling image: {resolved_service.image}") + docker_client.images.pull(resolved_service.image) + logger.info(f"โœ… Successfully pulled image: {resolved_service.image}") + + # Retry deployment after successful pull + logger.info(f"Retrying deployment after image pull...") + return await self._deploy_local( + target, + resolved_service, + deployment_id, + container_name, + project_name, + config_id + ) + + except docker.errors.ImageNotFound as pull_error: + logger.error(f"Image not found in registry: {resolved_service.image}") + raise ValueError(f"Docker image not found: {resolved_service.image}. 
Image does not exist in registry.") + except docker.errors.APIError as pull_error: + logger.error(f"Failed to pull image: {pull_error}") + raise ValueError(f"Failed to pull Docker image {resolved_service.image}: {str(pull_error)}") + except Exception as pull_error: + logger.error(f"Error pulling image: {pull_error}") + raise ValueError(f"Failed to pull Docker image {resolved_service.image}: {str(pull_error)}") except docker.errors.APIError as e: logger.error(f"Docker API error: {e}") raise ValueError(f"Docker deployment failed: {str(e)}") diff --git a/ushadow/backend/src/services/docker_manager.py b/ushadow/backend/src/services/docker_manager.py index b3bce417..64d3a2eb 100644 --- a/ushadow/backend/src/services/docker_manager.py +++ b/ushadow/backend/src/services/docker_manager.py @@ -1287,6 +1287,7 @@ async def _start_service_via_compose(self, service_name: str, compose_file: str, # Get docker service name from the discovered service docker_service_name = discovered.service_name if discovered else service_name + logger.info(f"[DEBUG] Deploying service_name={service_name} -> docker_service_name={docker_service_name}, discovered={discovered.service_id if discovered else None}") # Build environment variables from service configuration # All env vars are passed via subprocess_env for compose ${VAR} substitution diff --git a/ushadow/backend/src/services/keycloak_admin.py b/ushadow/backend/src/services/keycloak_admin.py new file mode 100644 index 00000000..e1cb80b7 --- /dev/null +++ b/ushadow/backend/src/services/keycloak_admin.py @@ -0,0 +1,400 @@ +""" +Keycloak Admin API Service + +Manages Keycloak configuration programmatically via Admin REST API. +Primary use case: Dynamic redirect URI registration for multi-environment worktrees. 
+ +Each Ushadow environment (worktree) runs on a different port: +- ushadow: 3010 (PORT_OFFSET=10) +- ushadow-orange: 3020 (PORT_OFFSET=20) +- ushadow-yellow: 3030 (PORT_OFFSET=30) + +This service ensures Keycloak accepts redirects from all active environments. +""" + +import os +import logging +import httpx +from typing import Optional, List + +logger = logging.getLogger(__name__) + + +class KeycloakAdminClient: + """Keycloak Admin API client for managing realm configuration.""" + + def __init__( + self, + keycloak_url: str, + realm: str, + admin_user: str, + admin_password: str, + ): + self.keycloak_url = keycloak_url + self.realm = realm + self.admin_user = admin_user + self.admin_password = admin_password + self._access_token: Optional[str] = None + + async def _get_admin_token(self) -> str: + """ + Get admin access token for Keycloak Admin API. + + Uses master realm admin credentials to authenticate. + Token is cached and reused until it expires. + """ + if self._access_token: + # TODO: Check token expiration and refresh if needed + return self._access_token + + token_url = f"{self.keycloak_url}/realms/master/protocol/openid-connect/token" + + async with httpx.AsyncClient() as client: + try: + response = await client.post( + token_url, + data={ + "grant_type": "password", + "client_id": "admin-cli", + "username": self.admin_user, + "password": self.admin_password, + }, + timeout=10.0, + ) + + if response.status_code != 200: + logger.error(f"[KC-ADMIN] Failed to get admin token: {response.text}") + raise Exception(f"Failed to authenticate as Keycloak admin: {response.status_code}") + + tokens = response.json() + self._access_token = tokens["access_token"] + logger.info("[KC-ADMIN] โœ“ Authenticated as Keycloak admin") + return self._access_token + + except httpx.RequestError as e: + logger.error(f"[KC-ADMIN] Failed to connect to Keycloak: {e}") + raise Exception(f"Failed to connect to Keycloak Admin API: {e}") + + async def get_client_by_client_id(self, 
client_id: str) -> Optional[dict]: + """ + Get Keycloak client configuration by client_id. + + Args: + client_id: The client_id (e.g., "ushadow-frontend") + + Returns: + Client configuration dict if found, None otherwise + """ + token = await self._get_admin_token() + url = f"{self.keycloak_url}/admin/realms/{self.realm}/clients" + + async with httpx.AsyncClient() as client: + try: + response = await client.get( + url, + headers={"Authorization": f"Bearer {token}"}, + params={"clientId": client_id}, + timeout=10.0, + ) + + if response.status_code != 200: + logger.error(f"[KC-ADMIN] Failed to get client: {response.text}") + return None + + clients = response.json() + if not clients or len(clients) == 0: + logger.warning(f"[KC-ADMIN] Client '{client_id}' not found") + return None + + return clients[0] # Returns first match + + except httpx.RequestError as e: + logger.error(f"[KC-ADMIN] Failed to get client: {e}") + return None + + async def update_client_redirect_uris( + self, + client_id: str, + redirect_uris: List[str], + merge: bool = True + ) -> bool: + """ + Update redirect URIs for a Keycloak client. + + Args: + client_id: The client_id (e.g., "ushadow-frontend") + redirect_uris: List of redirect URIs to set + merge: If True, merge with existing URIs. If False, replace entirely. 
+ + Returns: + True if successful, False otherwise + """ + # Get current client configuration + client = await self.get_client_by_client_id(client_id) + if not client: + logger.error(f"[KC-ADMIN] Cannot update redirect URIs - client '{client_id}' not found") + return False + + client_uuid = client["id"] # Internal UUID, not the client_id + + # Merge or replace redirect URIs + if merge: + existing_uris = set(client.get("redirectUris", [])) + new_uris = existing_uris.union(set(redirect_uris)) + final_uris = list(new_uris) + logger.info(f"[KC-ADMIN] Merging redirect URIs: {len(existing_uris)} existing + {len(redirect_uris)} new = {len(final_uris)} total") + else: + final_uris = redirect_uris + logger.info(f"[KC-ADMIN] Replacing redirect URIs with {len(final_uris)} URIs") + + # Update client configuration + token = await self._get_admin_token() + url = f"{self.keycloak_url}/admin/realms/{self.realm}/clients/{client_uuid}" + + async with httpx.AsyncClient() as client_http: + try: + # Prepare update payload (only redirect URIs) + update_payload = { + "id": client_uuid, + "clientId": client_id, + "redirectUris": final_uris, + } + + response = await client_http.put( + url, + headers={ + "Authorization": f"Bearer {token}", + "Content-Type": "application/json", + }, + json=update_payload, + timeout=10.0, + ) + + if response.status_code != 204: # Keycloak returns 204 No Content on success + logger.error(f"[KC-ADMIN] Failed to update client: {response.status_code} - {response.text}") + return False + + logger.info(f"[KC-ADMIN] โœ“ Updated redirect URIs for client '{client_id}'") + for uri in final_uris: + logger.info(f"[KC-ADMIN] - {uri}") + return True + + except httpx.RequestError as e: + logger.error(f"[KC-ADMIN] Failed to update client: {e}") + return False + + async def register_redirect_uri(self, client_id: str, redirect_uri: str) -> bool: + """ + Register a single redirect URI for a client (merges with existing). 
+ + Args: + client_id: The client_id (e.g., "ushadow-frontend") + redirect_uri: The redirect URI to add (e.g., "http://localhost:3010/auth/callback") + + Returns: + True if successful, False otherwise + """ + return await self.update_client_redirect_uris( + client_id=client_id, + redirect_uris=[redirect_uri], + merge=True + ) + + async def update_post_logout_redirect_uris( + self, + client_id: str, + post_logout_redirect_uris: List[str], + merge: bool = True + ) -> bool: + """ + Update post-logout redirect URIs for a Keycloak client. + + Args: + client_id: The client_id (e.g., "ushadow-frontend") + post_logout_redirect_uris: List of post-logout redirect URIs to set + merge: If True, merge with existing URIs. If False, replace entirely. + + Returns: + True if successful, False otherwise + """ + # Get client UUID + client = await self.get_client_by_client_id(client_id) + if not client: + logger.error(f"[KC-ADMIN] Client '{client_id}' not found") + return False + + client_uuid = client["id"] + + # Merge or replace post-logout redirect URIs + if merge: + existing_uris = set(client.get("attributes", {}).get("post.logout.redirect.uris", "").split("##")) + # Remove empty strings from the set + existing_uris = {uri for uri in existing_uris if uri} + new_uris = existing_uris.union(set(post_logout_redirect_uris)) + final_uris = list(new_uris) + logger.info(f"[KC-ADMIN] Merging post-logout redirect URIs: {len(existing_uris)} existing + {len(post_logout_redirect_uris)} new = {len(final_uris)} total") + else: + final_uris = post_logout_redirect_uris + logger.info(f"[KC-ADMIN] Replacing post-logout redirect URIs with {len(final_uris)} URIs") + + # Update client configuration + token = await self._get_admin_token() + url = f"{self.keycloak_url}/admin/realms/{self.realm}/clients/{client_uuid}" + + async with httpx.AsyncClient() as client_http: + try: + # Prepare update payload + # Post-logout redirect URIs are stored as a ## delimited string in attributes + attributes = 
client.get("attributes", {}) + attributes["post.logout.redirect.uris"] = "##".join(final_uris) + + update_payload = { + "id": client_uuid, + "clientId": client_id, + "attributes": attributes, + } + + response = await client_http.put( + url, + headers={ + "Authorization": f"Bearer {token}", + "Content-Type": "application/json", + }, + json=update_payload, + timeout=10.0, + ) + + if response.status_code != 204: # Keycloak returns 204 No Content on success + logger.error(f"[KC-ADMIN] Failed to update post-logout redirect URIs: {response.status_code} - {response.text}") + return False + + logger.info(f"[KC-ADMIN] โœ“ Updated post-logout redirect URIs for client '{client_id}'") + for uri in final_uris: + logger.info(f"[KC-ADMIN] - {uri}") + return True + + except httpx.RequestError as e: + logger.error(f"[KC-ADMIN] Failed to update post-logout redirect URIs: {e}") + return False + + +async def register_current_environment_redirect_uri() -> bool: + """ + Register this environment's redirect URIs with Keycloak. + + Registers both local (localhost/127.0.0.1) and Tailscale URIs if available. + Uses PORT_OFFSET to determine the correct frontend port. + Called during backend startup to ensure Keycloak accepts redirects from this environment. 
+ + Example: + - ushadow (PORT_OFFSET=10): Registers http://localhost:3010/auth/callback + - ushadow-orange (PORT_OFFSET=20): Registers http://localhost:3020/auth/callback + - With Tailscale: Also registers https://ushadow.spangled-kettle.ts.net/auth/callback + """ + # Get configuration from environment + keycloak_url = os.getenv("KEYCLOAK_URL", "http://keycloak:8080") + keycloak_realm = os.getenv("KEYCLOAK_REALM", "ushadow") + keycloak_client_id = os.getenv("KEYCLOAK_FRONTEND_CLIENT_ID", "ushadow-frontend") + + # Admin credentials + admin_user = os.getenv("KEYCLOAK_ADMIN_USER", "admin") + admin_password = os.getenv("KEYCLOAK_ADMIN_PASSWORD", "admin") + + # Calculate frontend port from PORT_OFFSET + port_offset = int(os.getenv("PORT_OFFSET", "0")) + frontend_port = 3000 + port_offset + + # Build redirect URIs - start with local URIs + redirect_uris = [ + f"http://localhost:{frontend_port}/oauth/callback", + f"http://127.0.0.1:{frontend_port}/oauth/callback", + ] + + post_logout_redirect_uris = [ + f"http://localhost:{frontend_port}/", + f"http://127.0.0.1:{frontend_port}/", + ] + + # Check if Tailscale is configured and add Tailscale URIs + try: + from src.utils.tailscale_serve import get_tailscale_status + ts_status = get_tailscale_status() + if ts_status.hostname and ts_status.authenticated: + # Add Tailscale URIs (HTTPS through Tailscale serve) + tailscale_redirect_uri = f"https://{ts_status.hostname}/oauth/callback" + tailscale_logout_uri = f"https://{ts_status.hostname}/" + + redirect_uris.append(tailscale_redirect_uri) + post_logout_redirect_uris.append(tailscale_logout_uri) + + logger.info(f"[KC-ADMIN] Detected Tailscale hostname: {ts_status.hostname}") + except Exception as e: + logger.debug(f"[KC-ADMIN] Could not detect Tailscale hostname: {e}") + + logger.info(f"[KC-ADMIN] Registering redirect URIs for environment:") + for uri in redirect_uris: + logger.info(f"[KC-ADMIN] - {uri}") + logger.info(f"[KC-ADMIN] Registering post-logout redirect URIs:") + for 
uri in post_logout_redirect_uris: + logger.info(f"[KC-ADMIN] - {uri}") + + # Create admin client and register URIs + admin_client = KeycloakAdminClient( + keycloak_url=keycloak_url, + realm=keycloak_realm, + admin_user=admin_user, + admin_password=admin_password, + ) + + # Register login redirect URIs + success = await admin_client.update_client_redirect_uris( + client_id=keycloak_client_id, + redirect_uris=redirect_uris, + merge=True # Merge with existing URIs (don't break other environments) + ) + + if not success: + logger.error(f"[KC-ADMIN] โŒ Failed to register redirect URIs for port {frontend_port}") + return False + + # Register post-logout redirect URIs + success = await admin_client.update_post_logout_redirect_uris( + client_id=keycloak_client_id, + post_logout_redirect_uris=post_logout_redirect_uris, + merge=True # Merge with existing URIs (don't break other environments) + ) + + if success: + logger.info(f"[KC-ADMIN] โœ“ Successfully registered all redirect URIs for port {frontend_port}") + else: + logger.warning(f"[KC-ADMIN] โš ๏ธ Failed to register redirect URIs - Keycloak login may not work on port {frontend_port}") + + return success + + +# Singleton getter for dependency injection +_keycloak_admin_client: Optional[KeycloakAdminClient] = None + + +def get_keycloak_admin() -> KeycloakAdminClient: + """ + Get the Keycloak admin client singleton. + + Configuration is loaded from environment variables. 
+ """ + global _keycloak_admin_client + + if _keycloak_admin_client is None: + keycloak_url = os.getenv("KEYCLOAK_URL", "http://keycloak:8080") + keycloak_realm = os.getenv("KEYCLOAK_REALM", "ushadow") + admin_user = os.getenv("KEYCLOAK_ADMIN_USER", "admin") + admin_password = os.getenv("KEYCLOAK_ADMIN_PASSWORD", "admin") + + _keycloak_admin_client = KeycloakAdminClient( + keycloak_url=keycloak_url, + realm=keycloak_realm, + admin_user=admin_user, + admin_password=admin_password, + ) + + return _keycloak_admin_client diff --git a/ushadow/backend/src/services/keycloak_auth.py b/ushadow/backend/src/services/keycloak_auth.py new file mode 100644 index 00000000..929f6bdd --- /dev/null +++ b/ushadow/backend/src/services/keycloak_auth.py @@ -0,0 +1,157 @@ +""" +Keycloak Token Validation + +Validates Keycloak JWT access tokens for API requests. +This allows federated users (authenticated via Keycloak) to access the API +without needing a local Ushadow account. +""" + +import os +import logging +from typing import Optional, Union +import jwt +from fastapi import HTTPException, status, Depends, Request +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials + +logger = logging.getLogger(__name__) + +# Security scheme for extracting Bearer tokens +security = HTTPBearer(auto_error=False) + + +def validate_keycloak_token(token: str) -> Optional[dict]: + """ + Validate a Keycloak access token. + + Args: + token: JWT access token from Keycloak + + Returns: + Decoded token payload if valid, None if invalid + + Note: + This is a simplified validation for development. + In production, you should: + 1. Fetch Keycloak's public keys from JWKS endpoint + 2. Verify signature using the public key + 3. Validate issuer, audience, and other claims + """ + try: + # For now, decode without verification (development only!) 
+ # TODO: Add proper JWT signature verification using Keycloak's public keys + # Keycloak typically uses RS256 algorithm, so we need to allow it even when not verifying + payload = jwt.decode( + token, + algorithms=["RS256", "HS256"], # Allow common algorithms + options={ + "verify_signature": False, # FIXME: Enable in production! + "verify_exp": True, # Still check expiration + } + ) + + # Log the payload for debugging + logger.info(f"Decoded Keycloak token - issuer: {payload.get('iss')}, user: {payload.get('preferred_username')}") + + # Validate issuer (accept both internal and external URLs) + keycloak_external = os.getenv("KEYCLOAK_EXTERNAL_URL", "http://localhost:8081") + keycloak_internal = os.getenv("KEYCLOAK_URL", "http://keycloak:8080") + keycloak_realm = os.getenv("KEYCLOAK_REALM", "ushadow") + + expected_issuers = [ + f"{keycloak_external}/realms/{keycloak_realm}", + f"{keycloak_internal}/realms/{keycloak_realm}", + ] + + token_issuer = payload.get("iss") + if token_issuer not in expected_issuers: + logger.warning(f"Invalid issuer: {token_issuer} (expected one of {expected_issuers})") + # Don't reject - just log for now during development + # return None + + # Token is valid + logger.info(f"โœ“ Validated Keycloak token for user: {payload.get('preferred_username')}") + return payload + + except jwt.ExpiredSignatureError: + logger.warning("Keycloak token expired") + return None + except jwt.InvalidTokenError as e: + logger.warning(f"Invalid Keycloak token: {e}") + return None + except Exception as e: + logger.error(f"Error validating Keycloak token: {e}", exc_info=True) + return None + + +def get_keycloak_user_from_token(token: str) -> Optional[dict]: + """ + Extract user info from a Keycloak token. + + Args: + token: JWT access token from Keycloak + + Returns: + User info dict with keys: email, name, sub (user ID), etc. 
+ """ + payload = validate_keycloak_token(token) + if not payload: + return None + + return { + "sub": payload.get("sub"), + "email": payload.get("email"), + "name": payload.get("name"), + "preferred_username": payload.get("preferred_username"), + "email_verified": payload.get("email_verified", False), + # Mark as Keycloak user for backend logic + "auth_type": "keycloak", + } + + +async def get_current_user_hybrid( + credentials: Optional[HTTPAuthorizationCredentials] = Depends(security) +) -> Union[dict, None]: + """ + Hybrid authentication dependency that accepts EITHER legacy OR Keycloak tokens. + + This is a FastAPI dependency that can be used in place of the legacy get_current_user. + It tries to validate the token as: + 1. Keycloak access token + 2. Legacy Ushadow JWT (via fastapi-users) + + Args: + credentials: HTTP Authorization credentials (Bearer token) + + Returns: + User info dict if authenticated, raises 401 if not + + Raises: + HTTPException: 401 if no valid authentication found + """ + if not credentials: + logger.warning("[AUTH] No credentials provided") + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Not authenticated" + ) + + token = credentials.credentials + token_preview = token[:20] + "..." 
if len(token) > 20 else token + logger.info(f"[AUTH] Validating token: {token_preview}") + + # Try Keycloak token validation first (simpler, no database lookup) + keycloak_user = get_keycloak_user_from_token(token) + if keycloak_user: + logger.info(f"[AUTH] โœ… Keycloak authentication successful: {keycloak_user.get('email')}") + return keycloak_user + + # Try legacy auth validation + # TODO: Add legacy token validation here if needed + # For now, we'll just check if it's a Keycloak token + # The existing fastapi-users middleware will handle legacy tokens + logger.warning(f"[AUTH] โŒ Token validation failed - neither Keycloak nor legacy token") + + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid or expired token" + ) diff --git a/ushadow/backend/src/services/keycloak_startup.py b/ushadow/backend/src/services/keycloak_startup.py new file mode 100644 index 00000000..9d5cd563 --- /dev/null +++ b/ushadow/backend/src/services/keycloak_startup.py @@ -0,0 +1,178 @@ +""" +Keycloak Startup Registration + +Automatically registers the current environment's redirect URIs with Keycloak +when the backend starts. This ensures multi-worktree setups work without +manual Keycloak configuration. +""" + +import logging +import os +from typing import List, Optional + +from .keycloak_admin import get_keycloak_admin +from ..config.keycloak_settings import is_keycloak_enabled +from .tailscale_manager import TailscaleManager + +logger = logging.getLogger(__name__) + + +def get_tailscale_hostname() -> Optional[str]: + """ + Get the full Tailscale hostname for the current environment. 
+
+    Returns:
+        Full hostname like "orange.spangled-kettle.ts.net" or None
+    """
+    try:
+        manager = TailscaleManager()
+        tailnet_suffix = manager.get_tailnet_suffix()
+
+        if not tailnet_suffix:
+            return None
+
+        # Get environment name (e.g., "orange", "purple")
+        env_name = os.getenv("ENV_NAME", "ushadow")
+
+        # Construct full hostname: {env}.{tailnet}
+        return f"{env_name}.{tailnet_suffix}"
+    except Exception as e:
+        logger.debug(f"[KC-STARTUP] Could not get Tailscale hostname: {e}")
+        return None
+
+
+def get_current_redirect_uris() -> List[str]:
+    """
+    Generate redirect URIs for the current environment.
+
+    Returns URIs based on:
+    - PORT_OFFSET environment variable (for multi-worktree support)
+    - FRONTEND_URL environment variable (for custom domains)
+    - Tailscale hostname detection (for .ts.net domains)
+
+    Returns:
+        List of redirect URIs to register
+    """
+    redirect_uris = []
+
+    # Get port offset (default 10 for main environment)
+    port_offset = int(os.getenv("PORT_OFFSET", "10"))
+    frontend_port = 3000 + port_offset
+
+    # Localhost redirect
+    localhost_uri = f"http://localhost:{frontend_port}/oauth/callback"
+    redirect_uris.append(localhost_uri)
+
+    # Custom frontend URL (e.g., for production domains)
+    frontend_url = os.getenv("FRONTEND_URL")
+    if frontend_url:
+        custom_uri = f"{frontend_url.rstrip('/')}/oauth/callback"
+        redirect_uris.append(custom_uri)
+
+    # Tailscale hostname (auto-detect using TailscaleManager)
+    tailscale_hostname = get_tailscale_hostname()
+    if tailscale_hostname:
+        # Support both http and https for Tailscale
+        ts_uri_http = f"http://{tailscale_hostname}/oauth/callback"
+        ts_uri_https = f"https://{tailscale_hostname}/oauth/callback"
+        redirect_uris.append(ts_uri_http)
+        redirect_uris.append(ts_uri_https)
+        logger.info(f"[KC-STARTUP] 📡 Adding Tailscale URIs: {tailscale_hostname}")
+
+    return redirect_uris
+
+
+def get_current_post_logout_uris() -> List[str]:
+    """
+    Generate post-logout redirect URIs for the current environment.
+ + Returns: + List of post-logout redirect URIs to register + """ + post_logout_uris = [] + + # Get port offset + port_offset = int(os.getenv("PORT_OFFSET", "10")) + frontend_port = 3000 + port_offset + + # Localhost + post_logout_uris.append(f"http://localhost:{frontend_port}") + post_logout_uris.append(f"http://localhost:{frontend_port}/") + + # Custom frontend URL + frontend_url = os.getenv("FRONTEND_URL") + if frontend_url: + base_url = frontend_url.rstrip('/') + post_logout_uris.append(base_url) + post_logout_uris.append(base_url + "/") + + # Tailscale hostname (auto-detect using TailscaleManager) + tailscale_hostname = get_tailscale_hostname() + if tailscale_hostname: + post_logout_uris.append(f"http://{tailscale_hostname}") + post_logout_uris.append(f"http://{tailscale_hostname}/") + post_logout_uris.append(f"https://{tailscale_hostname}") + post_logout_uris.append(f"https://{tailscale_hostname}/") + + return post_logout_uris + + +async def register_current_environment(): + """ + Register the current environment's redirect URIs with Keycloak. + + This is called during backend startup to ensure the current worktree's + frontend URLs are whitelisted in Keycloak. 
+
+    Skip if:
+    - Keycloak is not enabled in config
+    - KEYCLOAK_AUTO_REGISTER=false environment variable is set
+    """
+    # Check if Keycloak is enabled
+    if not is_keycloak_enabled():
+        logger.debug("[KC-STARTUP] Keycloak not enabled, skipping auto-registration")
+        return
+
+    # Check if auto-registration is disabled
+    if os.getenv("KEYCLOAK_AUTO_REGISTER", "true").lower() == "false":
+        logger.info("[KC-STARTUP] Keycloak auto-registration disabled via KEYCLOAK_AUTO_REGISTER=false")
+        return
+
+    try:
+        # Get admin client
+        admin_client = get_keycloak_admin()
+
+        # Get URIs to register
+        redirect_uris = get_current_redirect_uris()
+        post_logout_uris = get_current_post_logout_uris()
+
+        logger.info("[KC-STARTUP] 🔐 Registering redirect URIs with Keycloak...")
+        logger.info(f"[KC-STARTUP] Environment: PORT_OFFSET={os.getenv('PORT_OFFSET', '10')}")
+
+        # Register redirect URIs
+        success = await admin_client.update_client_redirect_uris(
+            client_id="ushadow-frontend",
+            redirect_uris=redirect_uris,
+            merge=True  # Merge with existing URIs
+        )
+
+        if not success:
+            logger.warning("[KC-STARTUP] ⚠️ Failed to register redirect URIs (Keycloak may not be ready yet)")
+            logger.warning("[KC-STARTUP] You may need to manually configure redirect URIs in Keycloak admin console")
+            return
+
+        # Register post-logout redirect URIs
+        logout_success = await admin_client.update_post_logout_redirect_uris(
+            client_id="ushadow-frontend",
+            post_logout_redirect_uris=post_logout_uris,
+            merge=True
+        )
+
+        if logout_success:
+            logger.info("[KC-STARTUP] ✅ Redirect URIs registered successfully")
+        else:
+            logger.warning("[KC-STARTUP] ⚠️ Failed to register post-logout redirect URIs")
+
+    except Exception as e:
+        logger.warning(f"[KC-STARTUP] ⚠️ Failed to auto-register Keycloak URIs: {e}")
+        logger.warning("[KC-STARTUP] This is non-critical - you can manually configure URIs in Keycloak admin console")
diff --git a/ushadow/backend/src/services/keycloak_user_sync.py
b/ushadow/backend/src/services/keycloak_user_sync.py new file mode 100644 index 00000000..46bf388b --- /dev/null +++ b/ushadow/backend/src/services/keycloak_user_sync.py @@ -0,0 +1,120 @@ +""" +Keycloak User Synchronization + +Syncs Keycloak users to MongoDB User collection for Chronicle compatibility. +Chronicle requires MongoDB ObjectIds for user_id, but Keycloak uses UUIDs. + +This module creates/updates MongoDB User records for Keycloak-authenticated users. +""" + +import logging +from typing import Optional +from beanie import PydanticObjectId + +from src.models.user import User + +logger = logging.getLogger(__name__) + + +async def get_or_create_user_from_keycloak( + keycloak_sub: str, + email: str, + name: Optional[str] = None +) -> User: + """ + Get or create a MongoDB User record for a Keycloak user. + + This ensures Keycloak users have a corresponding MongoDB ObjectId that + Chronicle can use. The Keycloak subject ID is stored in keycloak_id field. + + Args: + keycloak_sub: Keycloak user ID (UUID format) + email: User's email address + name: User's full name (optional) + + Returns: + User: MongoDB User document with ObjectId + + Example: + >>> user = await get_or_create_user_from_keycloak( + ... keycloak_sub="f47ac10b-58cc-4372-a567-0e02b2c3d479", + ... email="alice@example.com", + ... name="Alice Smith" + ... 
)
+        >>> str(user.id)  # MongoDB ObjectId: "507f1f77bcf86cd799439011"
+    """
+    # Try to find existing user by Keycloak ID
+    user = await User.find_one(User.keycloak_id == keycloak_sub)
+
+    if user:
+        logger.info(f"[KC-USER-SYNC] Found existing user: {email} (MongoDB ID: {user.id})")
+
+        # Update display_name if it changed
+        if name and user.display_name != name:
+            logger.info(f"[KC-USER-SYNC] Updating display_name: {user.display_name} → {name}")
+            user.display_name = name
+            await user.save()
+
+        return user
+
+    # Try to find by email (might be a legacy user who logged in via Keycloak)
+    user = await User.find_one(User.email == email)
+
+    if user:
+        logger.info(f"[KC-USER-SYNC] Found legacy user by email: {email}")
+        logger.info(f"[KC-USER-SYNC] Linking to Keycloak ID: {keycloak_sub}")
+
+        # Link to Keycloak
+        user.keycloak_id = keycloak_sub
+        if name and not user.display_name:
+            user.display_name = name
+        await user.save()
+
+        return user
+
+    # Create new user
+    logger.info(f"[KC-USER-SYNC] Creating new user for Keycloak account: {email}")
+
+    user = User(
+        email=email,
+        display_name=name or email,  # Fallback to email if no name provided
+        keycloak_id=keycloak_sub,
+        is_active=True,
+        is_verified=True,  # Keycloak users are pre-verified
+        is_superuser=False,  # Keycloak users are not admins by default
+        hashed_password="",  # No password - auth is via Keycloak
+    )
+
+    await user.create()
+
+    logger.info(f"[KC-USER-SYNC] ✓ Created user: {email} (MongoDB ID: {user.id})")
+
+    return user
+
+
+async def get_mongodb_user_id_for_keycloak_user(
+    keycloak_sub: str,
+    email: str,
+    name: Optional[str] = None
+) -> str:
+    """
+    Get MongoDB ObjectId string for a Keycloak user.
+
+    This is a convenience wrapper around get_or_create_user_from_keycloak
+    that returns just the ObjectId as a string (for use in JWT tokens).
+ + Args: + keycloak_sub: Keycloak user ID (UUID) + email: User's email + name: User's full name (optional) + + Returns: + str: MongoDB ObjectId as string (24 hex chars) + """ + user = await get_or_create_user_from_keycloak( + keycloak_sub=keycloak_sub, + email=email, + name=name + ) + + return str(user.id) diff --git a/ushadow/backend/src/services/kubernetes_manager.py b/ushadow/backend/src/services/kubernetes_manager.py index 39cc8f69..efc34e05 100644 --- a/ushadow/backend/src/services/kubernetes_manager.py +++ b/ushadow/backend/src/services/kubernetes_manager.py @@ -307,6 +307,33 @@ async def update_cluster_infra_scan( logger.error(f"Error updating cluster infra scan: {e}") return False + async def update_cluster( + self, + cluster_id: str, + updates: Dict[str, Any] + ) -> Optional[KubernetesCluster]: + """Update cluster configuration fields.""" + try: + # Validate cluster exists + cluster = await self.get_cluster(cluster_id) + if not cluster: + return None + + # Update MongoDB document + result = await self.clusters_collection.update_one( + {"cluster_id": cluster_id}, + {"$set": updates} + ) + + if result.modified_count == 0 and result.matched_count == 0: + return None + + # Return updated cluster + return await self.get_cluster(cluster_id) + except Exception as e: + logger.error(f"Error updating cluster: {e}") + return None + async def remove_cluster(self, cluster_id: str) -> bool: """Remove a cluster and its kubeconfig.""" # Delete encrypted kubeconfig file diff --git a/ushadow/backend/src/services/service_orchestrator.py b/ushadow/backend/src/services/service_orchestrator.py index 8381dbbb..f03c56fa 100644 --- a/ushadow/backend/src/services/service_orchestrator.py +++ b/ushadow/backend/src/services/service_orchestrator.py @@ -833,14 +833,6 @@ def _service_matches_installed(self, service: DiscoveredService, installed_names if compose_base in installed_names: return True - # If ANY service from the same compose file is installed, show all services from that 
file - # This handles multi-service compose files like mycelia (backend, frontend, worker) - all_services = self.compose_registry.get_services() - same_file_services = [s for s in all_services if s.compose_file == service.compose_file] - for sibling in same_file_services: - if sibling.service_name in installed_names: - return True - return False async def _build_service_summary(self, service: DiscoveredService, installed: bool) -> ServiceSummary: diff --git a/ushadow/backend/src/services/share_service.py b/ushadow/backend/src/services/share_service.py new file mode 100644 index 00000000..bd97d4e5 --- /dev/null +++ b/ushadow/backend/src/services/share_service.py @@ -0,0 +1,512 @@ +"""Share service for conversation and resource sharing. + +Implements business logic for creating, validating, and managing share tokens +with Keycloak Fine-Grained Authorization (FGA) integration. +""" + +import logging +from datetime import datetime, timedelta +from typing import Any, Dict, List, Optional +from uuid import uuid4 + +from beanie import PydanticObjectId +from motor.motor_asyncio import AsyncIOMotorDatabase + +from ..models.share import ( + KeycloakPolicy, + ResourceType, + ShareAccessLog, + SharePermission, + ShareToken, + ShareTokenCreate, + ShareTokenResponse, +) +from ..models.user import User + +logger = logging.getLogger(__name__) + + +class ShareService: + """Service for managing share tokens and access control. + + Coordinates share token creation, validation, and Keycloak FGA integration. + Implements business rules for expiration, view limits, and permission checking. + """ + + def __init__(self, db: AsyncIOMotorDatabase, base_url: str = "http://localhost:3000"): + """Initialize share service. 
+ + Args: + db: MongoDB database instance + base_url: Base URL for generating share links (e.g., "https://ushadow.example.com") + """ + self.db = db + self.base_url = base_url.rstrip("/") + + async def create_share_token( + self, + data: ShareTokenCreate, + created_by: User, + ) -> ShareToken: + """Create a new share token. + + Args: + data: Share token creation parameters + created_by: User creating the share + + Returns: + Created share token + + Raises: + ValueError: If resource doesn't exist or user lacks permission + """ + # TODO: Validate resource exists and user has permission to share it + # This is a business logic decision point - should we verify ownership here? + # Consider: strict ownership check vs. allowing sharing of any accessible resource + await self._validate_resource_exists(data.resource_type, data.resource_id) + await self._validate_user_can_share(created_by, data.resource_type, data.resource_id) + + # Calculate expiration + expires_at = None + if data.expires_in_days: + expires_at = datetime.utcnow() + timedelta(days=data.expires_in_days) + + # Build Keycloak-compatible policies + policies = self._build_keycloak_policies( + resource_type=data.resource_type.value, + resource_id=data.resource_id, + permissions=[p.value for p in data.permissions], + ) + + # Create share token + share_token = ShareToken( + token=str(uuid4()), + resource_type=data.resource_type.value, + resource_id=data.resource_id, + created_by=created_by.id, + policies=policies, + permissions=[p.value for p in data.permissions], + require_auth=data.require_auth, + tailscale_only=data.tailscale_only, + allowed_emails=data.allowed_emails, + expires_at=expires_at, + max_views=data.max_views, + ) + + await share_token.insert() + + # TODO: Register with Keycloak FGA if enabled + # await self._register_with_keycloak(share_token) + + logger.info( + f"Created share token {share_token.token} for {data.resource_type}:{data.resource_id} " + f"by user {created_by.email}" + ) + + return 
share_token + + async def get_share_token(self, token: str) -> Optional[ShareToken]: + """Get share token by token string. + + Args: + token: Share token UUID + + Returns: + ShareToken if found, None otherwise + """ + return await ShareToken.find_one(ShareToken.token == token) + + async def validate_share_access( + self, + token: str, + user_email: Optional[str] = None, + request_ip: Optional[str] = None, + ) -> tuple[bool, Optional[ShareToken], str]: + """Validate access to a shared resource. + + Args: + token: Share token string + user_email: Email of user trying to access (None for anonymous) + request_ip: IP address of request (for Tailscale validation) + + Returns: + Tuple of (is_valid, share_token, reason) + """ + share_token = await self.get_share_token(token) + if not share_token: + return False, None, "Invalid share token" + + # Check access permissions + can_access, reason = share_token.can_access(user_email) + if not can_access: + return False, share_token, reason + + # TODO: Validate Tailscale network if required + # This is a decision point - how should we verify Tailscale access? + # Options: check IP ranges, validate via Tailscale API, trust reverse proxy headers + if share_token.tailscale_only: + is_tailscale = await self._validate_tailscale_access(request_ip) + if not is_tailscale: + return False, share_token, "Access restricted to Tailscale network" + + return True, share_token, "Access granted" + + async def record_share_access( + self, + share_token: ShareToken, + user_identifier: str, + action: str = "view", + metadata: Optional[Dict[str, Any]] = None, + ): + """Record access to shared resource for audit trail. + + Args: + share_token: Share token being accessed + user_identifier: Email or IP of accessor + action: Action performed (view, edit, etc.) + metadata: Additional context (user agent, IP, etc.) 
+ """ + await share_token.record_access(user_identifier, action, metadata) + logger.info( + f"Recorded {action} access to share {share_token.token} " + f"by {user_identifier} (view {share_token.view_count})" + ) + + async def revoke_share_token(self, token: str, user: User) -> bool: + """Revoke a share token. + + Args: + token: Share token to revoke + user: User attempting to revoke + + Returns: + True if revoked, False if not found or permission denied + + Raises: + ValueError: If user lacks permission to revoke + """ + share_token = await self.get_share_token(token) + if not share_token: + return False + + # Verify user can revoke (must be creator or admin) + if str(share_token.created_by) != str(user.id) and not user.is_superuser: + raise ValueError("Only the creator or admin can revoke share tokens") + + # TODO: Unregister from Keycloak FGA if enabled + # await self._unregister_from_keycloak(share_token) + + await share_token.delete() + logger.info(f"Revoked share token {token} by user {user.email}") + return True + + async def list_shares_for_resource( + self, + resource_type: str, + resource_id: str, + user: User, + ) -> List[ShareToken]: + """List all share tokens for a resource. + + Args: + resource_type: Type of resource + resource_id: ID of resource + user: User requesting list (must have access to resource) + + Returns: + List of share tokens + """ + # TODO: Validate user has access to resource + # await self._validate_user_can_access(user, resource_type, resource_id) + + return await ShareToken.find( + ShareToken.resource_type == resource_type, + ShareToken.resource_id == resource_id, + ).to_list() + + async def get_share_access_logs( + self, + token: str, + user: User, + ) -> List[ShareAccessLog]: + """Get access logs for a share token. 
+ + Args: + token: Share token + user: User requesting logs (must be creator or admin) + + Returns: + List of access log entries + + Raises: + ValueError: If user lacks permission + """ + share_token = await self.get_share_token(token) + if not share_token: + raise ValueError("Share token not found") + + # Verify permission + if str(share_token.created_by) != str(user.id) and not user.is_superuser: + raise ValueError("Only the creator or admin can view access logs") + + return [ShareAccessLog(**log) for log in share_token.access_log] + + def to_response(self, share_token: ShareToken) -> ShareTokenResponse: + """Convert ShareToken to API response model. + + Args: + share_token: Share token document + + Returns: + ShareTokenResponse for API + """ + return ShareTokenResponse( + token=share_token.token, + share_url=f"{self.base_url}/share/{share_token.token}", + resource_type=share_token.resource_type, + resource_id=share_token.resource_id, + permissions=share_token.permissions, + expires_at=share_token.expires_at, + max_views=share_token.max_views, + view_count=share_token.view_count, + require_auth=share_token.require_auth, + tailscale_only=share_token.tailscale_only, + created_at=share_token.created_at, + ) + + # Private helper methods + + def _build_keycloak_policies( + self, + resource_type: str, + resource_id: str, + permissions: List[str], + ) -> List[KeycloakPolicy]: + """Build Keycloak FGA policies from permissions. + + Args: + resource_type: Type of resource + resource_id: ID of resource + permissions: List of permission strings (read, write, etc.) 
+ + Returns: + List of Keycloak-compatible policies + """ + # Resource identifier format: "type:id" (e.g., "conversation:123") + resource = f"{resource_type}:{resource_id}" + + return [ + KeycloakPolicy( + resource=resource, + action=permission, + effect="allow", + ) + for permission in permissions + ] + + async def _validate_resource_exists( + self, + resource_type: ResourceType, + resource_id: str, + ): + """Validate that resource exists and is accessible. + + Args: + resource_type: Type of resource + resource_id: ID of resource + + Raises: + ValueError: If resource doesn't exist + """ + import httpx + import os + + # Configuration: Enable/disable strict validation + ENABLE_VALIDATION = os.getenv("SHARE_VALIDATE_RESOURCES", "false").lower() == "true" + + if not ENABLE_VALIDATION: + # Lazy validation - skip check for faster share creation + logger.debug(f"Skipping validation for {resource_type}:{resource_id} (SHARE_VALIDATE_RESOURCES=false)") + return + + # Strict validation - verify resource exists + logger.debug(f"Validating resource {resource_type}:{resource_id}") + + # TODO: YOUR IMPLEMENTATION (5-10 lines) + # Implement validation logic based on your backend choice: + # + # For Mycelia (resource-based API): + # POST to /api/resource/tech.mycelia.objects with action: "get", id: resource_id + # + # For Chronicle (REST API): + # GET /api/conversations/{resource_id} + # + # Example structure: + # if resource_type == ResourceType.CONVERSATION: + # # Your validation code here + # pass + # elif resource_type == ResourceType.MEMORY: + # # Memory validation + # pass + + # Placeholder: Log that validation needs implementation + logger.warning( + f"Resource validation is enabled but not implemented for {resource_type}. " + f"Add validation logic in share_service.py:_validate_resource_exists()" + ) + + async def _validate_user_can_share( + self, + user: User, + resource_type: ResourceType, + resource_id: str, + ): + """Validate user has permission to share resource. 
+ + Business rule: Users can only share resources they created (ownership-based). + + Args: + user: User attempting to share + resource_type: Type of resource + resource_id: ID of resource + + Raises: + ValueError: If user lacks permission (not the owner) + """ + import httpx + import os + + # Superusers can share anything + if user.is_superuser: + logger.debug(f"Superuser {user.email} granted share permission for {resource_type}:{resource_id}") + return + + # For conversations/objects in Mycelia, verify ownership + if resource_type == ResourceType.CONVERSATION: + mycelia_url = os.getenv("MYCELIA_URL", "http://mycelia-backend:8000") + + try: + # Fetch the object from Mycelia to check userId field + async with httpx.AsyncClient(timeout=5.0) as client: + response = await client.post( + f"{mycelia_url}/api/resource/tech.mycelia.objects", + json={ + "action": "get", + "id": resource_id + }, + # TODO: Add authentication header if needed + # headers={"Authorization": f"Bearer {token}"} + ) + + if response.status_code == 404: + raise ValueError(f"Conversation {resource_id} not found") + elif response.status_code != 200: + logger.error(f"Failed to fetch resource for ownership check: {response.status_code}") + raise ValueError("Could not verify resource ownership") + + resource_data = response.json() + + # Check if user owns this resource + # Mycelia stores userId field on objects + resource_owner = resource_data.get("userId") + if not resource_owner: + logger.warning(f"Resource {resource_id} has no userId field, allowing share") + return # Allow if no owner specified + + # Compare owner with current user + # User email is used as the userId in Mycelia + if resource_owner != user.email: + raise ValueError( + f"You can only share conversations you created. 
" + f"This conversation belongs to {resource_owner}" + ) + + logger.debug(f"User {user.email} verified as owner of {resource_type}:{resource_id}") + + except httpx.RequestError as e: + logger.error(f"Failed to connect to Mycelia for ownership check: {e}") + raise ValueError("Could not verify resource ownership - Mycelia unavailable") + + elif resource_type == ResourceType.MEMORY: + # TODO: Implement memory ownership check if needed + # For now, allow authenticated users to share memories + logger.debug(f"Memory sharing not yet enforcing ownership for {resource_id}") + + else: + # Other resource types - allow for now + logger.debug(f"Resource type {resource_type} ownership check not implemented") + + async def _validate_tailscale_access(self, request_ip: Optional[str]) -> bool: + """Validate request is from Tailscale network. + + Args: + request_ip: IP address of request + + Returns: + True if from Tailscale, False otherwise + """ + import ipaddress + import os + + # Configuration: Enable/disable Tailscale validation + ENABLE_TAILSCALE_CHECK = os.getenv("SHARE_VALIDATE_TAILSCALE", "false").lower() == "true" + + if not ENABLE_TAILSCALE_CHECK: + # Disabled - allow all IPs (useful for testing or when not using Tailscale) + logger.debug(f"Tailscale validation disabled (SHARE_VALIDATE_TAILSCALE=false)") + return True + + if not request_ip: + logger.warning("No request IP provided for Tailscale validation") + return False + + # TODO: YOUR IMPLEMENTATION (5-10 lines) + # Choose your Tailscale validation strategy based on your setup: + # + # Option A - IP Range Check (if ushadow runs directly on Tailscale): + # try: + # ip = ipaddress.ip_address(request_ip) + # tailscale_range = ipaddress.ip_network("100.64.0.0/10") + # is_tailscale = ip in tailscale_range + # logger.debug(f"IP {request_ip} {'is' if is_tailscale else 'is NOT'} in Tailscale range") + # return is_tailscale + # except ValueError: + # logger.warning(f"Invalid IP address: {request_ip}") + # return False + # + # 
Option B - Trust Tailscale Serve Headers (if using Tailscale Serve): + # # This requires passing the Request object instead of just IP + # # tailscale_user = request.headers.get("X-Tailscale-User") + # # return tailscale_user is not None + # + # For now, log a warning and allow (fail open for testing) + logger.warning( + f"Tailscale validation enabled but not implemented. " + f"Add logic in share_service.py:_validate_tailscale_access(). " + f"IP: {request_ip}" + ) + return True # Fail open until implemented + + async def _register_with_keycloak(self, share_token: ShareToken): + """Register share token with Keycloak FGA. + + Args: + share_token: Share token to register + """ + # TODO: Implement Keycloak FGA registration + # This should: + # 1. Create Keycloak resource for the shared item + # 2. Create Keycloak authorization policies + # 3. Store keycloak_policy_id and keycloak_resource_id on share_token + logger.debug(f"Keycloak FGA registration for token {share_token.token}") + + async def _unregister_from_keycloak(self, share_token: ShareToken): + """Unregister share token from Keycloak FGA. 
+ + Args: + share_token: Share token to unregister + """ + # TODO: Implement Keycloak FGA cleanup + # This should delete the Keycloak resource and policies + if share_token.keycloak_policy_id: + logger.debug(f"Keycloak FGA cleanup for policy {share_token.keycloak_policy_id}") diff --git a/ushadow/backend/src/services/tailscale_manager.py b/ushadow/backend/src/services/tailscale_manager.py index a9e8be40..c7bd3985 100644 --- a/ushadow/backend/src/services/tailscale_manager.py +++ b/ushadow/backend/src/services/tailscale_manager.py @@ -9,9 +9,8 @@ Architecture: - Layer 1 (Tailscale Serve): External HTTPS โ†’ Internal containers - - /api/* โ†’ backend (REST APIs) + - /api/* โ†’ backend (REST APIs, includes /ws/audio/relay for WebSockets) - /auth/* โ†’ backend (authentication) - - /ws_pcm, /ws_omi โ†’ chronicle (WebSockets, direct for low latency) - /* โ†’ frontend (SPA catch-all) - Layer 2 (Generic Proxy): Backend routes REST to services via /api/services/{name}/proxy/* @@ -188,23 +187,58 @@ def start_container(self) -> Dict[str, Any]: except docker.errors.NotFound: # Container doesn't exist - create it - # TODO: Get image, network, ports from settings/config - # For now, use defaults + # Match configuration from compose/tailscale-compose.yml + + # First, ensure networks exist + try: + ushadow_net = self.docker_client.networks.get("ushadow-network") + logger.info("Found ushadow-network") + except docker.errors.NotFound: + logger.error("ushadow-network not found! 
Container will use default network.") + ushadow_net = None + + try: + infra_net = self.docker_client.networks.get("infra-network") + logger.info("Found infra-network") + except docker.errors.NotFound: + logger.warning("infra-network not found") + infra_net = None + + # Create networking_config for multiple networks + from docker.types import EndpointConfig, NetworkingConfig + + networking_config = NetworkingConfig( + endpoints_config={ + "ushadow-network": EndpointConfig() if ushadow_net else None, + "infra-network": EndpointConfig() if infra_net else None, + } + ) + container = self.docker_client.containers.run( image="tailscale/tailscale:latest", name=container_name, + hostname=container_name, detach=True, - network_mode="host", environment={ "TS_STATE_DIR": "/var/lib/tailscale", - "TS_SOCKET": "/var/run/tailscale/tailscaled.sock", + "TS_USERSPACE": "true", + "TS_ACCEPT_DNS": "true", + "TS_EXTRA_ARGS": "--advertise-tags=tag:container", }, volumes={ volume_name: {"bind": "/var/lib/tailscale", "mode": "rw"} }, cap_add=["NET_ADMIN", "NET_RAW"], + networking_config=networking_config, + command=[ + "sh", "-c", + f"tailscaled --tun=userspace-networking --statedir=/var/lib/tailscale & " + f"sleep 2 && tailscale up --hostname={self.env_name} && sleep infinity" + ], ) + logger.info(f"Created {container_name} on ushadow-network and infra-network") + return { "status": "created", "message": "Tailscale container created and started" @@ -634,7 +668,6 @@ def logout(self) -> bool: def configure_base_routes(self, backend_container: Optional[str] = None, frontend_container: Optional[str] = None, - chronicle_container: Optional[str] = None, backend_port: int = 8000, frontend_port: Optional[int] = None) -> bool: """Configure base infrastructure routes (Layer 1). 
@@ -642,14 +675,14 @@ def configure_base_routes(self, Sets up: - /api/* โ†’ backend (REST APIs through generic proxy) - /auth/* โ†’ backend (authentication) - - /ws_pcm โ†’ chronicle (WebSocket, direct for low latency) - - /ws_omi โ†’ chronicle (WebSocket, direct for low latency) - /* โ†’ frontend (SPA catch-all) + Note: Chronicle and other deployed services are accessed via their own ports, + not through Tailscale routing. + Args: backend_container: Backend container name (default: {env}-backend) frontend_container: Frontend container name (default: {env}-webui) - chronicle_container: Chronicle container name (default: {env}-chronicle-backend) backend_port: Backend internal port (default: 8000) frontend_port: Frontend internal port (auto-detect if None) @@ -661,8 +694,6 @@ def configure_base_routes(self, backend_container = f"{self.env_name}-backend" if not frontend_container: frontend_container = f"{self.env_name}-webui" - if not chronicle_container: - chronicle_container = f"{self.env_name}-chronicle-backend" # Auto-detect frontend port based on dev/prod mode if frontend_port is None: @@ -671,7 +702,6 @@ def configure_base_routes(self, backend_base = f"http://{backend_container}:{backend_port}" frontend_target = f"http://{frontend_container}:{frontend_port}" - chronicle_base = f"http://{chronicle_container}:{backend_port}" success = True @@ -683,12 +713,8 @@ def configure_base_routes(self, if not self.add_serve_route(route, target): success = False - # WebSocket routes - direct to Chronicle for low latency (legacy/mobile) - ws_routes = ["/ws_pcm", "/ws_omi"] - for route in ws_routes: - target = f"{chronicle_base}{route}" - if not self.add_serve_route(route, target): - success = False + # Chronicle WebSocket routes removed - Chronicle is now a deployed service + # accessed via its own port (e.g., http://localhost:8090) # Frontend catches everything else if not self.add_serve_route("/", frontend_target): diff --git 
a/ushadow/backend/src/services/template_service.py b/ushadow/backend/src/services/template_service.py index 97fefa70..c29c8e2d 100644 --- a/ushadow/backend/src/services/template_service.py +++ b/ushadow/backend/src/services/template_service.py @@ -83,7 +83,7 @@ async def list_templates(source: Optional[str] = None) -> List[Template]: is_installed = True # Debug logging - logger.info(f"Service: {service.service_name}, installed: {is_installed}, installed_names: {installed_names}") + logger.debug(f"Service: {service.service_name}, installed: {is_installed}, installed_names: {installed_names}") templates.append(Template( id=service.service_id, diff --git a/ushadow/backend/src/services/token_bridge.py b/ushadow/backend/src/services/token_bridge.py new file mode 100644 index 00000000..5fb6509d --- /dev/null +++ b/ushadow/backend/src/services/token_bridge.py @@ -0,0 +1,126 @@ +""" +Token Bridge Utility + +Automatically converts Keycloak OIDC tokens to service-compatible JWT tokens. +This allows proxy and audio relay to transparently bridge authentication. + +Usage: + token = extract_token_from_request(request) + service_token = await bridge_to_service_token(token, audiences=["chronicle"]) +""" + +import logging +from typing import Optional +from fastapi import Request +from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer + +from .keycloak_auth import get_keycloak_user_from_token +from .keycloak_user_sync import get_mongodb_user_id_for_keycloak_user +from .auth import generate_jwt_for_service + +logger = logging.getLogger(__name__) +security = HTTPBearer(auto_error=False) + + +def extract_token_from_request(request: Request) -> Optional[str]: + """ + Extract Bearer token from Authorization header or query parameter. 
+ + Args: + request: FastAPI request object + + Returns: + Token string if found, None otherwise + """ + # Try Authorization header first + auth_header = request.headers.get("authorization", "") + if auth_header.startswith("Bearer "): + return auth_header[7:] # Remove "Bearer " prefix + + # Try query parameter (for WebSocket connections) + token = request.query_params.get("token") + if token: + return token + + return None + + +async def bridge_to_service_token( + token: str, + audiences: Optional[list[str]] = None +) -> Optional[str]: + """ + Convert a Keycloak token to a service-compatible JWT token. + + If the token is already a service token (not a Keycloak token), + returns it unchanged. Otherwise, validates the Keycloak token + and generates a new service token. + + Args: + token: Token to bridge (Keycloak or service token) + audiences: Audiences for the service token (defaults to ["ushadow", "chronicle"]) + + Returns: + Service token if bridging succeeded, None if token is invalid + """ + if not token: + return None + + # Try to validate as Keycloak token + keycloak_user = get_keycloak_user_from_token(token) + + if not keycloak_user: + # Not a valid Keycloak token + # Could be a service token already, or invalid + # Let it through and let the downstream service validate + logger.debug("[TOKEN-BRIDGE] Token is not a Keycloak token, passing through") + return token + + # It's a Keycloak token - bridge it + user_email = keycloak_user.get("email") + keycloak_sub = keycloak_user.get("sub") + user_name = keycloak_user.get("name") + + if not user_email or not keycloak_sub: + logger.error(f"[TOKEN-BRIDGE] Missing user info: email={user_email}, keycloak_sub={keycloak_sub}") + return None + + # Sync Keycloak user to MongoDB (creates User record if needed) + # This gives us a MongoDB ObjectId that Chronicle can use + try: + mongodb_user_id = await get_mongodb_user_id_for_keycloak_user( + keycloak_sub=keycloak_sub, + email=user_email, + name=user_name + ) + 
logger.debug(f"[TOKEN-BRIDGE] Keycloak {keycloak_sub} โ†’ MongoDB {mongodb_user_id}") + except Exception as e: + logger.error(f"[TOKEN-BRIDGE] Failed to sync Keycloak user to MongoDB: {e}", exc_info=True) + return None + + # Generate service token with MongoDB ObjectId + audiences = audiences or ["ushadow", "chronicle"] + service_token = generate_jwt_for_service( + user_id=mongodb_user_id, # Use MongoDB ObjectId, not Keycloak UUID + user_email=user_email, + audiences=audiences + ) + + logger.info(f"[TOKEN-BRIDGE] โœ“ Bridged Keycloak token for {user_email} โ†’ service token (MongoDB ID: {mongodb_user_id})") + logger.debug(f"[TOKEN-BRIDGE] Audiences: {audiences}, token: {service_token[:30]}...") + + return service_token + + +def is_keycloak_token(token: str) -> bool: + """ + Check if a token is a Keycloak token (vs service token). + + Args: + token: JWT token to check + + Returns: + True if token is from Keycloak, False otherwise + """ + keycloak_user = get_keycloak_user_from_token(token) + return keycloak_user is not None diff --git a/ushadow/backend/src/utils/auth_helpers.py b/ushadow/backend/src/utils/auth_helpers.py new file mode 100644 index 00000000..8e07d549 --- /dev/null +++ b/ushadow/backend/src/utils/auth_helpers.py @@ -0,0 +1,50 @@ +""" +Authentication helper utilities for handling both Keycloak and legacy user formats. +""" + +from typing import Union, Optional + + +def get_user_id(user: Union[dict, object]) -> str: + """ + Safely extract user ID from either Keycloak dict or legacy User object. + + Args: + user: Either Keycloak user dict (with 'sub' field) or legacy User object (with 'id' attribute) + + Returns: + User ID as string + """ + if isinstance(user, dict): + return user.get("sub", "") + return str(getattr(user, "id", "")) + + +def get_user_email(user: Union[dict, object]) -> str: + """ + Safely extract user email from either Keycloak dict or legacy User object. 
+ + Args: + user: Either Keycloak user dict (with 'email' field) or legacy User object (with 'email' attribute) + + Returns: + User email as string + """ + if isinstance(user, dict): + return user.get("email", "") + return getattr(user, "email", "") + + +def get_user_name(user: Union[dict, object]) -> Optional[str]: + """ + Safely extract user name from either Keycloak dict or legacy User object. + + Args: + user: Either Keycloak user dict (with 'name' field) or legacy User object (with 'display_name' attribute) + + Returns: + User name as string or None + """ + if isinstance(user, dict): + return user.get("name") or user.get("preferred_username") + return getattr(user, "display_name", None) or getattr(user, "email", None) diff --git a/ushadow/backend/src/utils/service_urls.py b/ushadow/backend/src/utils/service_urls.py index 13925e7e..645c18ef 100644 --- a/ushadow/backend/src/utils/service_urls.py +++ b/ushadow/backend/src/utils/service_urls.py @@ -20,11 +20,13 @@ def get_internal_proxy_url(service_name: str) -> str: service_name: Service name (e.g., "mem0", "chronicle-backend") Returns: - Internal proxy URL (e.g., "http://ushadow-orange-backend:8360/api/services/mem0/proxy") + Internal proxy URL (e.g., "http://ushadow-orange-backend:8000/api/services/mem0/proxy") """ - backend_port = os.getenv("BACKEND_PORT", "8001") + # Backend always listens on port 8000 internally (container port) + # BACKEND_PORT is the external/host port which varies by environment + backend_internal_port = "8000" project_name = os.getenv("COMPOSE_PROJECT_NAME", "ushadow") - return f"http://{project_name}-backend:{backend_port}/api/services/{service_name}/proxy" + return f"http://{project_name}-backend:{backend_internal_port}/api/services/{service_name}/proxy" def get_relative_proxy_url(service_name: str) -> str: diff --git a/ushadow/backend/src/utils/tailscale_serve.py b/ushadow/backend/src/utils/tailscale_serve.py index e8f16529..457f978f 100644 --- 
a/ushadow/backend/src/utils/tailscale_serve.py +++ b/ushadow/backend/src/utils/tailscale_serve.py @@ -313,10 +313,11 @@ def configure_base_routes( Sets up: - /api/* -> backend/api (path preserved) - /auth/* -> backend/auth (path preserved) - - /ws_pcm -> chronicle-backend/ws_pcm (websocket - direct to Chronicle) - - /ws_omi -> chronicle-backend/ws_omi (websocket - direct to Chronicle) - /* -> frontend + Note: Audio WebSockets use /ws/audio/relay (part of /api/* routing) + The relay handles forwarding to Chronicle/Mycelia internally + Note: Tailscale serve strips the path prefix, so we include it in the target URL to preserve the full path at the service. @@ -361,22 +362,12 @@ def configure_base_routes( if not add_serve_route(route, target): success = False - # Configure Chronicle WebSocket routes - these go directly to Chronicle for low latency - # (REST APIs use /api/services/chronicle-backend/proxy/* through ushadow backend) - chronicle_container = f"{env_name}-chronicle-backend" - chronicle_port = 8000 # Chronicle's internal port - chronicle_base = f"http://{chronicle_container}:{chronicle_port}" - - websocket_routes = ["/ws_pcm", "/ws_omi"] - for route in websocket_routes: - target = f"{chronicle_base}{route}" - if not add_serve_route(route, target): - success = False + # NOTE: Audio WebSockets are handled by the audio relay at /ws/audio/relay + # The relay forwards to Chronicle/Mycelia/other services via internal Docker networking + # No direct Chronicle WebSocket routing needed at Layer 1 - # NOTE: Chronicle REST APIs are now accessed via generic proxy pattern: - # /api/services/chronicle-backend/proxy/* instead of direct /chronicle routing - # This provides unified auth and centralized routing through ushadow backend - # WebSockets go directly to Chronicle for low latency + # NOTE: Chronicle REST APIs are accessed via generic proxy pattern: + # /api/services/chronicle-backend/proxy/* - unified auth through ushadow backend # Frontend catches everything else 
if not add_serve_route("/", frontend_target): diff --git a/ushadow/backend/tests/test_yaml_parser.py b/ushadow/backend/tests/test_yaml_parser.py index a45924ab..d606e9b6 100644 --- a/ushadow/backend/tests/test_yaml_parser.py +++ b/ushadow/backend/tests/test_yaml_parser.py @@ -219,7 +219,7 @@ def test_parse_full_compose(self): - mem0 networks: - infra-network: + ushadow-network: external: true volumes: @@ -255,7 +255,7 @@ def test_parse_full_compose(self): assert mem0_ui.depends_on == ["mem0"] # Check networks and volumes - assert "infra-network" in result.networks + assert "ushadow-network" in result.networks assert "mem0_data" in result.volumes finally: diff --git a/ushadow/backend/uv.lock b/ushadow/backend/uv.lock index 93060783..96e6b47e 100644 --- a/ushadow/backend/uv.lock +++ b/ushadow/backend/uv.lock @@ -1445,6 +1445,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] +[[package]] +name = "neo4j" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1b/01/d6ce65e4647f6cb2b9cca3b813978f7329b54b4e36660aaec1ddf0ccce7a/neo4j-6.1.0.tar.gz", hash = "sha256:b5dde8c0d8481e7b6ae3733569d990dd3e5befdc5d452f531ad1884ed3500b84", size = 239629, upload-time = "2026-01-12T11:27:34.777Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/70/5c/ee71e2dd955045425ef44283f40ba1da67673cf06404916ca2950ac0cd39/neo4j-6.1.0-py3-none-any.whl", hash = "sha256:3bd93941f3a3559af197031157220af9fd71f4f93a311db687bd69ffa417b67d", size = 325326, upload-time = "2026-01-12T11:27:33.196Z" }, +] + [[package]] name = "oauthlib" version = "3.3.1" @@ -1974,6 +1986,15 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/aa/76/03af049af4dcee5d27442f71b6924f01f3efb5d2bd34f23fcd563f2cc5f5/python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090", size = 24541, upload-time = "2025-12-17T09:24:21.153Z" }, ] +[[package]] +name = "pytz" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, +] + [[package]] name = "pywin32" version = "311" @@ -2582,6 +2603,7 @@ dependencies = [ { name = "litellm" }, { name = "mcp" }, { name = "motor" }, + { name = "neo4j" }, { name = "omegaconf" }, { name = "passlib", extra = ["bcrypt"] }, { name = "prompt-toolkit" }, @@ -2634,6 +2656,7 @@ requires-dist = [ { name = "litellm", specifier = ">=1.50.0" }, { name = "mcp", specifier = ">=1.1.0" }, { name = "motor", specifier = ">=3.6.0" }, + { name = "neo4j", specifier = ">=5.26.0" }, { name = "omegaconf", specifier = ">=2.3.0" }, { name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4" }, { name = "prompt-toolkit", specifier = ">=3.0.48" }, diff --git a/ushadow/frontend/keycloak-theme/login/resources/css/login.css b/ushadow/frontend/keycloak-theme/login/resources/css/login.css new file mode 100644 index 00000000..c69e8244 --- /dev/null +++ b/ushadow/frontend/keycloak-theme/login/resources/css/login.css @@ -0,0 +1,538 @@ +/** + * Ushadow Keycloak Login Theme + * Matches the frontend login design 
exactly + */ + +/* ============================================ + GLOBAL STYLES & PAGE BACKGROUND + ============================================ */ + +body, +html { + margin: 0 !important; + padding: 0 !important; + width: 100% !important; + height: 100% !important; + overflow-x: hidden !important; +} + +body, +html, +.login-pf-page { + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, sans-serif !important; + background-color: #18181b !important; /* Dark purple-black like reference */ + color: #ffffff !important; +} + +/* Make the page wrapper full height */ +.login-pf-page { + min-height: 100vh !important; + display: flex !important; + flex-direction: column !important; + align-items: center !important; + justify-content: center !important; + position: relative !important; +} + +.login-pf, +.login-pf-page .login-pf { + width: 100% !important; + max-width: none !important; + display: flex !important; + flex-direction: column !important; + align-items: center !important; + justify-content: center !important; + padding: 2rem 1rem !important; +} + +/* Purple glow top-right */ +body::before { + content: '' !important; + position: fixed !important; + top: -200px !important; + right: -200px !important; + width: 600px !important; + height: 600px !important; + background: radial-gradient(circle, rgba(168, 85, 247, 0.15) 0%, transparent 70%) !important; + pointer-events: none !important; + z-index: 0 !important; +} + +/* Green glow bottom-left */ +body::after { + content: '' !important; + position: fixed !important; + bottom: -200px !important; + left: -200px !important; + width: 600px !important; + height: 600px !important; + background: radial-gradient(circle, rgba(74, 222, 128, 0.12) 0%, transparent 70%) !important; + pointer-events: none !important; + z-index: 0 !important; +} + +/* ============================================ + LOGO & HEADER + ============================================ */ + +#kc-header-wrapper { + width: 
100% !important; + text-align: center !important; + margin: 0 auto 2rem auto !important; + position: relative !important; + z-index: 10 !important; + display: flex !important; + flex-direction: column !important; + align-items: center !important; +} + +/* Logo image - large 3D U */ +#kc-header-wrapper::before { + content: ''; + display: block; + width: 180px !important; + height: 180px !important; + margin: 0 auto 1rem; + background: url('../img/logo.png') center no-repeat; + background-size: contain; + filter: drop-shadow(0 8px 24px rgba(74, 222, 128, 0.2)) drop-shadow(0 8px 24px rgba(168, 85, 247, 0.2)); +} + +/* Ushadow brand text - GRADIENT green to purple */ +#kc-header, +#kc-header-wrapper h1 { + font-size: 2.75rem !important; + font-weight: 600 !important; + background: linear-gradient(90deg, #4ade80 0%, #a855f7 100%) !important; + -webkit-background-clip: text !important; + -webkit-text-fill-color: transparent !important; + background-clip: text !important; + margin: 0 auto 0.5rem auto !important; + letter-spacing: -0.03em !important; + display: inline-block !important; + text-align: center !important; + width: auto !important; +} + +/* "AI Orchestration Platform" subtitle */ +#kc-header::after { + content: 'AI Orchestration Platform'; + display: block; + font-size: 1rem; + font-weight: 400; + color: #a1a1aa; + margin-top: 0.5rem; + margin-bottom: 0.75rem; + background: none !important; + -webkit-text-fill-color: #a1a1aa !important; + letter-spacing: normal !important; +} + +/* ============================================ + LOGIN CARD + ============================================ */ + +#kc-content-wrapper, +#kc-content { + position: relative !important; + z-index: 10 !important; + width: 100% !important; + display: flex !important; + flex-direction: column !important; + align-items: center !important; +} + +#kc-form, +.login-pf form { + width: 100% !important; + max-width: 420px !important; +} + +.card-pf { + background-color: rgba(26, 26, 31, 0.8) 
!important; /* Semi-transparent dark */ + backdrop-filter: blur(10px) !important; + border: 1px solid rgba(63, 63, 70, 0.5) !important; + border-radius: 16px !important; + box-shadow: 0 8px 32px rgba(0, 0, 0, 0.4) !important; + padding: 2.5rem !important; + width: 100% !important; + max-width: 420px !important; + margin: 0 auto !important; +} + +/* ============================================ + PAGE TITLE - "Sign in to your account" + ============================================ */ + +#kc-page-title, +.instruction { + font-size: 0.9375rem !important; + font-weight: 400 !important; + color: #a1a1aa !important; + margin-bottom: 1.75rem !important; + text-align: center !important; +} + +/* ============================================ + FORM ELEMENTS + ============================================ */ + +.form-group, +.pf-c-form__group { + margin-bottom: 1.25rem !important; + display: block !important; /* Override grid layout */ + grid-template-columns: none !important; /* Remove two-column layout */ +} + +/* Force single-column layout for registration form */ +.pf-c-form__group-label, +.pf-c-form__group-control { + grid-column: auto !important; + max-width: 100% !important; +} + +label, +.pf-c-form__label { + display: block !important; + font-size: 0.875rem !important; + font-weight: 400 !important; + color: #d4d4d8 !important; /* Lighter gray for labels */ + margin-bottom: 0.5rem !important; + width: 100% !important; +} + +/* Label text wrapper - keep inline with asterisk */ +.pf-c-form__label-text { + display: inline !important; +} + +/* Required field indicator - inline with label */ +.pf-c-form__label-required { + display: inline !important; + color: #f87171 !important; /* Red asterisk */ + margin-left: 0.25rem !important; +} + +/* "Required fields" text */ +.subtitle, +#kc-content-wrapper > p { + font-size: 0.75rem !important; + color: #71717a !important; + margin-bottom: 1rem !important; +} + +/* Input fields - ROUNDED like reference */ +input[type="text"], 
+input[type="email"], +input[type="password"], +input.pf-c-form-control { + width: 100% !important; + padding: 0.75rem 1rem !important; + font-size: 0.9375rem !important; + border: 1px solid rgba(63, 63, 70, 0.6) !important; /* Subtle border */ + border-radius: 10px !important; /* Nicely rounded like reference */ + background-color: rgba(24, 24, 27, 0.8) !important; /* Darker, more opaque */ + background-image: none !important; /* Remove any gradient overlays */ + color: #ffffff !important; + transition: all 0.2s ease-in-out !important; + box-sizing: border-box !important; +} + +/* Aggressive override for password field specifically */ +input[type="password"] { + background-color: rgba(24, 24, 27, 0.8) !important; + background-image: none !important; + background: rgba(24, 24, 27, 0.8) !important; +} + +/* Remove white outline/border from password field wrapper */ +.pf-c-input-group, +.pf-c-form-control__utilities, +div[class*="input-group"] { + background-color: transparent !important; + border: none !important; + box-shadow: none !important; +} + +/* Password field parent containers */ +.pf-c-input-group::before, +.pf-c-input-group::after { + display: none !important; +} + +/* Make sure password input doesn't have extra borders from wrapper */ +.pf-c-input-group input[type="password"] { + border: 1px solid rgba(63, 63, 70, 0.6) !important; + box-shadow: none !important; +} + +input[type="text"]:focus, +input[type="email"]:focus, +input[type="password"]:focus, +input.pf-c-form-control:focus { + outline: none !important; + border-color: rgba(74, 222, 128, 0.5) !important; + background-color: rgba(24, 24, 27, 0.8) !important; + box-shadow: 0 0 0 1px rgba(74, 222, 128, 0.2) !important; +} + +input::placeholder { + color: #71717a !important; +} + +/* Password visibility toggle - no white background */ +.pf-c-button.pf-m-control, +button[type="button"].pf-c-button { + background-color: transparent !important; + border: none !important; + color: #a1a1aa !important; + 
padding: 0.5rem !important; +} + +.pf-c-button.pf-m-control:hover { + background-color: transparent !important; + color: #d4d4d8 !important; +} + +/* Remove any default input borders/underlines */ +input:-webkit-autofill, +input:-webkit-autofill:hover, +input:-webkit-autofill:focus { + -webkit-box-shadow: 0 0 0 1000px rgba(24, 24, 27, 0.6) inset !important; + -webkit-text-fill-color: #ffffff !important; + border-radius: 10px !important; +} + +/* ============================================ + CHECKBOX & LINKS + ============================================ */ + +#kc-form-options { + display: flex !important; + align-items: center !important; + justify-content: space-between !important; + margin: 1rem 0 1.5rem 0 !important; +} + +.checkbox, +.pf-c-check { + display: flex !important; + align-items: center !important; +} + +input[type="checkbox"] { + width: auto !important; + height: 1rem !important; + margin-right: 0.5rem !important; + accent-color: #4ade80 !important; + cursor: pointer !important; + border-radius: 4px !important; +} + +.checkbox label, +.pf-c-check__label { + margin-bottom: 0 !important; + font-size: 0.875rem !important; + color: #d4d4d8 !important; + cursor: pointer !important; +} + +/* Links - blue like reference */ +a { + color: #60a5fa !important; + text-decoration: none !important; + font-size: 0.875rem !important; + transition: color 0.2s ease !important; +} + +a:hover { + color: #93c5fd !important; + text-decoration: underline !important; +} + +/* ============================================ + BUTTONS + ============================================ */ + +/* Primary button - GREEN like reference */ +.btn-primary, +button[type="submit"], +input[type="submit"], +.pf-c-button.pf-m-primary { + width: 100% !important; + padding: 0.75rem 1.5rem !important; + font-size: 1rem !important; + font-weight: 500 !important; + color: #09090b !important; /* Very dark text on green */ + background: linear-gradient(135deg, #4ade80 0%, #22c55e 100%) !important; + 
background-image: linear-gradient(135deg, #4ade80 0%, #22c55e 100%) !important; + border: none !important; + border-radius: 10px !important; /* Match input rounding */ + cursor: pointer !important; + transition: all 0.2s ease-in-out !important; + box-shadow: 0 0 24px rgba(74, 222, 128, 0.25) !important; + text-transform: none !important; +} + +.btn-primary:hover, +button[type="submit"]:hover, +.pf-c-button.pf-m-primary:hover { + background: linear-gradient(135deg, #86efac 0%, #4ade80 100%) !important; + background-image: linear-gradient(135deg, #86efac 0%, #4ade80 100%) !important; + box-shadow: 0 0 32px rgba(74, 222, 128, 0.35) !important; + transform: translateY(-1px) !important; +} + +.btn-primary:active, +button[type="submit"]:active { + transform: translateY(0) !important; +} + +/* ============================================ + REGISTRATION LINK + ============================================ */ + +#kc-registration { + text-align: center !important; + margin-top: 1.5rem !important; + padding-top: 1.5rem !important; + border-top: 1px solid rgba(63, 63, 70, 0.4) !important; +} + +#kc-registration span { + color: #71717a !important; + font-size: 0.875rem !important; +} + +#kc-registration a { + color: #4ade80 !important; + font-weight: 500 !important; + margin-left: 0.25rem !important; +} + +#kc-registration a:hover { + color: #86efac !important; +} + +/* ============================================ + ALERTS & MESSAGES + ============================================ */ + +.alert { + padding: 0.875rem 1rem !important; + border-radius: 10px !important; + margin-bottom: 1.25rem !important; + font-size: 0.875rem !important; + border: 1px solid transparent !important; +} + +.alert-error, +.pf-c-alert.pf-m-danger { + background-color: rgba(239, 68, 68, 0.1) !important; + border-color: rgba(239, 68, 68, 0.3) !important; + color: #fca5a5 !important; +} + +.alert-success, +.pf-c-alert.pf-m-success { + background-color: rgba(74, 222, 128, 0.1) !important; + border-color: 
rgba(74, 222, 128, 0.3) !important; + color: #86efac !important; +} + +.alert-warning, +.pf-c-alert.pf-m-warning { + background-color: rgba(251, 191, 36, 0.1) !important; + border-color: rgba(251, 191, 36, 0.3) !important; + color: #fcd34d !important; +} + +.alert-info, +.pf-c-alert.pf-m-info { + background-color: rgba(96, 165, 250, 0.1) !important; + border-color: rgba(96, 165, 250, 0.3) !important; + color: #93c5fd !important; +} + +/* ============================================ + SOCIAL LOGIN (if enabled) + ============================================ */ + +.kc-social-links { + margin-top: 1.5rem !important; + border-top: 1px solid rgba(63, 63, 70, 0.4) !important; + padding-top: 1.5rem !important; +} + +.kc-social-link { + display: flex !important; + align-items: center !important; + justify-content: center !important; + padding: 0.75rem 1rem !important; + margin-bottom: 0.75rem !important; + background-color: rgba(24, 24, 27, 0.6) !important; + border: 1px solid rgba(63, 63, 70, 0.5) !important; + border-radius: 10px !important; + color: #d4d4d8 !important; + text-decoration: none !important; + transition: all 0.2s ease-in-out !important; + font-size: 0.9375rem !important; +} + +.kc-social-link:hover { + background-color: rgba(39, 39, 42, 0.8) !important; + border-color: rgba(82, 82, 91, 0.6) !important; + transform: translateY(-1px) !important; +} + +/* ============================================ + FOOTER TEXT + ============================================ */ + +#kc-info, +#kc-info-wrapper { + text-align: center !important; + color: #71717a !important; + font-size: 0.75rem !important; + margin-top: 1rem !important; +} + +/* ============================================ + RESPONSIVE + ============================================ */ + +@media (max-width: 768px) { + #kc-header-wrapper::before { + width: 140px !important; + height: 140px !important; + } + + #kc-header { + font-size: 2.25rem !important; + } + + .card-pf { + padding: 1.75rem !important; + margin: 
1rem !important; + } +} + +/* ============================================ + UTILITY OVERRIDES + ============================================ */ + +/* Remove any PatternFly default styles that conflict */ +.pf-c-form-control { + background-image: none !important; +} + +.pf-c-button { + text-transform: none !important; + letter-spacing: normal !important; +} + +/* Ensure z-index stacking works */ +#kc-container { + position: relative; + z-index: 1; +} diff --git a/ushadow/frontend/keycloak-theme/login/resources/img/logo.png b/ushadow/frontend/keycloak-theme/login/resources/img/logo.png new file mode 100644 index 00000000..642149a1 Binary files /dev/null and b/ushadow/frontend/keycloak-theme/login/resources/img/logo.png differ diff --git a/ushadow/frontend/keycloak-theme/login/theme.properties b/ushadow/frontend/keycloak-theme/login/theme.properties new file mode 100644 index 00000000..5a9284d9 --- /dev/null +++ b/ushadow/frontend/keycloak-theme/login/theme.properties @@ -0,0 +1,12 @@ +# Login Theme Configuration +parent=keycloak + +# Import base styles +import=common/keycloak + +# Custom styles +styles=css/login.css + +# Custom logo +# Place your logo in: login/resources/img/logo.png +# Recommended size: 200x60px (transparent background) diff --git a/ushadow/frontend/package-lock.json b/ushadow/frontend/package-lock.json index f936a1ea..54c12865 100644 --- a/ushadow/frontend/package-lock.json +++ b/ushadow/frontend/package-lock.json @@ -19,17 +19,21 @@ "axios": "^1.7.7", "d3": "^7.9.0", "frappe-gantt": "^1.0.4", + "jwt-decode": "^4.0.0", "lucide-react": "^0.446.0", "react": "^18.3.1", "react-dom": "^18.3.1", "react-hook-form": "^7.69.0", + "react-markdown": "^10.1.0", "react-router-dom": "^6.26.2", + "remark-gfm": "^4.0.1", "vibe-kanban-web-companion": "^0.0.5", "zod": "^4.2.1", "zustand": "^5.0.0" }, "devDependencies": { "@playwright/test": "^1.48.0", + "@tailwindcss/typography": "^0.5.19", "@types/react": "^18.3.9", "@types/react-dom": "^18.3.0", 
"@typescript-eslint/eslint-plugin": "^8.7.0", @@ -2522,6 +2526,33 @@ "integrity": "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==", "license": "MIT" }, + "node_modules/@tailwindcss/typography": { + "version": "0.5.19", + "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.19.tgz", + "integrity": "sha512-w31dd8HOx3k9vPtcQh5QHP9GwKcgbMp87j58qi6xgiBnFFtKEAgCWnDw4qUT8aHwkCp8bKvb/KGKWWHedP0AAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "postcss-selector-parser": "6.0.10" + }, + "peerDependencies": { + "tailwindcss": ">=3.0.0 || insiders || >=4.0.0-alpha.20 || >=4.0.0-beta.1" + } + }, + "node_modules/@tailwindcss/typography/node_modules/postcss-selector-parser": { + "version": "6.0.10", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz", + "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/@tanstack/query-core": { "version": "5.90.12", "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.90.12.tgz", @@ -2846,19 +2877,45 @@ "@types/d3-selection": "*" } }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "license": "MIT", + "dependencies": { + "@types/ms": "*" + } + }, "node_modules/@types/estree": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", - "dev": true, "license": "MIT" }, + "node_modules/@types/estree-jsx": { + "version": 
"1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz", + "integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==", + "license": "MIT", + "dependencies": { + "@types/estree": "*" + } + }, "node_modules/@types/geojson": { "version": "7946.0.16", "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", "integrity": "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==", "license": "MIT" }, + "node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, "node_modules/@types/json-schema": { "version": "7.0.15", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", @@ -2866,18 +2923,31 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/mdast": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", + "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", + "license": "MIT" + }, "node_modules/@types/prop-types": { "version": "15.7.15", "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz", "integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==", - "devOptional": true, "license": "MIT" }, "node_modules/@types/react": { "version": "18.3.27", "resolved": 
"https://registry.npmjs.org/@types/react/-/react-18.3.27.tgz", "integrity": "sha512-cisd7gxkzjBKU2GgdYrTdtQx1SORymWyaAFhaxQPK9bYO9ot3Y5OikQRvY0VYQtvwjeQnizCINJAenh/V7MK2w==", - "devOptional": true, "license": "MIT", "dependencies": { "@types/prop-types": "*", @@ -2894,6 +2964,12 @@ "@types/react": "^18.0.0" } }, + "node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "8.50.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.50.0.tgz", @@ -3127,6 +3203,12 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "license": "ISC" + }, "node_modules/@vitejs/plugin-react": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", @@ -3357,6 +3439,16 @@ "proxy-from-env": "^1.1.0" } }, + "node_modules/bail": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", + "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -3550,6 +3642,16 @@ ], "license": "CC-BY-4.0" }, + "node_modules/ccount": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", + "integrity": 
"sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -3567,6 +3669,46 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-html4": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", + "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-reference-invalid": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz", + "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, 
"node_modules/chokidar": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", @@ -3646,6 +3788,16 @@ "node": ">= 0.8" } }, + "node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/commander": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", @@ -3729,7 +3881,6 @@ "version": "3.2.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", - "devOptional": true, "license": "MIT" }, "node_modules/cytoscape": { @@ -4167,7 +4318,6 @@ "version": "4.4.3", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", - "dev": true, "license": "MIT", "dependencies": { "ms": "^2.1.3" @@ -4181,6 +4331,19 @@ } } }, + "node_modules/decode-named-character-reference": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.3.0.tgz", + "integrity": "sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==", + "license": "MIT", + "dependencies": { + "character-entities": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/deep-is": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", @@ -4212,12 +4375,34 @@ "node": ">=0.4.0" } }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/detect-node-es": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==", "license": "MIT" }, + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "license": "MIT", + "dependencies": { + "dequal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/didyoumean": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", @@ -4596,6 +4781,16 @@ "node": ">=4.0" } }, + "node_modules/estree-util-is-identifier-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz", + "integrity": "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -4606,6 +4801,12 @@ "node": ">=0.10.0" } }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "license": "MIT" + }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": 
"https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -5019,12 +5220,62 @@ "node": ">= 0.4" } }, + "node_modules/hast-util-to-jsx-runtime": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.6.tgz", + "integrity": "sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "devlop": "^1.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "hast-util-whitespace": "^3.0.0", + "mdast-util-mdx-expression": "^2.0.0", + "mdast-util-mdx-jsx": "^3.0.0", + "mdast-util-mdxjs-esm": "^2.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0", + "style-to-js": "^1.0.0", + "unist-util-position": "^5.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-whitespace": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", + "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/htm": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/htm/-/htm-3.1.1.tgz", "integrity": "sha512-983Vyg8NwUE7JkZ6NmOqpCZ+sh1bKv2iYTlUkzlWmA5JD2acKoxd4KVxbMmxX/85mtfdnDmTFoNKcg5DGAvxNQ==", "license": "Apache-2.0" }, + "node_modules/html-url-attributes": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/html-url-attributes/-/html-url-attributes-3.0.1.tgz", + "integrity": 
"sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/iconv-lite": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", @@ -5101,6 +5352,12 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "license": "ISC" }, + "node_modules/inline-style-parser": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.7.tgz", + "integrity": "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA==", + "license": "MIT" + }, "node_modules/internmap": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", @@ -5110,6 +5367,30 @@ "node": ">=12" } }, + "node_modules/is-alphabetical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz", + "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-alphanumerical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz", + "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==", + "license": "MIT", + "dependencies": { + "is-alphabetical": "^2.0.0", + "is-decimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/is-arrayish": { "version": "0.3.4", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.4.tgz", @@ -5145,6 +5426,16 @@ "url": "https://github.com/sponsors/ljharb" } }, + 
"node_modules/is-decimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz", + "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -5168,6 +5459,16 @@ "node": ">=0.10.0" } }, + "node_modules/is-hexadecimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz", + "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -5178,6 +5479,18 @@ "node": ">=0.12.0" } }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/isarray": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", @@ -5266,6 +5579,15 @@ "node": ">=6" } }, + "node_modules/jwt-decode": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-4.0.0.tgz", + "integrity": "sha512-+KJGIyHgkGuIq3IEBNftfhW/LfWhXUIY6OmyVWjliu5KH1y0fw7VQ8YndE2O4qZdMSd9SqbnC8GOcZEy0Om7sA==", + "license": "MIT", + "engines": { + "node": ">=18" + } + }, "node_modules/keyv": { "version": "4.5.4", "resolved": 
"https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", @@ -5355,6 +5677,16 @@ "url": "https://tidelift.com/funding/github/npm/loglevel" } }, + "node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/loose-envify": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", @@ -5386,6 +5718,16 @@ "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0-rc" } }, + "node_modules/markdown-table": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz", + "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -5395,70 +5737,915 @@ "node": ">= 0.4" } }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, + "node_modules/mdast-util-find-and-replace": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz", + "integrity": "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==", "license": "MIT", "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" + "@types/mdast": "^4.0.0", + "escape-string-regexp": "^5.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" }, - "engines": { - "node": ">=8.6" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/micromatch/node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, + "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", "license": "MIT", "engines": { - "node": ">=8.6" + "node": ">=12" }, "funding": { - "url": "https://github.com/sponsors/jonschlinkert" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" + "node_modules/mdast-util-from-markdown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz", + "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + 
"@types/unist": "^3.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark": "^4.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "node_modules/mdast-util-gfm": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz", + "integrity": "sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==", "license": "MIT", "dependencies": { - "mime-db": "1.52.0" + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-gfm-autolink-literal": "^2.0.0", + "mdast-util-gfm-footnote": "^2.0.0", + "mdast-util-gfm-strikethrough": "^2.0.0", + "mdast-util-gfm-table": "^2.0.0", + "mdast-util-gfm-task-list-item": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" }, - "engines": { - "node": ">= 0.6" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "license": "ISC", + "node_modules/mdast-util-gfm-autolink-literal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz", + "integrity": 
"sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "ccount": "^2.0.0", + "devlop": "^1.0.0", + "mdast-util-find-and-replace": "^3.0.0", + "micromark-util-character": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-footnote": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz", + "integrity": "sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-strikethrough": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz", + "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz", + "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "markdown-table": "^3.0.0", + "mdast-util-from-markdown": "^2.0.0", + 
"mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-task-list-item": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz", + "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.1.tgz", + "integrity": "sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.2.0.tgz", + "integrity": "sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "parse-entities": "^4.0.0", + "stringify-entities": "^4.0.0", + "unist-util-stringify-position": "^4.0.0", + 
"vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz", + "integrity": "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-phrasing": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", + "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast": { + "version": "13.2.1", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz", + "integrity": "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@ungap/structured-clone": "^1.0.0", + "devlop": "^1.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "trim-lines": "^3.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-markdown": { + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz", + "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^4.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "unist-util-visit": "^5.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz", + "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromark": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz", + "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + 
"micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz", + "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-factory-destination": "^2.0.0", + "micromark-factory-label": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-factory-title": "^2.0.0", + "micromark-factory-whitespace": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-html-tag-name": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-extension-gfm": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz", + "integrity": 
"sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==", + "license": "MIT", + "dependencies": { + "micromark-extension-gfm-autolink-literal": "^2.0.0", + "micromark-extension-gfm-footnote": "^2.0.0", + "micromark-extension-gfm-strikethrough": "^2.0.0", + "micromark-extension-gfm-table": "^2.0.0", + "micromark-extension-gfm-tagfilter": "^2.0.0", + "micromark-extension-gfm-task-list-item": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-autolink-literal": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz", + "integrity": "sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==", + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-footnote": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz", + "integrity": "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-strikethrough": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz", + "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-table": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz", + "integrity": "sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-tagfilter": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz", + "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==", + "license": "MIT", + "dependencies": { + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-task-list-item": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz", + "integrity": "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-factory-destination": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz", + "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-label": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz", + "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-space": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", + "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-title": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz", + "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-whitespace": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz", + "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-character": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", + "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-chunked": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz", + "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-classify-character": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz", + "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-combine-extensions": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz", + "integrity": 
"sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-chunked": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz", + "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-string": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz", + "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-encode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz", + "integrity": 
"sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-html-tag-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz", + "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-normalize-identifier": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz", + "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-resolve-all": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz", + "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-types": "^2.0.0" + } + }, + 
"node_modules/micromark-util-sanitize-uri": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz", + "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-subtokenize": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz", + "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-symbol": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", + "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-types": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz", + "integrity": 
"sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/micromatch/node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": 
"sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", "dependencies": { "brace-expansion": "^2.0.1" }, @@ -5479,7 +6666,6 @@ "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true, "license": "MIT" }, "node_modules/mz": { @@ -5718,6 +6904,31 @@ "node": ">=6" } }, + "node_modules/parse-entities": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.2.tgz", + "integrity": "sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "character-entities-legacy": "^3.0.0", + "character-reference-invalid": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "is-alphanumerical": "^2.0.0", + "is-decimal": "^2.0.0", + "is-hexadecimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/parse-entities/node_modules/@types/unist": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", + "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==", + "license": "MIT" + }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -5992,6 +7203,16 @@ "node": ">= 0.8.0" } }, + "node_modules/property-information": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz", + "integrity": "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, 
"node_modules/proxy-from-env": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", @@ -6098,6 +7319,33 @@ "react": "^16.8.0 || ^17 || ^18 || ^19" } }, + "node_modules/react-markdown": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-10.1.0.tgz", + "integrity": "sha512-qKxVopLT/TyA6BX3Ue5NwabOsAzm0Q7kAPwq6L+wWDwisYs7R8vZ0nRXqq6rkueboxpkjvLGU9fWifiX/ZZFxQ==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "hast-util-to-jsx-runtime": "^2.0.0", + "html-url-attributes": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "remark-parse": "^11.0.0", + "remark-rehype": "^11.0.0", + "unified": "^11.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "@types/react": ">=18", + "react": ">=18" + } + }, "node_modules/react-merge-refs": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/react-merge-refs/-/react-merge-refs-1.1.0.tgz", @@ -6284,6 +7532,72 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/remark-gfm": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.1.tgz", + "integrity": "sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-gfm": "^3.0.0", + "micromark-extension-gfm": "^3.0.0", + "remark-parse": "^11.0.0", + "remark-stringify": "^11.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-parse": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz", + "integrity": 
"sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype": { + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.2.tgz", + "integrity": "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "mdast-util-to-hast": "^13.0.0", + "unified": "^11.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-stringify": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz", + "integrity": "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-to-markdown": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/resizelistener": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/resizelistener/-/resizelistener-1.1.0.tgz", @@ -6527,12 +7841,36 @@ "node": ">=0.10.0" } }, + "node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, 
"node_modules/string_decoder": { "version": "0.10.31", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==", "license": "MIT" }, + "node_modules/stringify-entities": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", + "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", + "license": "MIT", + "dependencies": { + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^3.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/strip-json-comments": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", @@ -6546,6 +7884,24 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/style-to-js": { + "version": "1.1.21", + "resolved": "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.21.tgz", + "integrity": "sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==", + "license": "MIT", + "dependencies": { + "style-to-object": "1.0.14" + } + }, + "node_modules/style-to-object": { + "version": "1.0.14", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.14.tgz", + "integrity": "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw==", + "license": "MIT", + "dependencies": { + "inline-style-parser": "0.2.7" + } + }, "node_modules/sucrase": { "version": "3.35.1", "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.1.tgz", @@ -6729,6 +8085,26 @@ "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", "license": "MIT" }, + "node_modules/trim-lines": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", + "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/trough": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz", + "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/ts-api-utils": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", @@ -6788,6 +8164,93 @@ "integrity": "sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA==", "license": "MIT" }, + "node_modules/unified": { + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz", + "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "bail": "^2.0.0", + "devlop": "^1.0.0", + "extend": "^3.0.0", + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-is": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.1.tgz", + "integrity": "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-position": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.1.0.tgz", + "integrity": "sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit-parents": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.2.tgz", + "integrity": "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/update-browserslist-db": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", @@ -6933,6 
+8396,34 @@ "uuid": "dist/bin/uuid" } }, + "node_modules/vfile": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz", + "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-message": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz", + "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/vibe-kanban-web-companion": { "version": "0.0.5", "resolved": "https://registry.npmjs.org/vibe-kanban-web-companion/-/vibe-kanban-web-companion-0.0.5.tgz", @@ -7154,6 +8645,16 @@ "optional": true } } + }, + "node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } } } } diff --git a/ushadow/frontend/package.json b/ushadow/frontend/package.json index ec0fb65a..260d0475 100644 --- a/ushadow/frontend/package.json +++ b/ushadow/frontend/package.json @@ -27,17 +27,21 @@ "axios": "^1.7.7", "d3": "^7.9.0", "frappe-gantt": "^1.0.4", + "jwt-decode": "^4.0.0", "lucide-react": "^0.446.0", "react": "^18.3.1", "react-dom": "^18.3.1", "react-hook-form": "^7.69.0", + "react-markdown": "^10.1.0", "react-router-dom": "^6.26.2", + "remark-gfm": "^4.0.1", 
"vibe-kanban-web-companion": "^0.0.5", "zod": "^4.2.1", "zustand": "^5.0.0" }, "devDependencies": { "@playwright/test": "^1.48.0", + "@tailwindcss/typography": "^0.5.19", "@types/react": "^18.3.9", "@types/react-dom": "^18.3.0", "@typescript-eslint/eslint-plugin": "^8.7.0", diff --git a/ushadow/frontend/src/App.tsx b/ushadow/frontend/src/App.tsx index 715bce6a..fc91d4e3 100644 --- a/ushadow/frontend/src/App.tsx +++ b/ushadow/frontend/src/App.tsx @@ -2,6 +2,7 @@ import { BrowserRouter, Routes, Route, Navigate } from 'react-router-dom' import { ErrorBoundary } from './components/ErrorBoundary' import { ThemeProvider } from './contexts/ThemeContext' import { AuthProvider, useAuth } from './contexts/AuthContext' +import { KeycloakAuthProvider } from './contexts/KeycloakAuthContext' import { FeatureFlagsProvider } from './contexts/FeatureFlagsContext' import { WizardProvider } from './contexts/WizardContext' import { ChronicleProvider } from './contexts/ChronicleContext' @@ -28,9 +29,12 @@ import Layout from './components/layout/Layout' import RegistrationPage from './pages/RegistrationPage' import LoginPage from './pages/LoginPage' import ErrorPage from './pages/ErrorPage' +import OAuthCallback from './auth/OAuthCallback' import Dashboard from './pages/Dashboard' import WizardStartPage from './pages/WizardStartPage' import ChroniclePage from './pages/ChroniclePage' +import ConversationsPage from './pages/ConversationsPage' +import ConversationDetailPage from './pages/ConversationDetailPage' import RecordingPage from './pages/RecordingPage' import MCPPage from './pages/MCPPage' import AgentZeroPage from './pages/AgentZeroPage' @@ -40,6 +44,7 @@ import SettingsPage from './pages/SettingsPage' import ServiceConfigsPage from './pages/ServiceConfigsPage' import InterfacesPage from './pages/InterfacesPage' import MemoriesPage from './pages/MemoriesPage' +import MemoryDetailPage from './pages/MemoryDetailPage' import ClusterPage from './pages/ClusterPage' import 
SpeakerRecognitionPage from './pages/SpeakerRecognitionPage' import ChatPage from './pages/ChatPage' @@ -87,6 +92,7 @@ function AppContent() { {/* Public Routes */} } /> } /> + } /> } /> {/* Protected Routes - All wrapped in Layout */} @@ -117,16 +123,19 @@ function AppContent() { } /> } /> } /> + } /> + } /> } /> } /> } /> } /> } /> - } /> + } /> } /> } /> } /> } /> + } /> } /> } /> } /> @@ -150,13 +159,15 @@ function App() { - - - - - - - + + + + + + + + + diff --git a/ushadow/frontend/src/auth/OAuthCallback.tsx b/ushadow/frontend/src/auth/OAuthCallback.tsx new file mode 100644 index 00000000..75178cf5 --- /dev/null +++ b/ushadow/frontend/src/auth/OAuthCallback.tsx @@ -0,0 +1,109 @@ +/** + * OAuth Callback Handler + * + * Handles the redirect from Keycloak after login. + * Exchanges authorization code for tokens and redirects to original page. + */ + +import { useEffect, useState, useRef } from 'react' +import { useNavigate } from 'react-router-dom' +import { useKeycloakAuth } from '../contexts/KeycloakAuthContext' +import { TokenManager } from './TokenManager' + +export default function OAuthCallback() { + const [error, setError] = useState(null) + const [processing, setProcessing] = useState(true) + const navigate = useNavigate() + const { handleCallback } = useKeycloakAuth() + const hasProcessed = useRef(false) + + useEffect(() => { + // Prevent duplicate processing (React StrictMode runs effects twice in dev) + if (hasProcessed.current) { + return + } + hasProcessed.current = true + + async function processCallback() { + try { + // Extract code and state from URL + const { code, error: oauthError, error_description, state } = + TokenManager.extractTokensFromCallback(window.location.href) + + // Check for OAuth errors + if (oauthError) { + throw new Error(error_description || oauthError) + } + + // Ensure we have a code + if (!code) { + throw new Error('Missing authorization code') + } + + // Ensure we have state (required for CSRF protection) + if (!state) { 
+ throw new Error('Missing state parameter') + } + + console.log('[OAuthCallback] ๐Ÿ“ Code extracted, clearing URL to prevent reuse...') + // CRITICAL: Clear the URL params immediately to prevent the code from being reused + // if this component remounts (which can happen in React StrictMode or during navigation) + window.history.replaceState({}, document.title, window.location.pathname) + + // Exchange code for tokens (includes state verification) + await handleCallback(code, state) + + // Get return URL or default to dashboard + const returnUrl = sessionStorage.getItem('login_return_url') || '/' + sessionStorage.removeItem('login_return_url') + + console.log('[OAuthCallback] โœ… Success! Redirecting to:', returnUrl) + + + // Small delay to ensure auth state propagates through React context + await new Promise(resolve => setTimeout(resolve, 100)) + + // Redirect to original page + navigate(returnUrl, { replace: true }) + } catch (err) { + console.error('OAuth callback error:', err) + setError(err instanceof Error ? err.message : 'Authentication failed') + setProcessing(false) + } + } + + processCallback() + }, [handleCallback, navigate]) + + if (error) { + return ( +
+
+

+ Authentication Error +

+

{error}

+ +
+
+ ) + } + + if (processing) { + return ( +
+
+
+

Completing sign-in...

+
+
+ ) + } + + return null +} diff --git a/ushadow/frontend/src/auth/ServiceTokenManager.ts b/ushadow/frontend/src/auth/ServiceTokenManager.ts new file mode 100644 index 00000000..11bc00c8 --- /dev/null +++ b/ushadow/frontend/src/auth/ServiceTokenManager.ts @@ -0,0 +1,59 @@ +/** + * Service Token Manager + * + * Manages Chronicle-compatible JWT tokens generated from Keycloak tokens. + * This bridges Keycloak OIDC authentication with legacy JWT-based services. + */ + +const BACKEND_URL = import.meta.env.VITE_BACKEND_URL || 'http://localhost:8000' + +export interface ServiceTokenResponse { + service_token: string + token_type: string + expires_in: number +} + +/** + * Exchange a Keycloak token for a Chronicle-compatible service token. + * + * @param keycloakToken - The Keycloak access token from sessionStorage + * @param audiences - Services this token should be valid for (default: ["ushadow", "chronicle"]) + * @returns Service token that Chronicle and other services can validate + */ +export async function getServiceToken( + keycloakToken: string, + audiences?: string[] +): Promise { + const response = await fetch(`${BACKEND_URL}/api/auth/token/service-token`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${keycloakToken}` + }, + body: JSON.stringify({ audiences }) + }) + + if (!response.ok) { + const error = await response.json().catch(() => ({ detail: 'Unknown error' })) + throw new Error(`Failed to get service token: ${error.detail}`) + } + + const data: ServiceTokenResponse = await response.json() + return data.service_token +} + +/** + * Get a Chronicle-compatible token for the current user. + * Automatically retrieves the Keycloak token from session storage. 
+ * + * @returns Service token ready to use with Chronicle WebSocket + */ +export async function getChronicleToken(): Promise { + const keycloakToken = sessionStorage.getItem('kc_access_token') + + if (!keycloakToken) { + throw new Error('No Keycloak token found. Please log in first.') + } + + return getServiceToken(keycloakToken, ['ushadow', 'chronicle']) +} diff --git a/ushadow/frontend/src/auth/TokenManager.ts b/ushadow/frontend/src/auth/TokenManager.ts new file mode 100644 index 00000000..dbcb2aa6 --- /dev/null +++ b/ushadow/frontend/src/auth/TokenManager.ts @@ -0,0 +1,325 @@ +/** + * Token Manager + * + * Handles OIDC token storage, retrieval, and validation. + * Uses sessionStorage for security (tokens cleared when tab closes). + */ + +import { jwtDecode } from 'jwt-decode' + +const TOKEN_KEY = 'kc_access_token' +const REFRESH_TOKEN_KEY = 'kc_refresh_token' +const ID_TOKEN_KEY = 'kc_id_token' + +interface TokenResponse { + access_token: string + refresh_token?: string + id_token?: string + expires_in?: number + token_type?: string +} + +interface LoginUrlParams { + keycloakUrl: string + realm: string + clientId: string + redirectUri: string + state: string +} + +interface LogoutUrlParams { + keycloakUrl: string + realm: string + redirectUri: string +} + +interface DecodedToken { + exp: number + iat: number + sub: string + preferred_username?: string + email?: string + name?: string + given_name?: string + family_name?: string + [key: string]: any +} + +export class TokenManager { + /** + * Store tokens in sessionStorage + */ + static storeTokens(tokens: TokenResponse): void { + if (tokens.access_token) { + sessionStorage.setItem(TOKEN_KEY, tokens.access_token) + } + if (tokens.refresh_token) { + sessionStorage.setItem(REFRESH_TOKEN_KEY, tokens.refresh_token) + } + if (tokens.id_token) { + sessionStorage.setItem(ID_TOKEN_KEY, tokens.id_token) + } + } + + /** + * Get access token from storage + */ + static getAccessToken(): string | null { + return 
sessionStorage.getItem(TOKEN_KEY) + } + + /** + * Get refresh token from storage + */ + static getRefreshToken(): string | null { + return sessionStorage.getItem(REFRESH_TOKEN_KEY) + } + + /** + * Get ID token from storage + */ + static getIdToken(): string | null { + return sessionStorage.getItem(ID_TOKEN_KEY) + } + + /** + * Clear all tokens from storage + */ + static clearTokens(): void { + sessionStorage.removeItem(TOKEN_KEY) + sessionStorage.removeItem(REFRESH_TOKEN_KEY) + sessionStorage.removeItem(ID_TOKEN_KEY) + } + + /** + * Check if user is authenticated (has valid token) + */ + static isAuthenticated(): boolean { + const token = this.getAccessToken() + if (!token) { + console.log('[TokenManager] No access token found') + return false + } + + try { + const decoded = jwtDecode(token) + const now = Math.floor(Date.now() / 1000) + const isValid = decoded.exp > now + const expiresIn = decoded.exp - now + + console.log('[TokenManager] Token check:', { + isValid, + expiresIn: `${Math.floor(expiresIn / 60)}m ${expiresIn % 60}s`, + expiresAt: new Date(decoded.exp * 1000).toISOString(), + now: new Date(now * 1000).toISOString() + }) + + if (!isValid) { + console.warn('[TokenManager] โš ๏ธ Token EXPIRED!', { + expiredAgo: `${Math.floor(Math.abs(expiresIn) / 60)}m ${Math.abs(expiresIn) % 60}s ago` + }) + } + + return isValid + } catch (error) { + console.error('[TokenManager] Invalid token:', error) + return false + } + } + + /** + * Get user info from decoded token + */ + static getUserInfo(): any | null { + const token = this.getAccessToken() + if (!token) return null + + try { + const decoded = jwtDecode(token) + return { + sub: decoded.sub, + username: decoded.preferred_username, + email: decoded.email, + name: decoded.name, + given_name: decoded.given_name, + family_name: decoded.family_name, + // Include all other claims + ...decoded, + } + } catch (error) { + console.error('Failed to decode token:', error) + return null + } + } + + /** + * Build Keycloak 
login URL with PKCE + */ + static async buildLoginUrl(params: LoginUrlParams): Promise { + const { keycloakUrl, realm, clientId, redirectUri, state } = params + + // Generate PKCE code verifier and challenge + const codeVerifier = this.generateCodeVerifier() + const codeChallenge = await this.generateCodeChallenge(codeVerifier) + + // Store code verifier for token exchange + sessionStorage.setItem('pkce_code_verifier', codeVerifier) + + const authUrl = `${keycloakUrl}/realms/${realm}/protocol/openid-connect/auth` + const queryParams = new URLSearchParams({ + client_id: clientId, + redirect_uri: redirectUri, + response_type: 'code', + scope: 'openid profile email', + state: state, + code_challenge: codeChallenge, + code_challenge_method: 'S256', + }) + + return `${authUrl}?${queryParams.toString()}` + } + + /** + * Build Keycloak logout URL + */ + static buildLogoutUrl(params: LogoutUrlParams): string { + const { keycloakUrl, realm, redirectUri } = params + const logoutUrl = `${keycloakUrl}/realms/${realm}/protocol/openid-connect/logout` + + // Get id_token from storage for proper logout + const idToken = this.getIdToken() + + const queryParams = new URLSearchParams({ + post_logout_redirect_uri: redirectUri, + }) + + // Add id_token_hint if available (recommended by OIDC spec) + if (idToken) { + queryParams.set('id_token_hint', idToken) + } + + return `${logoutUrl}?${queryParams.toString()}` + } + + /** + * Exchange authorization code for tokens via backend + */ + static async exchangeCodeForTokens( + code: string, + backendUrl: string + ): Promise { + const codeVerifier = sessionStorage.getItem('pkce_code_verifier') + if (!codeVerifier) { + throw new Error('Missing PKCE code verifier') + } + + const response = await fetch(`${backendUrl}/api/auth/token`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + code, + code_verifier: codeVerifier, + redirect_uri: `${window.location.origin}/oauth/callback`, + }), + }) + + 
if (!response.ok) { + const error = await response.text() + throw new Error(`Token exchange failed: ${error}`) + } + + const tokens = await response.json() + + // Clean up code verifier + sessionStorage.removeItem('pkce_code_verifier') + + return tokens + } + + /** + * Extract tokens from callback URL + */ + static extractTokensFromCallback(url: string): { + code?: string + state?: string + error?: string + error_description?: string + } { + const urlObj = new URL(url) + const params = new URLSearchParams(urlObj.search) + + return { + code: params.get('code') || undefined, + state: params.get('state') || undefined, + error: params.get('error') || undefined, + error_description: params.get('error_description') || undefined, + } + } + + /** + * Refresh access token using refresh token + */ + static async refreshAccessToken(backendUrl: string): Promise { + const refreshToken = this.getRefreshToken() + if (!refreshToken) { + throw new Error('No refresh token available') + } + + console.log('[TokenManager] Refreshing access token...') + + const response = await fetch(`${backendUrl}/api/auth/refresh`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + refresh_token: refreshToken, + }), + }) + + if (!response.ok) { + const error = await response.text() + console.error('[TokenManager] Token refresh failed:', error) + throw new Error(`Token refresh failed: ${error}`) + } + + const tokens = await response.json() + console.log('[TokenManager] โœ… Token refreshed successfully') + + return tokens + } + + // PKCE helpers + + /** + * Generate PKCE code verifier (random string) + */ + private static generateCodeVerifier(): string { + const array = new Uint8Array(32) + crypto.getRandomValues(array) + return this.base64UrlEncode(array) + } + + /** + * Generate PKCE code challenge (SHA-256 hash of verifier) + */ + private static async generateCodeChallenge(verifier: string): Promise { + const encoder = new TextEncoder() + const data 
= encoder.encode(verifier) + const hash = await crypto.subtle.digest('SHA-256', data) + return this.base64UrlEncode(new Uint8Array(hash)) + } + + /** + * Base64 URL encode (for PKCE) + */ + private static base64UrlEncode(array: Uint8Array): string { + const base64 = btoa(String.fromCharCode(...Array.from(array))) + return base64 + .replace(/\+/g, '-') + .replace(/\//g, '_') + .replace(/=/g, '') + } +} diff --git a/ushadow/frontend/src/auth/config.ts b/ushadow/frontend/src/auth/config.ts new file mode 100644 index 00000000..368a2e26 --- /dev/null +++ b/ushadow/frontend/src/auth/config.ts @@ -0,0 +1,35 @@ +/** + * Keycloak and Backend Configuration + * + * Loaded from environment variables (.env file) + */ + +/** + * Get backend URL based on current origin. + * + * When accessing via Tailscale (e.g., https://ushadow.spangled-kettle.ts.net), + * the backend is accessible at the same origin through /api routes. + * When accessing locally (localhost/127.0.0.1), use the configured backend port. 
+ */ +function getBackendUrl(): string { + const origin = window.location.origin + + // If accessing via Tailscale (*.ts.net), use the same origin + // Tailscale serve routes /api to the backend + if (origin.includes('.ts.net')) { + return origin + } + + // Otherwise use the configured backend URL (local development) + return import.meta.env.VITE_BACKEND_URL || 'http://localhost:8000' +} + +export const keycloakConfig = { + url: import.meta.env.VITE_KEYCLOAK_URL || 'http://localhost:8081', + realm: import.meta.env.VITE_KEYCLOAK_REALM || 'ushadow', + clientId: import.meta.env.VITE_KEYCLOAK_CLIENT_ID || 'ushadow-frontend', +} + +export const backendConfig = { + url: getBackendUrl(), +} diff --git a/ushadow/frontend/src/components/DeployToK8sModal.tsx b/ushadow/frontend/src/components/DeployToK8sModal.tsx index 389a4a5a..0be020d7 100644 --- a/ushadow/frontend/src/components/DeployToK8sModal.tsx +++ b/ushadow/frontend/src/components/DeployToK8sModal.tsx @@ -46,6 +46,11 @@ export default function DeployToK8sModal({ isOpen, onClose, cluster: initialClus const [error, setError] = useState(null) const [deploymentResult, setDeploymentResult] = useState(null) + // Ingress configuration + const [ingressEnabled, setIngressEnabled] = useState(false) + const [ingressHostname, setIngressHostname] = useState('') + const [customHostname, setCustomHostname] = useState(false) + useEffect(() => { if (isOpen) { // If service is preselected, load env vars directly @@ -62,6 +67,26 @@ export default function DeployToK8sModal({ isOpen, onClose, cluster: initialClus } }, [isOpen, preselectedServiceId]) + // Auto-configure ingress based on cluster settings + useEffect(() => { + if (selectedService && selectedCluster) { + const hasIngressConfig = selectedCluster.ingress_domain && selectedCluster.ingress_domain.length > 0 + const shouldEnable = hasIngressConfig && (selectedCluster.ingress_enabled_by_default || false) + + setIngressEnabled(shouldEnable) + + // Auto-generate hostname + if 
(hasIngressConfig) { + const serviceName = selectedService.service_name + .toLowerCase() + .replace(/[^a-z0-9-]/g, '-') + const autoHostname = `${serviceName}.${selectedCluster.ingress_domain}` + setIngressHostname(autoHostname) + setCustomHostname(false) + } + } + }, [selectedService, selectedCluster]) + const loadServices = async () => { try { // Use servicesApi instead of kubernetesApi to get installed compose services @@ -265,7 +290,15 @@ export default function DeployToK8sModal({ isOpen, onClose, cluster: initialClus { service_id: selectedService.service_id, namespace: namespace, - config_id: instanceId + config_id: instanceId, + k8s_spec: ingressEnabled ? { + ingress: { + enabled: true, + host: ingressHostname, + path: "/", + ingressClassName: selectedCluster.ingress_class || "nginx" + } + } : undefined } ) @@ -416,6 +449,63 @@ export default function DeployToK8sModal({ isOpen, onClose, cluster: initialClus

+ {/* Ingress Configuration */} + {selectedCluster?.ingress_domain && ( +
+
+ +
+ + {ingressEnabled && ( +
+
+ + {!customHostname ? ( +
+ + {ingressHostname} + + +
+ ) : ( + { + const value = e.target.value + if (/^[a-z0-9.-]*$/.test(value)) { + setIngressHostname(value) + } + }} + placeholder={`service.${selectedCluster.ingress_domain}`} + className="flex-1 px-3 py-1 text-sm rounded border border-neutral-300 dark:border-neutral-600 bg-white dark:bg-neutral-700 text-neutral-900 dark:text-neutral-100 font-mono" + data-testid="deploy-ingress-hostname-input" + /> + )} +
+

+ Accessible at: http://{ingressHostname} +

+
+ )} +
+ )} + {/* Environment Variables */}
+

- Select providers for each service capability + {activeSubTab === 'api' + ? 'API services and their workers' + : 'User interface services'}

{/* Service Cards Grid */} -
- {composeTemplates.map((template) => { - // Find ALL configs for this template - const templateConfigs = instances.filter((i) => i.template_id === template.id) - // Show the first config (or null if none) - const config = templateConfigs[0] || null - const consumerId = config?.id || template.id - - // Get service status from Docker - const serviceName = template.id.includes(':') ? template.id.split(':').pop()! : template.id - const status = serviceStatuses[serviceName] - - // Filter wiring for this consumer - const consumerWiring = wiring.filter((w) => w.target_config_id === consumerId) - - // Get deployments for this service - const serviceDeployments = deployments.filter((d) => d.service_id === template.id) - - return ( - onAddConfig(template.id)} - onWiringChange={(capability, sourceConfigId) => - onWiringChange(consumerId, capability, sourceConfigId) - } - onWiringClear={(capability) => onWiringClear(consumerId, capability)} - onConfigCreate={onConfigCreate} - onEditConfig={onEditConfig} - onDeleteConfig={onDeleteConfig} - onUpdateConfig={onUpdateConfig} - onStart={() => onStart(template.id)} - onStop={() => onStop(template.id)} - onEdit={() => onEdit(template.id)} - onDeploy={(target) => onDeploy(template.id, target)} - /> - ) - })} -
+ {activeSubTab === 'api' ? ( +
+ {groupedApiServices.map(({ api, workers }) => ( +
+ {/* API Service Card */} + {renderServiceCard(api)} + + {/* Worker Cards - shown in the same column as their API */} + {workers.length > 0 && ( +
+ {workers.map((worker) => renderServiceCard(worker))} +
+ )} +
+ ))} +
+ ) : ( +
+ {uiServices.map((template) => renderServiceCard(template))} +
+ )} + + {currentServices.length === 0 && ( + + )} ) } diff --git a/ushadow/frontend/src/contexts/KeycloakAuthContext.tsx b/ushadow/frontend/src/contexts/KeycloakAuthContext.tsx new file mode 100644 index 00000000..4d430cd4 --- /dev/null +++ b/ushadow/frontend/src/contexts/KeycloakAuthContext.tsx @@ -0,0 +1,261 @@ +/** + * Keycloak Authentication Context + * + * Provides OIDC authentication using Keycloak for federated auth + * (voice message sharing, external user access) + * + * Works alongside the existing AuthContext (legacy email/password) + */ + +import { createContext, useContext, useState, useEffect, ReactNode } from 'react' +import { TokenManager } from '../auth/TokenManager' +import { keycloakConfig, backendConfig } from '../auth/config' + +interface KeycloakAuthContextType { + isAuthenticated: boolean + isLoading: boolean + userInfo: any | null + login: (redirectUri?: string) => void + register: (redirectUri?: string) => void + logout: (redirectUri?: string) => void + getAccessToken: () => string | null + handleCallback: (code: string, state: string) => Promise +} + +const KeycloakAuthContext = createContext(undefined) + +export function KeycloakAuthProvider({ children }: { children: ReactNode }) { + // Initialize auth state synchronously to prevent flash of unauthenticated state + const initialAuthState = TokenManager.isAuthenticated() + const initialUserInfo = initialAuthState ? 
TokenManager.getUserInfo() : null + + const [isAuthenticated, setIsAuthenticated] = useState(initialAuthState) + const [isLoading, setIsLoading] = useState(false) // No loading needed - we check synchronously + const [userInfo, setUserInfo] = useState(initialUserInfo) + + useEffect(() => { + // Re-check auth state on mount (in case token expired between initial check and mount) + const authenticated = TokenManager.isAuthenticated() + if (authenticated !== isAuthenticated) { + setIsAuthenticated(authenticated) + if (authenticated) { + const info = TokenManager.getUserInfo() + setUserInfo(info) + } else { + setUserInfo(null) + } + } + + // Set up automatic token refresh + // Refresh token 60 seconds before it expires + const setupTokenRefresh = () => { + try { + const token = TokenManager.getAccessToken() + if (!token) { + console.log('[KC-AUTH] No token found, skipping refresh setup') + return undefined + } + + const decoded = TokenManager.getUserInfo() + if (!decoded?.exp) { + console.log('[KC-AUTH] No expiration in token, skipping refresh setup') + return undefined + } + + const now = Math.floor(Date.now() / 1000) + const expiresIn = decoded.exp - now + + // If token is already expired or expires in less than 0 seconds, don't set up refresh + if (expiresIn <= 0) { + console.warn('[KC-AUTH] Token already expired, skipping refresh setup') + setIsAuthenticated(false) + setUserInfo(null) + return undefined + } + + const refreshAt = Math.max(0, expiresIn - 60) // Refresh 60s before expiry + + console.log('[KC-AUTH] Setting up token refresh:', { + expiresIn: `${Math.floor(expiresIn / 60)}m ${expiresIn % 60}s`, + refreshIn: `${Math.floor(refreshAt / 60)}m ${refreshAt % 60}s` + }) + + const timeoutId = setTimeout(async () => { + try { + console.log('[KC-AUTH] Refreshing token...') + if (!backendConfig?.url) { + throw new Error('Backend URL not configured') + } + const newTokens = await TokenManager.refreshAccessToken(backendConfig.url) + 
TokenManager.storeTokens(newTokens) + console.log('[KC-AUTH] โœ… Token refreshed successfully') + + // Update context state + setIsAuthenticated(true) + const info = TokenManager.getUserInfo() + setUserInfo(info) + + // Schedule next refresh + setupTokenRefresh() + } catch (error) { + console.error('[KC-AUTH] โŒ Token refresh failed:', error) + // Token refresh failed - clear auth state (will trigger redirect to login) + setIsAuthenticated(false) + setUserInfo(null) + TokenManager.clearTokens() + } + }, refreshAt * 1000) + + return () => { + console.log('[KC-AUTH] Cleaning up token refresh timeout') + clearTimeout(timeoutId) + } + } catch (error) { + console.error('[KC-AUTH] Error setting up token refresh:', error) + return undefined + } + } + + const cleanup = setupTokenRefresh() + return () => { + if (cleanup) cleanup() + } + }, []) + + const login = async (redirectUri?: string) => { + // Save current location for return after login + const returnUrl = redirectUri || window.location.pathname + window.location.search + sessionStorage.setItem('login_return_url', returnUrl) + + // Generate CSRF state + const state = generateState() + sessionStorage.setItem('oauth_state', state) + + // Build Keycloak login URL (async because of PKCE SHA-256) + const loginUrl = await TokenManager.buildLoginUrl({ + keycloakUrl: keycloakConfig.url, + realm: keycloakConfig.realm, + clientId: keycloakConfig.clientId, + redirectUri: `${window.location.origin}/oauth/callback`, + state, + }) + + // Redirect to Keycloak + window.location.href = loginUrl + } + + const register = async (redirectUri?: string) => { + // Save current location for return after registration + const returnUrl = redirectUri || window.location.pathname + window.location.search + sessionStorage.setItem('login_return_url', returnUrl) + + // Generate CSRF state + const state = generateState() + sessionStorage.setItem('oauth_state', state) + + // Build Keycloak registration URL - uses /registrations endpoint instead of 
/auth + const registrationUrl = await TokenManager.buildLoginUrl({ + keycloakUrl: keycloakConfig.url, + realm: keycloakConfig.realm, + clientId: keycloakConfig.clientId, + redirectUri: `${window.location.origin}/oauth/callback`, + state, + }) + + // Replace /auth with /registrations to trigger Keycloak registration screen + const registrationEndpoint = registrationUrl.replace('/protocol/openid-connect/auth', '/protocol/openid-connect/registrations') + + // Redirect to Keycloak registration + window.location.href = registrationEndpoint + } + + const logout = (redirectUri?: string) => { + // Build logout URL FIRST (needs id_token from storage) + // Important: Keycloak requires exact match, so add trailing slash to origin + const defaultRedirectUri = `${window.location.origin}/` + const logoutUrl = TokenManager.buildLogoutUrl({ + keycloakUrl: keycloakConfig.url, + realm: keycloakConfig.realm, + redirectUri: redirectUri || defaultRedirectUri, + }) + + // THEN clear tokens (after we've read id_token for logout URL) + TokenManager.clearTokens() + setIsAuthenticated(false) + setUserInfo(null) + + // Redirect to Keycloak logout + window.location.href = logoutUrl + } + + const handleCallback = async (code: string, state: string) => { + // Verify state (CSRF protection) + const savedState = sessionStorage.getItem('oauth_state') + if (state !== savedState) { + throw new Error('Invalid state parameter - possible CSRF attack') + } + + // Exchange code for tokens via backend + const tokens = await TokenManager.exchangeCodeForTokens(code, backendConfig.url) + console.log('[KC-AUTH] Received tokens:', { + hasAccessToken: !!tokens.access_token, + hasRefreshToken: !!tokens.refresh_token, + hasIdToken: !!tokens.id_token, + tokenPreview: tokens.access_token?.substring(0, 30) + '...' 
+ }) + + // Store tokens + TokenManager.storeTokens(tokens) + console.log('[KC-AUTH] Tokens stored in sessionStorage') + + // Verify storage worked + const storedToken = sessionStorage.getItem('kc_access_token') + console.log('[KC-AUTH] Verified storage:', { + hasStoredToken: !!storedToken, + storedTokenPreview: storedToken?.substring(0, 30) + '...' + }) + + // Update auth state + setIsAuthenticated(true) + const info = TokenManager.getUserInfo() + setUserInfo(info) + + // Clean up + sessionStorage.removeItem('oauth_state') + } + + const getAccessToken = () => { + return TokenManager.getAccessToken() + } + + return ( + + {children} + + ) +} + +export function useKeycloakAuth() { + const context = useContext(KeycloakAuthContext) + if (context === undefined) { + throw new Error('useKeycloakAuth must be used within a KeycloakAuthProvider') + } + return context +} + +// Helper function +function generateState(): string { + return Math.random().toString(36).substring(2, 15) + + Math.random().toString(36).substring(2, 15) +} diff --git a/ushadow/frontend/src/hooks/index.ts b/ushadow/frontend/src/hooks/index.ts index 218eac1e..bb659ee4 100644 --- a/ushadow/frontend/src/hooks/index.ts +++ b/ushadow/frontend/src/hooks/index.ts @@ -50,3 +50,7 @@ export type { UseServiceCatalogResult } from './useServiceCatalog'; export { useDeploymentActions } from './useDeploymentActions'; export type { UseDeploymentActionsResult } from './useDeploymentActions'; + +// Sharing hooks +export { useShare } from './useShare'; +export type { UseShareOptions, UseShareReturn } from './useShare'; diff --git a/ushadow/frontend/src/hooks/useConversationDetail.ts b/ushadow/frontend/src/hooks/useConversationDetail.ts new file mode 100644 index 00000000..84a915b6 --- /dev/null +++ b/ushadow/frontend/src/hooks/useConversationDetail.ts @@ -0,0 +1,65 @@ +import { useQuery } from '@tanstack/react-query' +import { chronicleConversationsApi } from '../services/chronicleApi' +import { myceliaApi } from 
'../services/api' +import type { Conversation } from '../services/chronicleApi' +import type { ConversationSource } from './useConversations' + +/** + * Fetch a single conversation from Chronicle + */ +export function useChronicleConversation(id: string, options?: { enabled?: boolean }) { + return useQuery({ + queryKey: ['conversation', 'chronicle', id], + queryFn: async () => { + console.log('[useChronicleConversation] Fetching conversation:', id) + const response = await chronicleConversationsApi.getById(id) + console.log('[useChronicleConversation] Response:', response) + + // Handle different response formats + const data = response.data + if (data && typeof data === 'object' && 'conversation' in data) { + return (data as any).conversation as Conversation + } + return data as Conversation + }, + enabled: options?.enabled !== false && !!id, + retry: false, + staleTime: 60000, // Consider fresh for 60s + }) +} + +/** + * Fetch a single conversation from Mycelia + */ +export function useMyceliaConversation(id: string, options?: { enabled?: boolean }) { + return useQuery({ + queryKey: ['conversation', 'mycelia', id], + queryFn: async () => { + console.log('[useMyceliaConversation] Fetching conversation:', id) + const response = await myceliaApi.getConversation(id) + console.log('[useMyceliaConversation] Response:', response) + return response.data as Conversation + }, + enabled: options?.enabled !== false && !!id, + retry: false, + staleTime: 60000, + }) +} + +/** + * Fetch a conversation from the specified source + */ +export function useConversationDetail(id: string, source: ConversationSource) { + const chronicle = useChronicleConversation(id, { enabled: source === 'chronicle' }) + const mycelia = useMyceliaConversation(id, { enabled: source === 'mycelia' }) + + const activeQuery = source === 'chronicle' ? 
chronicle : mycelia + + return { + conversation: activeQuery.data, + isLoading: activeQuery.isLoading, + error: activeQuery.error, + refetch: activeQuery.refetch, + source, + } +} diff --git a/ushadow/frontend/src/hooks/useConversations.ts b/ushadow/frontend/src/hooks/useConversations.ts new file mode 100644 index 00000000..65fd3d44 --- /dev/null +++ b/ushadow/frontend/src/hooks/useConversations.ts @@ -0,0 +1,111 @@ +import { useQuery } from '@tanstack/react-query' +import { chronicleApi, myceliaApi } from '../services/api' +import { chronicleConversationsApi } from '../services/chronicleApi' +import type { Conversation } from '../services/chronicleApi' + +export type ConversationSource = 'chronicle' | 'mycelia' + +interface ConversationsResponse { + conversations: Conversation[] + count: number +} + +/** + * Fetch conversations from Chronicle + */ +export function useChronicleConversations(options?: { enabled?: boolean }) { + return useQuery({ + queryKey: ['conversations', 'chronicle'], + queryFn: async () => { + const response = await chronicleConversationsApi.getAll() + // Handle different response formats + const data = response.data + + // If data is already an array, return it + if (Array.isArray(data)) { + return data as Conversation[] + } + + // If data has a conversations field, return that + if (data && typeof data === 'object' && 'conversations' in data) { + return (data as any).conversations as Conversation[] + } + + // Otherwise return empty array + console.warn('[useChronicleConversations] Unexpected response format:', data) + return [] + }, + enabled: options?.enabled !== false, + retry: false, + staleTime: 30000, // Consider fresh for 30s + }) +} + +/** + * Fetch conversations from Mycelia + */ +export function useMyceliaConversations(options?: { enabled?: boolean }) { + return useQuery({ + queryKey: ['conversations', 'mycelia'], + queryFn: async () => { + try { + const response = await myceliaApi.getConversations({ limit: 25 }) + const data = 
response.data + + // If data has conversations field, return that + if (data && typeof data === 'object' && 'conversations' in data) { + return ((data as any).conversations || []) as Conversation[] + } + + // If data is already an array, return it + if (Array.isArray(data)) { + return data as Conversation[] + } + + // Otherwise return empty array + console.warn('[useMyceliaConversations] Unexpected response format:', data) + return [] + } catch (error) { + console.error('[useMyceliaConversations] Error fetching conversations:', error) + return [] + } + }, + enabled: options?.enabled !== false, + retry: false, + staleTime: 30000, + }) +} + +/** + * Fetch conversations from multiple sources + * Returns a map of source -> conversations + */ +export function useMultiSourceConversations(enabledSources: ConversationSource[]) { + const chronicleEnabled = enabledSources.includes('chronicle') + const myceliaEnabled = enabledSources.includes('mycelia') + + const chronicle = useChronicleConversations({ enabled: chronicleEnabled }) + const mycelia = useMyceliaConversations({ enabled: myceliaEnabled }) + + // Ensure data is always an array + const chronicleData = Array.isArray(chronicle.data) ? chronicle.data : [] + const myceliaData = Array.isArray(mycelia.data) ? 
mycelia.data : [] + + return { + chronicle: { + data: chronicleData, + isLoading: chronicle.isLoading, + error: chronicle.error, + refetch: chronicle.refetch, + }, + mycelia: { + data: myceliaData, + isLoading: mycelia.isLoading, + error: mycelia.error, + refetch: mycelia.refetch, + }, + // Aggregate states + anyLoading: chronicle.isLoading || mycelia.isLoading, + allLoaded: (!chronicleEnabled || !chronicle.isLoading) && (!myceliaEnabled || !mycelia.isLoading), + } +} diff --git a/ushadow/frontend/src/hooks/useDashboardData.ts b/ushadow/frontend/src/hooks/useDashboardData.ts new file mode 100644 index 00000000..d7559bb8 --- /dev/null +++ b/ushadow/frontend/src/hooks/useDashboardData.ts @@ -0,0 +1,18 @@ +import { useQuery } from '@tanstack/react-query' +import { dashboardApi, type DashboardData } from '../services/api' + +/** + * Hook to fetch dashboard data (stats + recent conversations & memories). + * Automatically refetches every 30 seconds to keep data fresh. + */ +export function useDashboardData(conversationLimit = 10, memoryLimit = 10) { + return useQuery({ + queryKey: ['dashboard', 'data', conversationLimit, memoryLimit], + queryFn: async () => { + const response = await dashboardApi.getDashboardData(conversationLimit, memoryLimit) + return response.data + }, + refetchInterval: 30000, // Refresh every 30 seconds + staleTime: 10000, // Consider data stale after 10 seconds + }) +} diff --git a/ushadow/frontend/src/hooks/useServiceConfigData.ts b/ushadow/frontend/src/hooks/useServiceConfigData.ts index d7a192ce..405ef07f 100644 --- a/ushadow/frontend/src/hooks/useServiceConfigData.ts +++ b/ushadow/frontend/src/hooks/useServiceConfigData.ts @@ -65,8 +65,8 @@ export function useServiceConfigData(): UseServiceConfigDataResult { return result }, - staleTime: 0, // Never cache - always fetch fresh data - gcTime: 0, // Don't keep inactive data in cache + staleTime: 5 * 60 * 1000, // Consider data fresh for 5 minutes + gcTime: 10 * 60 * 1000, // Keep in cache for 10 
minutes after component unmounts refetchOnWindowFocus: false, // Only refetch on manual refresh retry: 1, }) diff --git a/ushadow/frontend/src/hooks/useShare.ts b/ushadow/frontend/src/hooks/useShare.ts new file mode 100644 index 00000000..7dc535b4 --- /dev/null +++ b/ushadow/frontend/src/hooks/useShare.ts @@ -0,0 +1,40 @@ +import { useState } from 'react' + +export interface UseShareOptions { + resourceType: 'conversation' | 'memory' | 'collection' + resourceId: string +} + +export interface UseShareReturn { + isShareDialogOpen: boolean + openShareDialog: () => void + closeShareDialog: () => void + resourceType: 'conversation' | 'memory' | 'collection' + resourceId: string +} + +/** + * Hook for managing share dialog state. + * + * Usage: + * ```tsx + * const shareProps = useShare({ + * resourceType: 'conversation', + * resourceId: conversationId + * }) + * + * + * + * ``` + */ +export function useShare({ resourceType, resourceId }: UseShareOptions): UseShareReturn { + const [isShareDialogOpen, setIsShareDialogOpen] = useState(false) + + return { + isShareDialogOpen, + openShareDialog: () => setIsShareDialogOpen(true), + closeShareDialog: () => setIsShareDialogOpen(false), + resourceType, + resourceId, + } +} diff --git a/ushadow/frontend/src/hooks/useWebRecording.ts b/ushadow/frontend/src/hooks/useWebRecording.ts index 9af6eba2..7a56e026 100644 --- a/ushadow/frontend/src/hooks/useWebRecording.ts +++ b/ushadow/frontend/src/hooks/useWebRecording.ts @@ -311,25 +311,101 @@ export const useWebRecording = (): WebRecordingReturn => { // ===== DUAL-STREAM MODE ===== console.log('Starting dual-stream recording') - // Get Chronicle direct URL for WebSocket - const backendUrl = await getChronicleDirectUrl() - - // Create and connect adapter - const adapter = new ChronicleWebSocketAdapter({ - backendUrl, - token, - deviceName: 'ushadow-dual-stream', - mode: 'dual-stream' - }) + let displayStream: MediaStream | null = null + try { + // IMPORTANT: Request display media FIRST 
while still in user gesture context + // getDisplayMedia() must be called synchronously from a user gesture + // Doing ANY await before this call will cause the browser to block the picker + setLegacyStep('display') + console.log('๐Ÿ–ฅ๏ธ Step 1: Requesting display media (MUST be first for user gesture)') + displayStream = await navigator.mediaDevices.getDisplayMedia({ + audio: { + sampleRate: 16000, + channelCount: 1, + echoCancellation: false, + noiseSuppression: false, + autoGainControl: false + }, + video: true // Required for picker - will be stopped immediately + }) + + // IMPORTANT: Don't stop/remove video tracks - this can end the audio track too! + // Instead, keep the video track running but we won't use it + // The browser requires video to be requested for getDisplayMedia to work properly + const videoTracks = displayStream.getVideoTracks() + console.log('๐ŸŽฌ Keeping', videoTracks.length, 'video tracks running (required for audio)') + + // Verify we got audio + const audioTracks = displayStream.getAudioTracks() + console.log('๐Ÿ”Š Display stream audio tracks:', audioTracks.length) + if (audioTracks.length > 0) { + console.log('๐Ÿ”Š Audio track details:', { + label: audioTracks[0].label, + enabled: audioTracks[0].enabled, + muted: audioTracks[0].muted, + readyState: audioTracks[0].readyState, + settings: audioTracks[0].getSettings() + }) + } + + if (audioTracks.length === 0) { + displayStream.getTracks().forEach(t => t.stop()) + throw new Error('No audio track found. 
When selecting a tab/window, make sure to CHECK the "Share tab audio" or "Share system audio" checkbox at the bottom of the picker!') + } - await adapter.connect() - adapterRef.current = adapter + // Now that we have display permission, do other async operations + // Use selected destinations from state (like streaming mode) + const destinations: ExposedUrl[] = availableDestinations.filter(d => + selectedDestinationIds.includes(d.instance_id) + ) - // Send audio-start - await adapter.sendAudioStart('dual-stream') + if (destinations.length === 0) { + displayStream.getTracks().forEach(t => t.stop()) + throw new Error('No audio destinations selected. Please select at least one destination to record.') + } - // Start dual-stream recording - await dualStream.startRecording('dual-stream') + console.log('Using selected audio destinations:', destinations.map(d => d.instance_name)) + + // Build relay WebSocket URL (use relay instead of direct connection) + const relayDestinations = destinations.map(dest => ({ + name: dest.instance_name, + url: getAudioPath(dest.url) + })) + + const wsProtocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:' + const relayBaseUrl = BACKEND_URL ? 
BACKEND_URL.replace(/^https?:/, wsProtocol) : `${wsProtocol}//${window.location.host}` + const destinationsParam = encodeURIComponent(JSON.stringify(relayDestinations)) + const tokenParam = encodeURIComponent(token) + const backendUrl = `${relayBaseUrl}/ws/audio/relay?destinations=${destinationsParam}&token=${tokenParam}` + + console.log('Dual-stream connecting via relay:', backendUrl.replace(token, 'REDACTED')) + + // Create and connect adapter (will use relay instead of direct connection) + const adapter = new ChronicleWebSocketAdapter({ + backendUrl, + token, + deviceName: 'ushadow-dual-stream', + mode: 'dual-stream' + }) + + await adapter.connect() + adapterRef.current = adapter + + // Send audio-start + await adapter.sendAudioStart('dual-stream') + + // Start dual-stream recording (will request microphone internally) + console.log('๐ŸŽ™๏ธ Step 3: Starting dual-stream recording (will request microphone)...') + await dualStream.startRecording('dual-stream', displayStream) + console.log('โœ… Dual-stream recording started successfully') + } catch (error) { + // Cleanup display stream if it was captured + if (displayStream) { + console.error('โŒ Dual-stream setup failed, cleaning up display stream:', error) + displayStream.getTracks().forEach(t => t.stop()) + } + throw error // Re-throw to be caught by outer try-catch + } // Start duration timer durationIntervalRef.current = setInterval(() => { @@ -526,6 +602,9 @@ export const useWebRecording = (): WebRecordingReturn => { // Cleanup dual-stream adapterRef.current?.close() adapterRef.current = null + // Set error state for dual-stream + setLegacyStep('error') + setLegacyError(error instanceof Error ? error.message : 'Dual-stream recording failed') } else { setLegacyStep('error') setLegacyError(error instanceof Error ? error.message : 'Recording failed') @@ -639,9 +718,9 @@ export const useWebRecording = (): WebRecordingReturn => { const recordingDuration = isDualStream ? (dualStream.isRecording ? 
legacyDuration : 0) : legacyDuration const error = isDualStream ? (dualStream.error?.message || null) : legacyError - // Get analyser - for dual-stream, try to get from mixer + // Get analyser - for dual-stream, get the mixed output analyser const analyser = isDualStream - ? dualStream.getAnalyser('microphone') + ? dualStream.getAnalyser('mixed') : legacyAnalyser return { diff --git a/ushadow/frontend/src/modules/dual-stream-audio/adapters/chronicleAdapter.ts b/ushadow/frontend/src/modules/dual-stream-audio/adapters/chronicleAdapter.ts index 828b8804..a9407a0e 100644 --- a/ushadow/frontend/src/modules/dual-stream-audio/adapters/chronicleAdapter.ts +++ b/ushadow/frontend/src/modules/dual-stream-audio/adapters/chronicleAdapter.ts @@ -35,10 +35,14 @@ export class ChronicleWebSocketAdapter { const { backendUrl, token, deviceName = 'webui-dual-stream' } = this.config // Build WebSocket URL - // Determine protocol from the backend URL, not the current page let wsUrl: string - if (backendUrl && backendUrl.startsWith('http')) { + // Check if backendUrl is already a complete WebSocket URL (relay or direct) + if (backendUrl && (backendUrl.startsWith('ws://') || backendUrl.startsWith('wss://'))) { + // Already a complete WebSocket URL (e.g., from relay) + wsUrl = backendUrl + console.log('๐Ÿ”— Using pre-built WebSocket URL (relay)') + } else if (backendUrl && backendUrl.startsWith('http')) { // Extract protocol from backendUrl (https:// -> wss://, http:// -> ws://) const protocol = backendUrl.startsWith('https://') ? 
'wss:' : 'ws:' const host = backendUrl.replace(/^https?:\/\//, '') @@ -53,7 +57,7 @@ export class ChronicleWebSocketAdapter { wsUrl = `${protocol}//${window.location.host}/ws_pcm?token=${token}&device_name=${deviceName}` } - console.log('๐Ÿ”— Connecting to Chronicle WebSocket:', wsUrl) + console.log('๐Ÿ”— Connecting to Chronicle WebSocket:', wsUrl.replace(/token=[^&]+/, 'token=REDACTED')) this.ws = new WebSocket(wsUrl) diff --git a/ushadow/frontend/src/modules/dual-stream-audio/core/audioMixer.ts b/ushadow/frontend/src/modules/dual-stream-audio/core/audioMixer.ts index ec8d8281..d8461126 100644 --- a/ushadow/frontend/src/modules/dual-stream-audio/core/audioMixer.ts +++ b/ushadow/frontend/src/modules/dual-stream-audio/core/audioMixer.ts @@ -21,12 +21,14 @@ export class AudioStreamMixer { private streams: Map private merger: ChannelMergerNode | null private destination: MediaStreamAudioDestinationNode | null + private mixedAnalyser: AnalyserNode | null constructor(sampleRate: number = 16000) { this.audioContext = new AudioContext({ sampleRate }) this.streams = new Map() this.merger = null this.destination = null + this.mixedAnalyser = null } /** @@ -41,11 +43,15 @@ export class AudioStreamMixer { // Create merger node (supports up to 6 inputs by default) this.merger = this.audioContext.createChannelMerger(2) + // Create analyser for mixed output (for visualization) + this.mixedAnalyser = createAnalyser(this.audioContext, 256) + // Create destination for mixed output this.destination = this.audioContext.createMediaStreamDestination() - // Connect merger to destination - this.merger.connect(this.destination) + // Connect: merger โ†’ mixedAnalyser โ†’ destination + this.merger.connect(this.mixedAnalyser) + this.mixedAnalyser.connect(this.destination) } /** @@ -164,6 +170,13 @@ export class AudioStreamMixer { return null } + /** + * Get analyser for the mixed output (for visualization) + */ + getMixedAnalyser(): AnalyserNode | null { + return this.mixedAnalyser + } + /** 
* Get the mixed output stream */ @@ -222,15 +235,17 @@ export class AudioStreamMixer { this.removeStream(streamId) } - // Disconnect merger and destination + // Disconnect merger, analyser, and destination try { this.merger?.disconnect() + this.mixedAnalyser?.disconnect() this.destination?.disconnect() } catch (error) { console.warn('Error disconnecting mixer nodes:', error) } this.merger = null + this.mixedAnalyser = null this.destination = null // Close audio context diff --git a/ushadow/frontend/src/modules/dual-stream-audio/core/types.ts b/ushadow/frontend/src/modules/dual-stream-audio/core/types.ts index 2e55ca8f..a35e22a8 100644 --- a/ushadow/frontend/src/modules/dual-stream-audio/core/types.ts +++ b/ushadow/frontend/src/modules/dual-stream-audio/core/types.ts @@ -162,11 +162,11 @@ export interface DualStreamRecordingHook { activeStreams: StreamInfo[] // Controls - startRecording: (mode: RecordingMode) => Promise + startRecording: (mode: RecordingMode, preCapturedDisplayStream?: MediaStream) => Promise stopRecording: () => void setStreamGain: (streamId: string, gain: number) => void // Utilities formatDuration: (seconds: number) => string - getAnalyser: (streamType: StreamType) => AnalyserNode | null + getAnalyser: (streamType: StreamType | 'mixed') => AnalyserNode | null } diff --git a/ushadow/frontend/src/modules/dual-stream-audio/hooks/useDualStreamRecording.ts b/ushadow/frontend/src/modules/dual-stream-audio/hooks/useDualStreamRecording.ts index dba88e3e..12a3bc77 100644 --- a/ushadow/frontend/src/modules/dual-stream-audio/hooks/useDualStreamRecording.ts +++ b/ushadow/frontend/src/modules/dual-stream-audio/hooks/useDualStreamRecording.ts @@ -97,9 +97,14 @@ export function useDualStreamRecording( /** * Start recording + * + * @param recordingMode - The recording mode ('microphone-only' or 'dual-stream') + * @param preCaptur edDisplayStream - Optional pre-captured display stream (for dual-stream mode) + * This is important for browser security: 
getDisplayMedia() + * must be called from a user gesture, so we capture it early */ const startRecording = useCallback( - async (recordingMode: RecordingMode) => { + async (recordingMode: RecordingMode, preCapturedDisplayStream?: MediaStream) => { try { // Check browser compatibility const capabilities = getBrowserCapabilities() @@ -136,10 +141,29 @@ export function useDualStreamRecording( // Step 2: Capture display media (if dual-stream mode) let displayStream: MediaStream | null = null if (recordingMode === 'dual-stream') { - setState('requesting-display') - console.log('๐Ÿ–ฅ๏ธ Step 2: Capturing display media...') + if (preCapturedDisplayStream) { + // Use pre-captured stream (already requested in user gesture context) + console.log('๐Ÿ–ฅ๏ธ Step 2: Using pre-captured display stream') + displayStream = preCapturedDisplayStream + + // Log stream details + const audioTracks = displayStream.getAudioTracks() + const videoTracks = displayStream.getVideoTracks() + console.log('๐Ÿ“Š Pre-captured stream status:', { + audioTracks: audioTracks.length, + videoTracks: videoTracks.length, + audioEnabled: audioTracks[0]?.enabled, + audioMuted: audioTracks[0]?.muted, + audioReadyState: audioTracks[0]?.readyState, + audioLabel: audioTracks[0]?.label + }) + } else { + // Fallback: capture display media now (may fail if not in user gesture context) + setState('requesting-display') + console.log('๐Ÿ–ฅ๏ธ Step 2: Capturing display media...') + displayStream = await captureDisplayMedia(config.displayConstraints?.audio) + } - displayStream = await captureDisplayMedia(config.displayConstraints?.audio) displayStreamRef.current = displayStream // Monitor display stream for ended event @@ -160,18 +184,33 @@ export function useDualStreamRecording( // Add streams to mixer const micStreamId = mixer.addStream(micStream, 'microphone', 1.0) + console.log('โœ… Added microphone stream to mixer:', micStreamId) const streamIds: string[] = [micStreamId] const streamTypes: Array<'microphone' | 
'display'> = ['microphone'] if (displayStream) { + console.log('โž• Adding display stream to mixer...') + const displayAudioTracks = displayStream.getAudioTracks() + console.log('๐Ÿ“Š Display stream before adding to mixer:', { + audioTracks: displayAudioTracks.length, + audioEnabled: displayAudioTracks[0]?.enabled, + audioMuted: displayAudioTracks[0]?.muted, + audioReadyState: displayAudioTracks[0]?.readyState + }) + const displayStreamId = mixer.addStream(displayStream, 'display', 1.0) + console.log('โœ… Added display stream to mixer:', displayStreamId) streamIds.push(displayStreamId) streamTypes.push('display') + } else { + console.warn('โš ๏ธ No display stream to add to mixer') } // Update active streams - setActiveStreams(mixer.getActiveStreams()) + const activeStreams = mixer.getActiveStreams() + console.log('๐Ÿ“Š Active streams in mixer:', activeStreams) + setActiveStreams(activeStreams) // Get mixed output stream const mixedStream = mixer.getMixedStream() @@ -283,11 +322,14 @@ export function useDualStreamRecording( }, []) /** - * Get analyser for a stream type + * Get analyser for a stream type (or 'mixed' for the mixed output) */ const getAnalyser = useCallback( - (streamType: 'microphone' | 'display'): AnalyserNode | null => { + (streamType: 'microphone' | 'display' | 'mixed'): AnalyserNode | null => { if (!mixerRef.current) return null + if (streamType === 'mixed') { + return mixerRef.current.getMixedAnalyser() + } return mixerRef.current.getAnalyserByType(streamType) }, [] diff --git a/ushadow/frontend/src/pages/ConversationDetailPage.tsx b/ushadow/frontend/src/pages/ConversationDetailPage.tsx new file mode 100644 index 00000000..ebb2e3de --- /dev/null +++ b/ushadow/frontend/src/pages/ConversationDetailPage.tsx @@ -0,0 +1,639 @@ +import { useParams, useNavigate, useSearchParams } from 'react-router-dom' +import { useRef, useState, useEffect } from 'react' +import { ArrowLeft, MessageSquare, Clock, Calendar, User, AlertCircle, Play, Pause, Brain, 
ExternalLink, Share2 } from 'lucide-react' +import { useConversationDetail } from '../hooks/useConversationDetail' +import type { ConversationSource } from '../hooks/useConversations' +import { useQuery } from '@tanstack/react-query' +import { api, unifiedMemoriesApi } from '../services/api' +import { getChronicleAudioUrl } from '../services/chronicleApi' +import ReactMarkdown from 'react-markdown' +import remarkGfm from 'remark-gfm' +import { MemoryCard } from '../components/memories/MemoryCard' +import ShareDialog from '../components/ShareDialog' +import { useShare } from '../hooks/useShare' + +export default function ConversationDetailPage() { + const { id } = useParams<{ id: string }>() + const [searchParams] = useSearchParams() + const navigate = useNavigate() + const source = (searchParams.get('source') || 'chronicle') as ConversationSource + + const { conversation, isLoading, error } = useConversationDetail(id!, source) + + // Share functionality + const shareProps = useShare({ + resourceType: 'conversation', + resourceId: id || '', + }) + + // Fetch memories for this conversation (unified API for both Chronicle and Mycelia) + const { data: memoriesData, isLoading: memoriesLoading } = useQuery({ + queryKey: ['conversation-memories', id, source], + queryFn: async () => { + if (id && (source === 'chronicle' || source === 'mycelia')) { + const response = await unifiedMemoriesApi.getConversationMemories(id, source) + return response.data + } + return null + }, + enabled: (source === 'chronicle' || source === 'mycelia') && !!id, + }) + + // Audio playback state + const [playingSegment, setPlayingSegment] = useState(null) + const [playingFullAudio, setPlayingFullAudio] = useState(false) + const audioRef = useRef(null) + const segmentTimerRef = useRef(null) + + // Log for debugging + console.log('[ConversationDetailPage] Source:', source) + console.log('[ConversationDetailPage] Conversation:', conversation) + console.log('[ConversationDetailPage] Segments:', 
conversation?.segments) + + // Cleanup audio on unmount + useEffect(() => { + return () => { + if (audioRef.current) { + audioRef.current.pause() + } + if (segmentTimerRef.current) { + window.clearTimeout(segmentTimerRef.current) + } + } + }, []) + + // Handle segment play/pause + const handleSegmentPlayPause = async (segmentIndex: number, segment: any) => { + const segmentId = `segment-${segmentIndex}` + + // If this segment is playing, pause it + if (playingSegment === segmentId) { + if (audioRef.current) { + audioRef.current.pause() + } + if (segmentTimerRef.current) { + window.clearTimeout(segmentTimerRef.current) + segmentTimerRef.current = null + } + setPlayingSegment(null) + return + } + + // Stop any currently playing segment + if (audioRef.current) { + audioRef.current.pause() + } + if (segmentTimerRef.current) { + window.clearTimeout(segmentTimerRef.current) + segmentTimerRef.current = null + } + + try { + // Create or reuse audio element + if (!audioRef.current) { + audioRef.current = new Audio() + audioRef.current.addEventListener('ended', () => setPlayingSegment(null)) + } + + if (source === 'chronicle') { + // Chronicle: Use URL directly for instant playback (token in query string) + const audioUrl = await getChronicleAudioUrl(id!, true) + audioRef.current.src = audioUrl + audioRef.current.currentTime = segment.start + } else { + // Mycelia: Fetch as blob to include auth headers + const myceliaBackendUrl = '/api/services/mycelia-backend/proxy' + const myceliaConv = conversation as any + + // Get conversation start time from timeRanges + const conversationStart = myceliaConv?.timeRanges?.[0]?.start + if (!conversationStart) { + console.error('[ConversationDetail] No conversation start time found') + return + } + + // Calculate absolute timestamps for the segment + const convStartTime = new Date(conversationStart).getTime() + const segmentStartTime = convStartTime + (segment.start * 1000) + const segmentEndTime = convStartTime + (segment.end * 1000) + + 
// Convert to Unix timestamps (seconds) + // Use floor for start, ceil for end to avoid cutting off audio + const startUnix = Math.floor(segmentStartTime / 1000) + const endUnix = Math.ceil(segmentEndTime / 1000) + + const audioUrl = `${myceliaBackendUrl}/api/audio/stream?start=${startUnix}&end=${endUnix}` + + // Fetch with auth headers via axios + const response = await api.get(audioUrl, { responseType: 'blob' }) + const audioBlob = response.data + const objectUrl = URL.createObjectURL(audioBlob) + + // Clean up old object URL + if (audioRef.current.src.startsWith('blob:')) { + URL.revokeObjectURL(audioRef.current.src) + } + + audioRef.current.src = objectUrl + } + + await audioRef.current.play() + setPlayingSegment(segmentId) + + // Set timer to stop at segment end (only needed for Chronicle) + // For Mycelia, we fetch exact chunks so the 'ended' event handles it + if (source === 'chronicle') { + const duration = (segment.end - segment.start) * 1000 + segmentTimerRef.current = window.setTimeout(() => { + if (audioRef.current) { + audioRef.current.pause() + } + setPlayingSegment(null) + segmentTimerRef.current = null + }, duration) + } + } catch (err) { + console.error('[ConversationDetail] Error playing audio segment:', err) + setPlayingSegment(null) + } + } + + // Handle full conversation audio play/pause + const handleFullAudioPlayPause = async () => { + // If full audio is playing, pause it + if (playingFullAudio) { + if (audioRef.current) { + audioRef.current.pause() + } + setPlayingFullAudio(false) + return + } + + // Stop any segment that's playing + if (playingSegment) { + if (audioRef.current) { + audioRef.current.pause() + } + if (segmentTimerRef.current) { + window.clearTimeout(segmentTimerRef.current) + segmentTimerRef.current = null + } + setPlayingSegment(null) + } + + try { + // Create or reuse audio element + if (!audioRef.current) { + audioRef.current = new Audio() + audioRef.current.addEventListener('ended', () => setPlayingFullAudio(false)) + } 
+ + if (source === 'chronicle') { + // Chronicle: Use URL directly for instant playback (token in query string) + const audioUrl = await getChronicleAudioUrl(id!, true) + audioRef.current.src = audioUrl + } else { + // Mycelia: Fetch as blob to include auth headers + const myceliaConv = conversation as any + const conversationStart = myceliaConv?.timeRanges?.[0]?.start + const conversationEnd = myceliaConv?.timeRanges?.[0]?.end + + if (!conversationStart || !conversationEnd) { + console.error('[ConversationDetail] No conversation time range found') + return + } + + // Convert to Unix timestamps (seconds) + // Use floor for start, ceil for end to avoid cutting off audio + const startUnix = Math.floor(new Date(conversationStart).getTime() / 1000) + const endUnix = Math.ceil(new Date(conversationEnd).getTime() / 1000) + + const myceliaBackendUrl = '/api/services/mycelia-backend/proxy' + const audioUrl = `${myceliaBackendUrl}/api/audio/stream?start=${startUnix}&end=${endUnix}` + + // Fetch with auth headers via axios + const response = await api.get(audioUrl, { responseType: 'blob' }) + const audioBlob = response.data + const objectUrl = URL.createObjectURL(audioBlob) + + // Clean up old object URL + if (audioRef.current.src.startsWith('blob:')) { + URL.revokeObjectURL(audioRef.current.src) + } + + audioRef.current.src = objectUrl + } + + audioRef.current.currentTime = 0 + await audioRef.current.play() + setPlayingFullAudio(true) + } catch (err) { + console.error('[ConversationDetail] Error playing full audio:', err) + setPlayingFullAudio(false) + } + } + + if (isLoading) { + return ( +
+
+
+

Loading conversation...

+
+
+ ) + } + + if (error || !conversation) { + return ( +
+ + +
+
+ +
+

+ Failed to load conversation +

+

+ {error ? String(error) : 'Conversation not found'} +

+
+
+
+
+ ) + } + + // Mycelia stores data differently than Chronicle + const myceliaConv = conversation as any + + // Extract title (mycelia uses 'name', chronicle uses 'title') + const title = conversation.title || myceliaConv?.name || 'Untitled Conversation' + + // Extract summary (mycelia uses summaries array, chronicle uses summary string) + const summary = conversation.summary || + (myceliaConv?.summaries && myceliaConv.summaries.length > 0 + ? myceliaConv.summaries[0].text + : null) + + // Extract detailed summary (mycelia uses 'details', chronicle uses 'detailed_summary') + const detailedSummary = conversation.detailed_summary || myceliaConv?.details + + // Check if segments exist (match Chronicle page logic) + const hasValidSegments = conversation.segments && conversation.segments.length > 0 + + // Extract start/end times + const startTime = myceliaConv?.timeRanges?.[0]?.start || conversation.created_at + const endTime = myceliaConv?.timeRanges?.[0]?.end || conversation.completed_at + + // Format duration + const formatDuration = (seconds?: number) => { + if (!seconds) { + // Mycelia stores timeRanges instead of duration_seconds + if (myceliaConv?.timeRanges && myceliaConv.timeRanges.length > 0) { + const range = myceliaConv.timeRanges[0] + if (range.start && range.end) { + const start = new Date(range.start).getTime() + const end = new Date(range.end).getTime() + const durationMs = end - start + const durationSec = Math.floor(durationMs / 1000) + const mins = Math.floor(durationSec / 60) + const secs = durationSec % 60 + return `${mins}m ${secs}s` + } + } + return 'Unknown' + } + const mins = Math.floor(seconds / 60) + const secs = Math.floor(seconds % 60) + return `${mins}m ${secs}s` + } + + // Format date + const formatDate = (dateString?: string) => { + if (!dateString) { + // Mycelia uses createdAt + if (myceliaConv?.createdAt) { + dateString = myceliaConv.createdAt + } else { + return 'Unknown' + } + } + try { + return new Date(dateString).toLocaleString() + 
} catch { + return dateString + } + } + + const sourceColor = source === 'chronicle' ? 'blue' : 'purple' + const sourceLabel = source === 'chronicle' ? 'Chronicle' : 'Mycelia' + + return ( +
+ {/* Header with back button */} +
+ + + {/* Source badge */} + + {sourceLabel} + +
+ + {/* Conversation metadata */} +
+
+ +
+

+ {title} +

+ {summary && ( +
+ + {summary} + +
+ )} + {detailedSummary && detailedSummary !== summary && ( +
+ + {detailedSummary} + +
+ )} +
+
+ + {/* Play Full Audio Button and Share Button */} +
+ + + +
+ + {/* Share Dialog */} + + + + {/* Metadata grid */} +
+ {/* Start time */} + {startTime && ( +
+ +
+

+ {source === 'mycelia' ? 'Started' : 'Created'} +

+

+ {new Date(startTime).toLocaleString()} +

+
+
+ )} + + {/* End time */} + {endTime && ( +
+ +
+

+ {source === 'mycelia' ? 'Ended' : 'Completed'} +

+

+ {new Date(endTime).toLocaleString()} +

+
+
+ )} + +
+ +
+

Duration

+

+ {formatDuration(conversation.duration_seconds)} +

+
+
+ +
+ +
+

Segments

+

+ {hasValidSegments ? (conversation.segments?.length || 0) : 0} +

+
+
+ +
+ +
+

Memories

+

+ {conversation.memory_count || 0} +

+
+
+
+
+ + {/* Memories Section (Chronicle only) */} + {source === 'chronicle' && ( +
+
+
+ +

+ Memories +

+ {memoriesData && ( + + {memoriesData.count} + + )} +
+ {memoriesData && memoriesData.count > 0 && ( + + )} +
+ + {memoriesLoading ? ( +
+
+
+ ) : memoriesData && memoriesData.memories && memoriesData.memories.length > 0 ? ( +
+ {memoriesData.memories.map((memory, idx: number) => ( + navigate(`/memories/${memory.id}`)} + showSource={true} + testId={`memory-item-${idx}`} + /> + ))} +
+ ) : ( +
+ +

No memories extracted from this conversation

+
+ )} +
+ )} + + {/* Transcript */} + {hasValidSegments || conversation.transcript ? ( +
+

+ Transcript +

+ + {/* Segmented transcript (only if segments have actual text) */} + {hasValidSegments ? ( +
+ {conversation.segments.map((segment, idx) => { + const segmentId = `segment-${idx}` + const isPlaying = playingSegment === segmentId + + return ( +
+
+
+ + {segment.speaker?.charAt(0)?.toUpperCase() || '?'} + +
+
+
+
+
+ + {segment.speaker || 'Unknown'} + + + {Math.floor(segment.start)}s - {Math.floor(segment.end)}s + +
+ +
+
+ + {segment.text} + +
+
+
+ ) + })} +
+ ) : ( + /* Plain transcript */ +
+ + {conversation.transcript} + +
+ )} +
+ ) : ( +
+ +

No transcript available

+
+ )} +
+ ) +} diff --git a/ushadow/frontend/src/pages/ConversationsPage.tsx b/ushadow/frontend/src/pages/ConversationsPage.tsx new file mode 100644 index 00000000..0fc67f38 --- /dev/null +++ b/ushadow/frontend/src/pages/ConversationsPage.tsx @@ -0,0 +1,191 @@ +import { useState } from 'react' +import { useNavigate } from 'react-router-dom' +import { MessageSquare, RefreshCw, AlertCircle } from 'lucide-react' +import { useMultiSourceConversations, type ConversationSource } from '../hooks/useConversations' +import ConversationCard from '../components/conversations/ConversationCard' + +// Available conversation sources +const SOURCES: Array<{ id: ConversationSource; label: string; color: string }> = [ + { id: 'chronicle', label: 'Chronicle', color: 'blue' }, + { id: 'mycelia', label: 'Mycelia', color: 'purple' }, +] + +export default function ConversationsPage() { + const navigate = useNavigate() + const [selectedSources, setSelectedSources] = useState(['chronicle', 'mycelia']) + + const { chronicle, mycelia, anyLoading, allLoaded } = useMultiSourceConversations(selectedSources) + + // Toggle source selection + const toggleSource = (sourceId: ConversationSource) => { + setSelectedSources((prev) => + prev.includes(sourceId) ? prev.filter((s) => s !== sourceId) : [...prev, sourceId] + ) + } + + // Refresh all enabled sources + const handleRefresh = () => { + if (selectedSources.includes('chronicle')) chronicle.refetch() + if (selectedSources.includes('mycelia')) mycelia.refetch() + } + + return ( +
+ {/* Header */} +
+
+
+ +

Conversations

+
+

+ View conversations from multiple sources +

+
+ + +
+ + {/* Source selector */} +
+ +
+ {SOURCES.map((source) => { + const isSelected = selectedSources.includes(source.id) + const baseColor = source.color === 'blue' ? 'blue' : 'purple' + + return ( + + ) + })} +
+ + {selectedSources.length === 0 && ( +

+ + Select at least one source to view conversations +

+ )} +
+ + {/* Conversations columns */} + {selectedSources.length > 0 && ( +
+ {/* Chronicle column */} + {selectedSources.includes('chronicle') && ( +
+
+

+ Chronicle + {chronicle.isLoading && ( +
+ )} +

+ + {chronicle.data.length} conversations + +
+ + {chronicle.error && ( +
+

+ Failed to load Chronicle conversations. Service may be unavailable. +

+
+ )} + + {!chronicle.isLoading && !chronicle.error && chronicle.data.length === 0 && ( +
+ +

No conversations found

+
+ )} + +
+ {chronicle.data.map((conv) => ( + navigate(`/conversations/${conv.conversation_id || conv.audio_uuid}?source=chronicle`)} + /> + ))} +
+
+ )} + + {/* Mycelia column */} + {selectedSources.includes('mycelia') && ( +
+
+

+ Mycelia + {mycelia.isLoading && ( +
+ )} +

+ + {mycelia.data.length} conversations + +
+ + {mycelia.error && ( +
+

+ Failed to load Mycelia conversations. Service may be unavailable. +

+
+ )} + + {!mycelia.isLoading && !mycelia.error && mycelia.data.length === 0 && ( +
+ +

No conversations found

+
+ )} + +
+ {mycelia.data.map((conv) => ( + navigate(`/conversations/${conv.conversation_id || conv.audio_uuid}?source=mycelia`)} + /> + ))} +
+
+ )} +
+ )} +
+ ) +} diff --git a/ushadow/frontend/src/pages/Dashboard.tsx b/ushadow/frontend/src/pages/Dashboard.tsx index dae09734..89c7e9c5 100644 --- a/ushadow/frontend/src/pages/Dashboard.tsx +++ b/ushadow/frontend/src/pages/Dashboard.tsx @@ -1,51 +1,51 @@ -import { Activity, MessageSquare, Plug, Bot, Workflow, TrendingUp, Sparkles } from 'lucide-react' +import { Activity, MessageSquare, Clock, TrendingUp, Sparkles, Brain } from 'lucide-react' +import { useNavigate } from 'react-router-dom' import { useTheme } from '../contexts/ThemeContext' -import { useFeatureFlags } from '../contexts/FeatureFlagsContext' +import { useDashboardData } from '../hooks/useDashboardData' +import { ActivityType } from '../services/api' export default function Dashboard() { const { isDark } = useTheme() - const { isEnabled } = useFeatureFlags() - - // Define all stats with optional feature flag requirements - const allStats = [ - { - label: 'Conversations', - value: '0', - icon: MessageSquare, - accentColor: '#4ade80', // primary-400 - glowColor: 'rgba(74, 222, 128, 0.15)' - }, - { - label: 'MCP Servers', - value: '0', - icon: Plug, - accentColor: '#22c55e', // primary-500 - glowColor: 'rgba(34, 197, 94, 0.15)', - featureFlag: 'mcp_hub' - }, - { - label: 'Active Agents', - value: '0', - icon: Bot, - accentColor: '#c084fc', // accent-400 - glowColor: 'rgba(192, 132, 252, 0.15)', - featureFlag: 'agent_zero' - }, - { - label: 'n8n Workflows', - value: '0', - icon: Workflow, - accentColor: '#a855f7', // accent-500 - glowColor: 'rgba(168, 85, 247, 0.15)', - featureFlag: 'n8n_workflows' - }, - ] - - // Filter stats based on feature flags - const stats = allStats.filter(stat => { - if (!stat.featureFlag) return true - return isEnabled(stat.featureFlag) - }) + const navigate = useNavigate() + const { data, isLoading, error } = useDashboardData(10, 10) + + // Format timestamp as "2m ago", "Yesterday", etc. 
+ const formatTimestamp = (timestamp: string) => { + const date = new Date(timestamp) + const now = new Date() + const diffMs = now.getTime() - date.getTime() + const diffMins = Math.floor(diffMs / 60000) + + if (diffMins < 1) return 'Just now' + if (diffMins < 60) return `${diffMins}m ago` + + const diffHours = Math.floor(diffMins / 60) + if (diffHours < 24) return `${diffHours}h ago` + + const diffDays = Math.floor(diffHours / 24) + if (diffDays === 1) return 'Yesterday' + if (diffDays < 7) return `${diffDays}d ago` + + return date.toLocaleDateString() + } + + // Get icon and color for activity type + const getActivityStyle = (type: ActivityType) => { + switch (type) { + case ActivityType.CONVERSATION: + return { icon: MessageSquare, color: '#4ade80' } + case ActivityType.MEMORY: + return { icon: Brain, color: '#22c55e' } + default: + return { icon: Activity, color: '#71717a' } + } + } + + // Combine and sort all activities by timestamp + const allActivities = [ + ...(data?.recent_conversations || []), + ...(data?.recent_memories || []), + ].sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime()) return (
@@ -66,42 +66,76 @@ export default function Dashboard() {
{/* Stats Grid */} -
- {stats.map((stat) => ( -
-
-
-

- {stat.label} -

-

- {stat.value} -

-
- +
+ {/* Conversations Stat */} +
navigate('/conversations')} + style={{ + backgroundColor: isDark ? 'var(--surface-800)' : '#ffffff', + border: `1px solid ${isDark ? 'var(--surface-500)' : '#e4e4e7'}`, + boxShadow: isDark + ? '0 4px 20px rgba(74, 222, 128, 0.15), 0 4px 6px rgba(0, 0, 0, 0.4)' + : '0 4px 6px rgba(0, 0, 0, 0.1)', + }} + > +
+
+

+ Conversations +

+

+ {isLoading ? '...' : data?.stats.conversation_count || '0'} +

+
- ))} +
+ + {/* Memories Stat */} +
navigate('/memories')} + style={{ + backgroundColor: isDark ? 'var(--surface-800)' : '#ffffff', + border: `1px solid ${isDark ? 'var(--surface-500)' : '#e4e4e7'}`, + boxShadow: isDark + ? '0 4px 20px rgba(34, 197, 94, 0.15), 0 4px 6px rgba(0, 0, 0, 0.4)' + : '0 4px 6px rgba(0, 0, 0, 0.1)', + }} + > +
+
+

+ Memories +

+

+ {isLoading ? '...' : data?.stats.memory_count || '0'} +

+
+ +
+
{/* Activity Feed */} @@ -128,16 +162,100 @@ export default function Dashboard() { Recent Activity
-
- -

- No activity yet. Start by configuring your services in Services -

- Not working yet -
+ + {isLoading ? ( +
+
+

+ Loading activities... +

+
+ ) : error ? ( +
+ +

+ Failed to load activities. Please try again. +

+
+ ) : !allActivities.length ? ( +
+ +

+ No activity yet. Start a conversation or create memories to see activity here. +

+
+ ) : ( +
+ {allActivities.map((activity) => { + const style = getActivityStyle(activity.type) + const Icon = style.icon + + return ( +
+
+ +
+ +
+

+ {activity.title} +

+ {activity.description && ( +

+ {activity.description} +

+ )} +
+ + + {formatTimestamp(activity.timestamp)} + + {activity.source && ( + <> + โ€ข + + {activity.source} + + + )} +
+
+
+ ) + })} +
+ )}
{/* Quick Actions */} @@ -164,9 +282,10 @@ export default function Dashboard() { Quick Actions
-
+
+ - {isEnabled('mcp_hub') && ( - - )} - {isEnabled('n8n_workflows') && ( - - )}
diff --git a/ushadow/frontend/src/pages/LoginPage.tsx b/ushadow/frontend/src/pages/LoginPage.tsx index 9937ae2a..facff6e9 100644 --- a/ushadow/frontend/src/pages/LoginPage.tsx +++ b/ushadow/frontend/src/pages/LoginPage.tsx @@ -1,33 +1,37 @@ -import React, { useState, useEffect } from 'react' -import { useNavigate, Navigate, useLocation } from 'react-router-dom' -import { useAuth } from '../contexts/AuthContext' -import { Eye, EyeOff } from 'lucide-react' +import React from 'react' +import { useNavigate, useLocation } from 'react-router-dom' +import { useKeycloakAuth } from '../contexts/KeycloakAuthContext' import AuthHeader from '../components/auth/AuthHeader' +import { LogIn } from 'lucide-react' export default function LoginPage() { - const [email, setEmail] = useState('') - const [password, setPassword] = useState('') - const [showPassword, setShowPassword] = useState(false) - const [isLoading, setIsLoading] = useState(false) - const [error, setError] = useState('') const navigate = useNavigate() const location = useLocation() - - const { user, login, setupRequired, isLoading: authLoading } = useAuth() + const { isAuthenticated, isLoading, login, register } = useKeycloakAuth() // Get the intended destination from router state (set by ProtectedRoute) const from = (location.state as { from?: string })?.from || '/' // After successful login, redirect to intended destination - useEffect(() => { - if (user) { - console.log('Login successful, redirecting to:', from) + // Note: Don't redirect if we're on the callback page - that's handled by OAuthCallback component + React.useEffect(() => { + if (isAuthenticated && location.pathname !== '/oauth/callback') { navigate(from, { replace: true, state: { fromAuth: true } }) } - }, [user, navigate, from]) + }, [isAuthenticated, navigate, from, location.pathname]) + + const handleLogin = () => { + // Redirect to Keycloak login page + login(from) + } + + const handleRegister = () => { + // Redirect to Keycloak registration page 
+ register(from) + } - // Show loading while checking setup status - if (setupRequired === null || authLoading) { + // Show loading while checking authentication + if (isLoading) { return (
- Checking setup status... + Checking authentication... ) } - // Redirect to registration if required - // IMPORTANT: This must be after all hooks to follow Rules of Hooks - if (setupRequired === true) { - return - } - - const handleSubmit = async (e: React.FormEvent) => { - e.preventDefault() - setIsLoading(true) - setError('') - - const result = await login(email, password) - if (!result.success) { - // Show specific error message based on error type - if (result.errorType === 'connection_failure') { - setError('Unable to connect to server. Please check your connection and try again.') - } else if (result.errorType === 'authentication_failure') { - setError('Invalid email or password') - } else { - setError(result.error || 'Login failed. Please try again.') - } - } - setIsLoading(false) - } - return (
-
- {/* Decorative background blur circles - brand green and purple */} - {/* Using fixed positioning so glows extend to viewport edges, not container edges */} -
-
-
-
+ {/* Geometric grid background pattern */} +
+ + {/* Diagonal cross pattern overlay */} +
-
- +
+
+ - {/* Login Form */} + {/* Login Card */}
-
-
- - setEmail(e.target.value)} - className="appearance-none block w-full px-4 py-3 rounded-lg transition-all sm:text-sm focus:outline-none focus:ring-1" - style={{ - backgroundColor: 'var(--surface-700)', - border: '1px solid var(--surface-400)', - color: 'var(--text-primary)', - }} - placeholder="your@email.com" - data-testid="login-email-input" - /> -
+
+

+ Welcome to Ushadow +

+

+ Secure authentication powered by Keycloak +

+
-
- -
- setPassword(e.target.value)} - className="appearance-none block w-full px-4 py-3 pr-12 rounded-lg transition-all sm:text-sm focus:outline-none focus:ring-1" - style={{ - backgroundColor: 'var(--surface-700)', - border: '1px solid var(--surface-400)', - color: 'var(--text-primary)', - }} - placeholder="Enter your password" - data-testid="login-password-input" - /> - -
-
+ {/* Sign in with Keycloak Button */} + - {error && ( -
-

{error}

-
- )} +
+

+ You'll be redirected to Keycloak for secure authentication +

+
+ + {/* Divider */} +
+
+
+
+
+ + Or + +
+
-
+
+

+ Don't have an account?{' '} -

- +

+
+
-

- Ushadow Dashboard v0.1.0 + {/* Info Card */} +

+

+ New to Ushadow? Your administrator will provide you with access credentials.

diff --git a/ushadow/frontend/src/pages/MemoryDetailPage.tsx b/ushadow/frontend/src/pages/MemoryDetailPage.tsx new file mode 100644 index 00000000..8896211d --- /dev/null +++ b/ushadow/frontend/src/pages/MemoryDetailPage.tsx @@ -0,0 +1,548 @@ +import { useParams, useNavigate } from 'react-router-dom' +import { ArrowLeft, Brain, Calendar, Tag, MessageSquare, Edit2, Trash2, AlertCircle, Database, Copy, Check, ExternalLink } from 'lucide-react' +import { useQuery } from '@tanstack/react-query' +import { useState } from 'react' +import { unifiedMemoriesApi, memoriesApi, type ConversationMemory } from '../services/api' +import { useConversationDetail } from '../hooks/useConversationDetail' +import ConfirmDialog from '../components/ConfirmDialog' + +interface ConversationLink { + conversation_id: string + title: string + created_at: string + source: 'chronicle' | 'mycelia' +} + +interface RelatedMemory { + id: string + memory: string + categories: string[] + created_at: number + state: string +} + +interface AccessLogEntry { + id: string + app_name: string + accessed_at: string +} + +export default function MemoryDetailPage() { + const { id } = useParams<{ id: string }>() + const navigate = useNavigate() + const [showDeleteDialog, setShowDeleteDialog] = useState(false) + const [copiedId, setCopiedId] = useState(false) + + // Fetch memory details using unified backend API + const { data: memory, isLoading, error } = useQuery({ + queryKey: ['memory', id], + queryFn: async () => { + if (!id) throw new Error('Memory ID is required') + const response = await unifiedMemoriesApi.getMemoryById(id) + return response.data + }, + enabled: !!id, + }) + + // Fetch related memories (only for openmemory source) + const { data: relatedMemories, isLoading: relatedLoading } = useQuery({ + queryKey: ['related-memories', id], + queryFn: async () => { + if (!id || !memory) return [] + // Extract user_id from metadata + const userId = memory.metadata?.user_id || 
memory.metadata?.chronicle_user_email || 'default' + try { + const memories = await memoriesApi.getRelatedMemories(userId, id) + return memories + } catch (err) { + console.error('Failed to fetch related memories:', err) + return [] + } + }, + enabled: !!id && !!memory && memory.source === 'openmemory', + }) + + // Fetch access logs (only for openmemory source) + const { data: accessLogs, isLoading: logsLoading } = useQuery({ + queryKey: ['memory-access-logs', id], + queryFn: async () => { + if (!id) return [] + try { + const result = await memoriesApi.getAccessLogs(id, 1, 10) + return result.logs + } catch (err) { + console.error('Failed to fetch access logs:', err) + return [] + } + }, + enabled: !!id && memory?.source === 'openmemory', + }) + + // Derive conversation link from metadata + const conversationId = memory?.metadata?.source_id + const conversationSource = memory?.metadata ? ( + memory.metadata.conversation_source || + (memory.metadata.app_name?.toLowerCase().includes('mycelia') ? 'mycelia' : 'chronicle') + ) as 'chronicle' | 'mycelia' : 'chronicle' + + // Fetch full conversation details to get title and summary + const { conversation, isLoading: conversationLoading } = useConversationDetail( + conversationId || '', + conversationSource, + { enabled: !!conversationId } + ) + + const handleDelete = async () => { + if (!id || !memory) return + + try { + // For now, show message that delete needs implementation + alert('Memory deletion is not yet implemented for unified memories API') + setShowDeleteDialog(false) + } catch (err) { + console.error('Failed to delete memory:', err) + alert('Failed to delete memory') + } + } + + const handleCopyId = async () => { + if (id) { + await navigator.clipboard.writeText(id) + setCopiedId(true) + setTimeout(() => setCopiedId(false), 2000) + } + } + + const formatDate = (dateString: string) => { + const timestamp = dateString.includes('T') || dateString.includes('-') + ? 
new Date(dateString).getTime() + : parseInt(dateString) * 1000 + return new Date(timestamp).toLocaleString() + } + + const formatAccessDate = (dateString: string) => { + return new Date(dateString + 'Z').toLocaleDateString('en-US', { + year: 'numeric', + month: 'short', + day: 'numeric', + hour: 'numeric', + minute: 'numeric', + }) + } + + // Extract categories from metadata + // Debug: log memory object to see structure + if (memory) { + console.log('[MemoryDetailPage] Memory object:', memory) + console.log('[MemoryDetailPage] Metadata:', memory.metadata) + console.log('[MemoryDetailPage] Categories from metadata:', memory.metadata?.categories) + } + const categories = memory?.metadata?.categories || [] + console.log('[MemoryDetailPage] Final categories:', categories) + + // Source badge colors + const sourceColors = { + openmemory: 'bg-purple-100 dark:bg-purple-900/30 text-purple-700 dark:text-purple-300', + chronicle: 'bg-blue-100 dark:bg-blue-900/30 text-blue-700 dark:text-blue-300', + mycelia: 'bg-green-100 dark:bg-green-900/30 text-green-700 dark:text-green-300', + } + + // Category colors + const categoryColors: Record = { + personal: 'bg-purple-100 dark:bg-purple-900/30 text-purple-700 dark:text-purple-300', + work: 'bg-blue-100 dark:bg-blue-900/30 text-blue-700 dark:text-blue-300', + health: 'bg-green-100 dark:bg-green-900/30 text-green-700 dark:text-green-300', + finance: 'bg-yellow-100 dark:bg-yellow-900/30 text-yellow-700 dark:text-yellow-300', + travel: 'bg-orange-100 dark:bg-orange-900/30 text-orange-700 dark:text-orange-300', + education: 'bg-cyan-100 dark:bg-cyan-900/30 text-cyan-700 dark:text-cyan-300', + preferences: 'bg-pink-100 dark:bg-pink-900/30 text-pink-700 dark:text-pink-300', + relationships: 'bg-red-100 dark:bg-red-900/30 text-red-700 dark:text-red-300', + } + + if (isLoading) { + return ( +
+
+
+

Loading memory...

+
+
+ ) + } + + if (error || !memory) { + return ( +
+ + +
+
+ +
+

+ Failed to load memory +

+

+ {error ? String(error) : 'Memory not found'} +

+
+
+
+
+ ) + } + + return ( +
+ {/* Back button */} + + + {/* Main layout: 2/3 content + 1/3 sidebar */} +
+ {/* Main content (2/3) */} +
+ {/* Memory card */} +
+ {/* Header */} +
+
+ +

+ Memory + + #{id?.slice(0, 6)} + +

+ +
+
+ + +
+
+ + {/* Content */} +
+ {/* Memory text with accent border */} +
+

+ {memory.content} +

+
+ + {/* Categories and metadata row */} +
+ {/* Categories */} +
+ {categories.length > 0 && ( + <> + + {categories.map((category: string, idx: number) => ( + + {category} + + ))} + + )} +
+ + {/* Created by */} + {memory.metadata?.app_name && ( +
+ Created by: + + + {memory.metadata.app_name} + +
+ )} +
+ + {/* Metadata section */} + {memory.metadata && Object.keys(memory.metadata).length > 0 && ( +
+

+ Metadata +

+
+
+                      {JSON.stringify(memory.metadata, null, 2)}
+                    
+
+
+ )} + + {/* Additional info */} +
+
+ +
+

Created

+

+ {formatDate(memory.created_at)} +

+
+
+ +
+ +
+

Source

+ + {memory.source} + +
+
+ + {memory.score !== null && memory.score !== undefined && ( +
+ +
+

Relevance

+

+ {(memory.score * 100).toFixed(1)}% +

+
+
+ )} +
+
+
+ + {/* Linked Conversations */} + {conversationId && ( +
+
+ +

+ Linked Conversation +

+
+ + {conversationLoading ? ( +
+

Loading conversation...

+
+ ) : conversation ? ( +
navigate(`/conversations/${conversationId}?source=${conversationSource}`)} + data-testid="conversation-link-0" + > +
+
+ {/* Tags above title */} +
+ {categories.length > 0 && ( + <> + {categories.slice(0, 3).map((category: string, idx: number) => ( + + {category} + + ))} + {categories.length > 3 && ( + + +{categories.length - 3} + + )} + + )} + + {conversationSource === 'chronicle' ? 'Chronicle' : 'Mycelia'} + +
+ + {/* Title */} +

+ {conversation.title || 'Untitled Conversation'} + +

+ + {/* Summary */} + {conversation.summary && ( +

+ {conversation.summary} +

+ )} + + {/* Date */} +

+ {formatDate(conversation.created_at || memory.created_at)} +

+
+
+
+ ) : ( +
+

+ Conversation details not available +

+
+ )} +
+ )} +
+ + {/* Sidebar (1/3) */} +
+ {/* Access Log */} +
+
+

Access Log

+
+
+ {logsLoading ? ( +

+ Loading access logs... +

+ ) : accessLogs && accessLogs.length > 0 ? ( +
+ {accessLogs.map((entry: AccessLogEntry, index: number) => ( +
+
+ +
+ {index < accessLogs.length - 1 && ( +
+ )} +
+ + {entry.app_name} + + + {formatAccessDate(entry.accessed_at)} + +
+
+ ))} +
+ ) : ( +

+ No access logs available +

+ )} +
+
+ + {/* Related Memories */} +
+
+

Related Memories

+
+
+ {relatedLoading ? ( +

+ Loading related memories... +

+ ) : relatedMemories && relatedMemories.length > 0 ? ( +
+ {relatedMemories.map((relMem: RelatedMemory) => ( +
navigate(`/memories/${relMem.id}`)} + data-testid={`related-memory-${relMem.id}`} + > +

+ {relMem.memory} +

+
+ {relMem.categories.slice(0, 2).map((cat, idx) => ( + + {cat} + + ))} + {relMem.state !== 'active' && ( + + {relMem.state} + + )} +
+
+ ))} +
+ ) : ( +

+ No related memories found +

+ )} +
+
+
+
+ + {/* Delete Confirmation Dialog */} + setShowDeleteDialog(false)} + onConfirm={handleDelete} + title="Delete Memory?" + message="Are you sure you want to delete this memory? This action cannot be undone." + confirmLabel="Delete" + variant="danger" + /> +
+ ) +} diff --git a/ushadow/frontend/src/pages/ServiceConfigsPage.tsx b/ushadow/frontend/src/pages/ServiceConfigsPage.tsx index fd770196..e114132a 100644 --- a/ushadow/frontend/src/pages/ServiceConfigsPage.tsx +++ b/ushadow/frontend/src/pages/ServiceConfigsPage.tsx @@ -431,6 +431,7 @@ export default function ServiceConfigsPage() { // Try to find instance first, otherwise treat as template ID const consumerInstance = instances.find(inst => inst.id === consumerId) const templateId = consumerInstance?.template_id || consumerId + console.log('[DEBUG handleDeployConsumer]', { consumerId, consumerInstance: consumerInstance?.id, templateId, configId: target.configId }) // Load ALL available targets (both Docker and K8s) for unified selection setLoadingTargets(true) @@ -1396,6 +1397,7 @@ export default function ServiceConfigsPage() { providerTemplates={providerTemplates} serviceStatuses={serviceStatuses} deployments={filteredDeployments} + splitServicesEnabled={isEnabled('split_services')} onAddConfig={showServiceConfigs ? handleAddConfig : () => {}} onWiringChange={handleWiringChange} onWiringClear={handleWiringClear} diff --git a/ushadow/frontend/src/pages/ServicesPage.tsx b/ushadow/frontend/src/pages/ServicesPage.tsx index d01bd5c1..052956ab 100644 --- a/ushadow/frontend/src/pages/ServicesPage.tsx +++ b/ushadow/frontend/src/pages/ServicesPage.tsx @@ -693,6 +693,9 @@ export default function ServicesPage() {

Services

+ + LEGACY +

Configure providers and compose services @@ -730,6 +733,29 @@ export default function ServicesPage() {

+ {/* Legacy Notice Banner */} +
+
+ +
+

+ Legacy Services Page +

+

+ This is the legacy service management interface. For advanced features like service wiring, + custom configurations, and deployment management, please use the{' '} + + . +

+
+
+
+ {/* Stats */}
diff --git a/ushadow/frontend/src/services/api.ts b/ushadow/frontend/src/services/api.ts index 41af818b..bc00aab5 100644 --- a/ushadow/frontend/src/services/api.ts +++ b/ushadow/frontend/src/services/api.ts @@ -62,7 +62,15 @@ export const api = axios.create({ // Add request interceptor to include auth token api.interceptors.request.use((config) => { - const token = localStorage.getItem(getStorageKey('token')) + // Check for Keycloak token first (in sessionStorage) + const kcToken = sessionStorage.getItem('kc_access_token') + + // Fallback to legacy JWT token (in localStorage) + const legacyToken = localStorage.getItem(getStorageKey('token')) + + // Prefer Keycloak token if both are present + const token = kcToken || legacyToken + if (token) { config.headers.Authorization = `Bearer ${token}` } @@ -126,6 +134,32 @@ export const chronicleApi = { getConversation: (id: string) => api.get(`/api/chronicle/conversations/${id}`), } +// Mycelia integration endpoints +// Mycelia service name constant - ensures consistency +const MYCELIA_SERVICE = 'mycelia-backend' + +export const myceliaApi = { + // Connection info for service discovery + getConnectionInfo: () => api.get(`/api/services/${MYCELIA_SERVICE}/connection-info`), + + getStatus: () => api.get(`/api/services/${MYCELIA_SERVICE}/proxy/health`), + + // Conversations + getConversations: (params?: { limit?: number; skip?: number; start?: string; end?: string }) => + api.get(`/api/services/${MYCELIA_SERVICE}/proxy/data/conversations`, { params }), + getConversation: (id: string) => + api.get(`/api/services/${MYCELIA_SERVICE}/proxy/data/conversations/${id}`), + getConversationStats: () => api.get(`/api/services/${MYCELIA_SERVICE}/proxy/data/conversations/stats`), + + // Audio Timeline Data + getAudioItems: (params: { start: string; end: string; resolution?: string }) => + api.get(`/api/services/${MYCELIA_SERVICE}/proxy/data/audio/items`, { params }), + + // Generic Resource Access (for MCP-style resources) + callResource: 
(resourceName: string, body: any) => + api.post(`/api/services/${MYCELIA_SERVICE}/proxy/api/resource/${resourceName}`, body), +} + // MCP integration endpoints export const mcpApi = { getStatus: () => api.get('/api/mcp/status'), @@ -558,6 +592,12 @@ export interface KubernetesCluster { labels: Record infra_scans?: Record deployment_target_id?: string // Unified deployment target ID: {name}.k8s.{environment} + + // Ingress configuration + ingress_domain?: string + ingress_class?: string + ingress_enabled_by_default?: boolean + tailscale_magicdns_enabled?: boolean } export const kubernetesApi = { @@ -569,6 +609,8 @@ export const kubernetesApi = { api.get(`/api/kubernetes/${clusterId}`), removeCluster: (clusterId: string) => api.delete(`/api/kubernetes/${clusterId}`), + updateCluster: (clusterId: string, updates: Partial>) => + api.patch(`/api/kubernetes/${clusterId}`, updates), // Service management getAvailableServices: () => @@ -1821,6 +1863,39 @@ export const audioApi = { api.get('/api/providers/audio_consumer/available'), } +// ============================================================================= +// Unified Memories API - Cross-source memory retrieval +// ============================================================================= + +export interface ConversationMemory { + id: string + content: string + created_at: string + metadata: Record + source: 'openmemory' | 'mycelia' | 'chronicle' + score?: number +} + +export interface ConversationMemoriesResponse { + conversation_id: string + conversation_source: 'chronicle' | 'mycelia' + memories: ConversationMemory[] + count: number + sources_queried: string[] +} + +export const unifiedMemoriesApi = { + /** Get all memories for a conversation across all sources (OpenMemory + native backend) */ + getConversationMemories: (conversationId: string, source: 'chronicle' | 'mycelia') => + api.get(`/api/memories/by-conversation/${conversationId}`, { + params: { conversation_source: source } + }), + + /** Get a 
single memory by ID from any memory source */ + getMemoryById: (memoryId: string) => + api.get(`/api/memories/${memoryId}`), +} + export const githubImportApi = { /** Scan a GitHub repository for docker-compose files */ scan: (github_url: string, branch?: string, compose_path?: string) => @@ -1890,3 +1965,45 @@ export const githubImportApi = { compose_path }), } + +// ============================================================================= +// Dashboard API - Chronicle activity monitoring +// ============================================================================= + +export enum ActivityType { + CONVERSATION = 'conversation', + MEMORY = 'memory', +} + +export interface ActivityEvent { + id: string + type: ActivityType + title: string + description?: string + timestamp: string + metadata: Record + source?: string +} + +export interface DashboardStats { + conversation_count: number + memory_count: number +} + +export interface DashboardData { + stats: DashboardStats + recent_conversations: ActivityEvent[] + recent_memories: ActivityEvent[] + last_updated: string +} + +export const dashboardApi = { + /** Get complete dashboard data (stats + recent conversations & memories) */ + getDashboardData: (conversationLimit?: number, memoryLimit?: number) => + api.get('/api/dashboard/', { + params: { + conversation_limit: conversationLimit, + memory_limit: memoryLimit + }, + }), +} diff --git a/ushadow/frontend/src/services/chronicleApi.ts b/ushadow/frontend/src/services/chronicleApi.ts index a7678b11..403ae7ea 100644 --- a/ushadow/frontend/src/services/chronicleApi.ts +++ b/ushadow/frontend/src/services/chronicleApi.ts @@ -413,6 +413,15 @@ export async function getChronicleAudioUrl(conversationId: string, cropped: bool return url } +/** + * Get memories associated with a conversation + */ +export async function getConversationMemories(conversationId: string) { + const proxyUrl = await getChronicleProxyUrl() + const response = await 
api.get(`${proxyUrl}/api/conversations/${conversationId}/memories`) + return response.data +} + // ============================================================================= // Legacy compatibility exports // ============================================================================= diff --git a/ushadow/frontend/src/wizards/MyceliaWizard.tsx b/ushadow/frontend/src/wizards/MyceliaWizard.tsx index 90d0c1ab..22549206 100644 --- a/ushadow/frontend/src/wizards/MyceliaWizard.tsx +++ b/ushadow/frontend/src/wizards/MyceliaWizard.tsx @@ -188,8 +188,8 @@ export default function MyceliaWizard() { wizard.next() } } else if (wizard.currentStep.id === 'complete') { - // Navigate to services page - navigate('/services') + // Navigate to service configs page + navigate('/instances') } } @@ -367,15 +367,15 @@ function CompleteStep({ tokenData }: CompleteStepProps) {
  • - Access the web UI at https://localhost:14433 + Connect Apple Voice Memos, Google Drive, or local audio files
  • - Connect Apple Voice Memos, Google Drive, or local audio files + Search your voice notes and conversations
  • - Search your voice notes and conversations + View and manage the service on the Instances page
diff --git a/ushadow/frontend/src/wizards/QuickstartWizard.tsx b/ushadow/frontend/src/wizards/QuickstartWizard.tsx index 0e297358..0d18d0a1 100644 --- a/ushadow/frontend/src/wizards/QuickstartWizard.tsx +++ b/ushadow/frontend/src/wizards/QuickstartWizard.tsx @@ -1,6 +1,6 @@ import { useState, useEffect } from 'react' import { useNavigate } from 'react-router-dom' -import { Sparkles, Loader2, RefreshCw } from 'lucide-react' +import { Sparkles, Loader2, RefreshCw, CheckCircle } from 'lucide-react' import { servicesApi, quickstartApi, type QuickstartConfig, type CapabilityRequirement, type ServiceInfo } from '../services/api' import { ServiceStatusCard, type ServiceStatus } from '../components/services' diff --git a/ushadow/frontend/tailwind.config.js b/ushadow/frontend/tailwind.config.js index 078bacd2..53c62ec4 100644 --- a/ushadow/frontend/tailwind.config.js +++ b/ushadow/frontend/tailwind.config.js @@ -171,5 +171,7 @@ export default { }, }, }, - plugins: [], + plugins: [ + require('@tailwindcss/typography'), + ], } diff --git a/ushadow/launcher/src-tauri/src/commands/docker.rs b/ushadow/launcher/src-tauri/src/commands/docker.rs index 31a57fba..21a73f0a 100644 --- a/ushadow/launcher/src-tauri/src/commands/docker.rs +++ b/ushadow/launcher/src-tauri/src/commands/docker.rs @@ -154,7 +154,7 @@ pub async fn start_infrastructure(state: State<'_, AppState>) -> Result { @@ -186,10 +190,20 @@ export default function HomeScreen() { setShowLoginScreen(true)} + onWebSocketLog={(status, message, details) => logEvent('websocket', status, message, details)} + onBluetoothLog={(status, message, details) => logEvent('bluetooth', status, message, details)} + onSessionStart={startSession} + onSessionUpdate={updateSessionStatus} + onSessionEnd={endSession} testID="unified-streaming" /> + {/* Background Task Debug Panel */} + + + + {/* Login Screen Modal */} setShowLogViewer(false)} entries={logEntries} - connectionState={connectionState} + connectionState={logConnectionState} + 
sessions={sessions} onClearLogs={clearLogs} + onClearLogsByType={clearLogsByType} + onClearSessions={clearAllSessions} /> ); @@ -312,4 +329,8 @@ const styles = StyleSheet.create({ flex: 1, paddingHorizontal: spacing.lg, }, + debugPanel: { + paddingHorizontal: spacing.lg, + paddingBottom: spacing.md, + }, }); diff --git a/ushadow/mobile/app/(tabs)/sessions.tsx b/ushadow/mobile/app/(tabs)/sessions.tsx new file mode 100644 index 00000000..28c28821 --- /dev/null +++ b/ushadow/mobile/app/(tabs)/sessions.tsx @@ -0,0 +1,464 @@ +/** + * Sessions Tab - Ushadow Mobile + * + * Displays streaming session history with: + * - Duration, data volume, source/destination + * - Active session indicator + * - Link to Chronicle conversations + * - Session filtering and search + */ + +import React, { useState } from 'react'; +import { + View, + Text, + StyleSheet, + SafeAreaView, + FlatList, + TouchableOpacity, + Alert, +} from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { LinearGradient } from 'expo-linear-gradient'; +import { + StreamingSession, + formatDuration, + formatBytes, + isSessionActive, +} from '../types/streamingSession'; +import { useSessionTracking } from '../hooks/useSessionTracking'; +import { colors, theme, gradients, spacing, borderRadius, fontSize } from '../theme'; + +export default function SessionsScreen() { + const { sessions, activeSession, deleteSession, clearAllSessions, isLoading } = useSessionTracking(); + const [filter, setFilter] = useState<'all' | 'active' | 'failed'>('all'); + + const filteredSessions = sessions.filter(session => { + if (filter === 'active') return isSessionActive(session); + if (filter === 'failed') return session.error; + return true; + }); + + const handleDeleteSession = (sessionId: string) => { + Alert.alert( + 'Delete Session', + 'Remove this session from history?', + [ + { text: 'Cancel', style: 'cancel' }, + { + text: 'Delete', + style: 'destructive', + onPress: () => deleteSession(sessionId), + }, + ] 
+ ); + }; + + const handleClearAll = () => { + Alert.alert( + 'Clear All Sessions', + 'This will remove all session history. This cannot be undone.', + [ + { text: 'Cancel', style: 'cancel' }, + { + text: 'Clear All', + style: 'destructive', + onPress: clearAllSessions, + }, + ] + ); + }; + + const renderSession = ({ item: session }: { item: StreamingSession }) => { + const isActive = isSessionActive(session); + const duration = session.durationSeconds ?? 0; + const sourceLabel = session.source.type === 'omi' + ? `OMI: ${session.source.deviceName || session.source.deviceId.slice(0, 8)}` + : 'Phone Mic'; + + return ( + + {/* Header */} + + + + {sourceLabel} + {isActive && ( + + + Active + + )} + + handleDeleteSession(session.id)} + style={styles.deleteButton} + testID={`delete-session-${session.id}`} + > + + + + + {/* Metrics */} + + + + {formatDuration(duration)} + + + + {formatBytes(session.bytesTransferred)} + + + + {session.chunksTransferred} chunks + + + + {/* Destinations */} + {session.destinations.length > 0 && ( + + {session.destinations.map((dest, idx) => ( + + + {dest.name} + + {!dest.connected && ( + + )} + + ))} + + )} + + {/* Error */} + {session.error && ( + + + {session.error} + + )} + + {/* Timestamp */} + + {new Date(session.startTime).toLocaleString()} + + + {/* Conversation Link */} + {session.conversationId && ( + + + + Conversation: {session.conversationId.slice(0, 8)} + + + )} + + ); + }; + + return ( + + {/* Header */} + + + Streaming Sessions + + Track your audio streaming history + + + {/* Filter Chips */} + + setFilter('all')} + testID="filter-all" + > + + All ({sessions.length}) + + + setFilter('active')} + testID="filter-active" + > + + Active ({activeSession ? 1 : 0}) + + + setFilter('failed')} + testID="filter-failed" + > + + Failed ({sessions.filter(s => s.error).length}) + + + + + {/* Clear All Button */} + {sessions.length > 0 && ( + + Clear All History + + )} + + {/* Session List */} + {filteredSessions.length === 0 ? 
( + + + + {filter === 'all' + ? 'No sessions yet' + : filter === 'active' + ? 'No active sessions' + : 'No failed sessions'} + + + Start streaming to track session data + + + ) : ( + item.id} + contentContainerStyle={styles.listContent} + testID="sessions-list" + /> + )} + + ); +} + +const styles = StyleSheet.create({ + container: { + flex: 1, + backgroundColor: theme.background, + }, + header: { + paddingHorizontal: spacing.lg, + paddingTop: spacing.md, + paddingBottom: spacing.sm, + alignItems: 'center', + }, + titleGradientContainer: { + paddingHorizontal: spacing.lg, + paddingVertical: spacing.sm, + borderRadius: borderRadius.md, + marginBottom: spacing.xs, + }, + title: { + fontSize: fontSize['2xl'], + fontWeight: 'bold', + color: theme.text, + textAlign: 'center', + }, + subtitle: { + fontSize: fontSize.sm, + color: theme.textMuted, + textAlign: 'center', + }, + filterContainer: { + flexDirection: 'row', + paddingHorizontal: spacing.lg, + paddingVertical: spacing.sm, + gap: spacing.sm, + }, + filterChip: { + paddingHorizontal: spacing.md, + paddingVertical: spacing.xs, + borderRadius: borderRadius.full, + backgroundColor: theme.backgroundCard, + borderWidth: 1, + borderColor: theme.border, + }, + filterChipActive: { + backgroundColor: colors.primary[400], + borderColor: colors.primary[400], + }, + filterText: { + fontSize: fontSize.sm, + color: theme.textMuted, + }, + filterTextActive: { + color: theme.text, + fontWeight: '600', + }, + clearAllButton: { + alignSelf: 'center', + paddingHorizontal: spacing.md, + paddingVertical: spacing.xs, + marginBottom: spacing.sm, + }, + clearAllButtonText: { + fontSize: fontSize.sm, + color: colors.error.default, + fontWeight: '500', + }, + listContent: { + paddingHorizontal: spacing.lg, + paddingBottom: spacing.xl, + }, + sessionCard: { + backgroundColor: theme.backgroundCard, + borderRadius: borderRadius.lg, + padding: spacing.md, + marginBottom: spacing.md, + borderWidth: 1, + borderColor: theme.border, + }, + 
sessionHeader: { + flexDirection: 'row', + justifyContent: 'space-between', + alignItems: 'center', + marginBottom: spacing.sm, + }, + sessionHeaderLeft: { + flexDirection: 'row', + alignItems: 'center', + gap: spacing.sm, + flex: 1, + }, + sessionSource: { + fontSize: fontSize.base, + fontWeight: '600', + color: theme.text, + }, + activeBadge: { + flexDirection: 'row', + alignItems: 'center', + backgroundColor: colors.success.bg, + paddingHorizontal: spacing.sm, + paddingVertical: 2, + borderRadius: borderRadius.full, + gap: 4, + }, + activeDot: { + width: 6, + height: 6, + borderRadius: 3, + backgroundColor: colors.success.default, + }, + activeBadgeText: { + fontSize: fontSize.xs, + color: colors.success.default, + fontWeight: '600', + }, + deleteButton: { + padding: spacing.xs, + }, + sessionMetrics: { + flexDirection: 'row', + gap: spacing.lg, + marginBottom: spacing.sm, + }, + metric: { + flexDirection: 'row', + alignItems: 'center', + gap: 4, + }, + metricText: { + fontSize: fontSize.sm, + color: theme.textSecondary, + }, + destinations: { + flexDirection: 'row', + flexWrap: 'wrap', + gap: spacing.xs, + marginBottom: spacing.sm, + }, + destinationChip: { + flexDirection: 'row', + alignItems: 'center', + backgroundColor: colors.primary[900], + paddingHorizontal: spacing.sm, + paddingVertical: 2, + borderRadius: borderRadius.full, + gap: 4, + }, + destinationChipDisconnected: { + backgroundColor: colors.error.bg, + }, + destinationText: { + fontSize: fontSize.xs, + color: colors.primary[400], + fontWeight: '500', + }, + destinationTextDisconnected: { + color: colors.error.default, + }, + errorContainer: { + flexDirection: 'row', + alignItems: 'center', + gap: spacing.xs, + backgroundColor: colors.error.bg, + padding: spacing.sm, + borderRadius: borderRadius.md, + marginBottom: spacing.sm, + }, + errorText: { + fontSize: fontSize.sm, + color: colors.error.default, + flex: 1, + }, + sessionTime: { + fontSize: fontSize.xs, + color: theme.textMuted, + marginTop: 
spacing.xs, + }, + conversationLink: { + flexDirection: 'row', + alignItems: 'center', + gap: 4, + marginTop: spacing.xs, + }, + conversationLinkText: { + fontSize: fontSize.xs, + color: colors.primary[400], + }, + emptyState: { + flex: 1, + justifyContent: 'center', + alignItems: 'center', + paddingHorizontal: spacing.xl, + }, + emptyStateText: { + fontSize: fontSize.lg, + fontWeight: '600', + color: theme.textSecondary, + marginTop: spacing.md, + }, + emptyStateSubtext: { + fontSize: fontSize.sm, + color: theme.textMuted, + marginTop: spacing.xs, + textAlign: 'center', + }, +}); diff --git a/ushadow/mobile/app/_utils/sessionStorage.ts b/ushadow/mobile/app/_utils/sessionStorage.ts new file mode 100644 index 00000000..31a6b9c2 --- /dev/null +++ b/ushadow/mobile/app/_utils/sessionStorage.ts @@ -0,0 +1,99 @@ +/** + * Session Storage Utilities + * + * AsyncStorage persistence for streaming sessions. + * Maintains list of recent sessions with size limits. + */ + +import AsyncStorage from '@react-native-async-storage/async-storage'; +import { StreamingSession } from '../types/streamingSession'; + +const STORAGE_KEY = '@ushadow/streaming_sessions'; +const MAX_SESSIONS = 100; // Keep last 100 sessions + +/** + * Load sessions from storage + */ +export const loadSessions = async (): Promise => { + try { + const json = await AsyncStorage.getItem(STORAGE_KEY); + if (!json) return []; + + const sessions = JSON.parse(json) as StreamingSession[]; + + // Convert date strings back to Date objects + return sessions.map(session => ({ + ...session, + startTime: new Date(session.startTime), + endTime: session.endTime ? 
new Date(session.endTime) : undefined, + })); + } catch (error) { + console.error('[SessionStorage] Failed to load sessions:', error); + return []; + } +}; + +/** + * Save sessions to storage + */ +export const saveSessions = async (sessions: StreamingSession[]): Promise => { + try { + // Keep only the most recent sessions + const recentSessions = sessions.slice(0, MAX_SESSIONS); + await AsyncStorage.setItem(STORAGE_KEY, JSON.stringify(recentSessions)); + } catch (error) { + console.error('[SessionStorage] Failed to save sessions:', error); + } +}; + +/** + * Add a new session + */ +export const addSession = async (session: StreamingSession): Promise => { + const sessions = await loadSessions(); + sessions.unshift(session); // Add to beginning + await saveSessions(sessions); +}; + +/** + * Update an existing session + */ +export const updateSession = async (sessionId: string, updates: Partial): Promise => { + const sessions = await loadSessions(); + const index = sessions.findIndex(s => s.id === sessionId); + + if (index !== -1) { + sessions[index] = { ...sessions[index], ...updates }; + await saveSessions(sessions); + } +}; + +/** + * Delete a session + */ +export const deleteSession = async (sessionId: string): Promise => { + const sessions = await loadSessions(); + const filtered = sessions.filter(s => s.id !== sessionId); + await saveSessions(filtered); +}; + +/** + * Clear all sessions + */ +export const clearAllSessions = async (): Promise => { + try { + await AsyncStorage.removeItem(STORAGE_KEY); + } catch (error) { + console.error('[SessionStorage] Failed to clear sessions:', error); + } +}; + +/** + * Link a session to a conversation + */ +export const linkSessionToConversation = async ( + sessionId: string, + conversationId: string +): Promise => { + await updateSession(sessionId, { conversationId }); +}; diff --git a/ushadow/mobile/app/components/BackgroundTaskDebugPanel.tsx b/ushadow/mobile/app/components/BackgroundTaskDebugPanel.tsx new file mode 
100644 index 00000000..e230779f --- /dev/null +++ b/ushadow/mobile/app/components/BackgroundTaskDebugPanel.tsx @@ -0,0 +1,439 @@ +/** + * Background Task Debug Panel + * + * Shows diagnostic information about background task execution. + * Useful for debugging and verifying background task is working. + * + * Usage: + * ```tsx + * import { BackgroundTaskDebugPanel } from './components/BackgroundTaskDebugPanel'; + * + * // In your component: + * + * ``` + */ + +import React, { useState, useEffect } from 'react'; +import { + View, + Text, + StyleSheet, + TouchableOpacity, + ScrollView, +} from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { + getBackgroundTaskStatus, + getStoredConnectionState, + clearBackgroundTaskData, +} from '../services/backgroundTasks'; +import { + getPersistentLogs, + clearPersistentLogs, + PersistentLogEntry, +} from '../services/persistentLogger'; +import { theme, colors, spacing, borderRadius, fontSize } from '../theme'; + +export const BackgroundTaskDebugPanel: React.FC<{ testID?: string }> = ({ testID }) => { + const [isExpanded, setIsExpanded] = useState(false); + const [status, setStatus] = useState<{ + isRegistered: boolean; + lastCheck: string | null; + checkCount: number; + lastError: string | null; + } | null>(null); + const [connectionState, setConnectionState] = useState<{ + isConnected: boolean; + isStreaming: boolean; + deviceId?: string; + timestamp?: string; + } | null>(null); + const [persistentLogs, setPersistentLogs] = useState([]); + const [lastRefresh, setLastRefresh] = useState(new Date()); + + const refreshStatus = async () => { + const taskStatus = await getBackgroundTaskStatus(); + const connState = await getStoredConnectionState(); + const logs = await getPersistentLogs(); + setStatus(taskStatus); + setConnectionState(connState); + setPersistentLogs(logs); + setLastRefresh(new Date()); + }; + + useEffect(() => { + if (isExpanded) { + refreshStatus(); + } + }, [isExpanded]); + + const 
handleClearData = async () => { + await clearBackgroundTaskData(); + await clearPersistentLogs(); + await refreshStatus(); + }; + + const formatTimestamp = (timestamp: string | null): string => { + if (!timestamp) return 'Never'; + try { + const date = new Date(timestamp); + const now = new Date(); + const diffMs = now.getTime() - date.getTime(); + const diffMins = Math.floor(diffMs / 60000); + + if (diffMins < 1) return 'Just now'; + if (diffMins < 60) return `${diffMins}m ago`; + const diffHours = Math.floor(diffMins / 60); + if (diffHours < 24) return `${diffHours}h ago`; + return date.toLocaleString(); + } catch { + return timestamp; + } + }; + + if (!isExpanded) { + return ( + setIsExpanded(true)} + testID={testID} + > + + Background Task Debug + + + ); + } + + return ( + + {/* Header */} + + + + Background Task Debug + + + + + + setIsExpanded(false)} + testID={`${testID}-collapse`} + > + + + + + + + {/* Task Status */} + + Task Status + + Registered: + + + + {status?.isRegistered ? 'Yes' : 'No'} + + + + + Last Check: + + {formatTimestamp(status?.lastCheck || null)} + + + + Check Count: + {status?.checkCount || 0} + + {status?.lastError && ( + + Last Error: + + {status.lastError} + + + )} + + + {/* Connection State */} + + Connection State + {connectionState ? ( + <> + + Connected: + + + + {connectionState.isConnected ? 'Yes' : 'No'} + + + + + Streaming: + + + + {connectionState.isStreaming ? 'Yes' : 'No'} + + + + {connectionState.deviceId && ( + + Device: + + {connectionState.deviceId.substring(0, 20)}... + + + )} + + Updated: + + {formatTimestamp(connectionState.timestamp || null)} + + + + ) : ( + No connection state + )} + + + {/* Persistent Logs (survives reload) */} + + Recent Events (Survives Reload) + {persistentLogs.length > 0 ? 
( + + {persistentLogs.slice(0, 5).map((log, index) => ( + + + {log.type} + + {new Date(log.timestamp).toLocaleTimeString()} + + + {log.message} + + ))} + {persistentLogs.length > 5 && ( + + ...and {persistentLogs.length - 5} more events + + )} + + ) : ( + No events yet + )} + + + {/* Actions */} + + Actions + + + Clear Debug Data + + + + {/* Info */} + + + + Background tasks run every ~15 min on iOS, more frequently on Android. + Last refreshed: {lastRefresh.toLocaleTimeString()} + + + + + ); +}; + +const styles = StyleSheet.create({ + collapsedContainer: { + flexDirection: 'row', + alignItems: 'center', + gap: spacing.sm, + padding: spacing.md, + backgroundColor: theme.backgroundCard, + borderRadius: borderRadius.md, + borderWidth: 1, + borderColor: theme.border, + }, + collapsedText: { + flex: 1, + fontSize: fontSize.sm, + color: theme.textMuted, + }, + container: { + backgroundColor: theme.backgroundCard, + borderRadius: borderRadius.lg, + borderWidth: 1, + borderColor: theme.border, + overflow: 'hidden', + }, + header: { + flexDirection: 'row', + alignItems: 'center', + justifyContent: 'space-between', + padding: spacing.md, + backgroundColor: theme.backgroundInput, + borderBottomWidth: 1, + borderBottomColor: theme.border, + }, + headerLeft: { + flexDirection: 'row', + alignItems: 'center', + gap: spacing.sm, + }, + headerRight: { + flexDirection: 'row', + alignItems: 'center', + gap: spacing.sm, + }, + title: { + fontSize: fontSize.base, + fontWeight: '600', + color: theme.textPrimary, + }, + iconButton: { + padding: spacing.xs, + }, + content: { + padding: spacing.md, + maxHeight: 400, + }, + section: { + marginBottom: spacing.lg, + }, + sectionTitle: { + fontSize: fontSize.sm, + fontWeight: '600', + color: theme.textSecondary, + marginBottom: spacing.sm, + textTransform: 'uppercase', + }, + row: { + flexDirection: 'row', + justifyContent: 'space-between', + alignItems: 'center', + paddingVertical: spacing.xs, + }, + label: { + fontSize: fontSize.sm, + 
color: theme.textMuted, + }, + value: { + fontSize: fontSize.sm, + color: theme.textPrimary, + flex: 1, + textAlign: 'right', + }, + valueRow: { + flexDirection: 'row', + alignItems: 'center', + gap: spacing.xs, + }, + valueSuccess: { + color: colors.success.default, + }, + valueError: { + color: colors.error.default, + }, + noData: { + fontSize: fontSize.sm, + color: theme.textMuted, + fontStyle: 'italic', + }, + actionButton: { + flexDirection: 'row', + alignItems: 'center', + justifyContent: 'center', + gap: spacing.sm, + padding: spacing.md, + backgroundColor: theme.backgroundInput, + borderRadius: borderRadius.md, + borderWidth: 1, + borderColor: colors.error.default, + }, + actionButtonText: { + fontSize: fontSize.sm, + fontWeight: '600', + color: colors.error.default, + }, + logsContainer: { + gap: spacing.sm, + }, + logEntry: { + backgroundColor: theme.backgroundInput, + padding: spacing.sm, + borderRadius: borderRadius.sm, + borderLeftWidth: 3, + borderLeftColor: colors.primary[400], + }, + logHeader: { + flexDirection: 'row', + justifyContent: 'space-between', + marginBottom: spacing.xs, + }, + logType: { + fontSize: fontSize.xs, + fontWeight: '600', + color: colors.primary[400], + textTransform: 'uppercase', + }, + logTime: { + fontSize: fontSize.xs, + color: theme.textMuted, + }, + logMessage: { + fontSize: fontSize.sm, + color: theme.textPrimary, + }, + moreLogsText: { + fontSize: fontSize.xs, + color: theme.textMuted, + fontStyle: 'italic', + textAlign: 'center', + marginTop: spacing.xs, + }, + infoBox: { + flexDirection: 'row', + gap: spacing.sm, + padding: spacing.md, + backgroundColor: colors.primary[50], + borderRadius: borderRadius.md, + marginTop: spacing.md, + }, + infoText: { + flex: 1, + fontSize: fontSize.xs, + color: colors.primary[700], + lineHeight: 16, + }, +}); + +export default BackgroundTaskDebugPanel; diff --git a/ushadow/mobile/app/components/ConnectionLogViewer.tsx b/ushadow/mobile/app/components/ConnectionLogViewer.tsx index 
eb5827cf..e7128e64 100644 --- a/ushadow/mobile/app/components/ConnectionLogViewer.tsx +++ b/ushadow/mobile/app/components/ConnectionLogViewer.tsx @@ -22,6 +22,7 @@ import { ConnectionState, CONNECTION_TYPE_LABELS, } from '../types/connectionLog'; +import { StreamingSession } from '../types/streamingSession'; import { colors, theme, spacing, borderRadius, fontSize } from '../theme'; interface ConnectionLogViewerProps { @@ -29,24 +30,29 @@ interface ConnectionLogViewerProps { onClose: () => void; entries: ConnectionLogEntry[]; connectionState: ConnectionState; + sessions?: StreamingSession[]; onClearLogs: () => void; + onClearLogsByType: (type: ConnectionType) => void; + onClearSessions?: () => void; } -type FilterType = 'all' | ConnectionType; +type FilterType = 'all' | ConnectionType | 'sessions'; // Type-specific colors and icons -const TYPE_COLORS: Record = { +const TYPE_COLORS: Record = { network: colors.info.default, server: colors.primary[400], bluetooth: '#5E5CE6', websocket: colors.success.default, + sessions: colors.warning.default, }; -const TYPE_ICONS: Record = { +const TYPE_ICONS: Record = { network: 'wifi', server: 'server', bluetooth: 'bluetooth', websocket: 'swap-horizontal', + sessions: 'time-outline', }; const STATUS_ICONS: Record = { @@ -65,12 +71,13 @@ const STATUS_COLORS: Record = { unknown: theme.textMuted, }; -const FILTER_OPTIONS: { key: FilterType; label: string }[] = [ - { key: 'all', label: 'All' }, - { key: 'network', label: 'Network' }, - { key: 'server', label: 'Server' }, - { key: 'bluetooth', label: 'Bluetooth' }, - { key: 'websocket', label: 'WebSocket' }, +const TAB_OPTIONS: { key: FilterType; label: string; icon: keyof typeof Ionicons.glyphMap }[] = [ + { key: 'all', label: 'All', icon: 'list' }, + { key: 'network', label: 'Network', icon: 'wifi' }, + { key: 'server', label: 'Server', icon: 'server' }, + { key: 'bluetooth', label: 'BT', icon: 'bluetooth' }, + { key: 'websocket', label: 'WS', icon: 'swap-horizontal' }, + { key: 
'sessions', label: 'Sessions', icon: 'time-outline' }, ]; export const ConnectionLogViewer: React.FC = ({ @@ -78,14 +85,20 @@ export const ConnectionLogViewer: React.FC = ({ onClose, entries, connectionState, + sessions = [], onClearLogs, + onClearLogsByType, + onClearSessions, }) => { - const [activeFilter, setActiveFilter] = useState('all'); + const [activeTab, setActiveTab] = useState('all'); const filteredEntries = useMemo(() => { - if (activeFilter === 'all') return entries; - return entries.filter((entry) => entry.type === activeFilter); - }, [entries, activeFilter]); + if (activeTab === 'all') return entries; + if (activeTab === 'sessions') return []; + return entries.filter((entry) => entry.type === activeTab); + }, [entries, activeTab]); + + const isSessionsView = activeTab === 'sessions'; const formatTime = (date: Date): string => { return date.toLocaleTimeString('en-US', { @@ -111,63 +124,49 @@ export const ConnectionLogViewer: React.FC = ({ }); }; - const renderStatusSummary = () => ( - - {(['network', 'server', 'bluetooth', 'websocket'] as ConnectionType[]).map((type) => { - const status = connectionState[type]; - const typeColor = TYPE_COLORS[type]; - const statusColor = STATUS_COLORS[status]; - const typeIcon = TYPE_ICONS[type]; - const statusIcon = STATUS_ICONS[status]; + const renderTabs = () => ( + + {TAB_OPTIONS.map((tab) => { + const isActive = activeTab === tab.key; + let tabColor = colors.primary[400]; + let status: string | undefined; - return ( - - - - - - - {CONNECTION_TYPE_LABELS[type]} - - - - ); - })} - - ); + if (tab.key !== 'all' && tab.key !== 'sessions') { + tabColor = TYPE_COLORS[tab.key as ConnectionType]; + status = connectionState[tab.key as ConnectionType]; + } else if (tab.key === 'sessions') { + tabColor = TYPE_COLORS.sessions; + } - const renderFilters = () => ( - - {FILTER_OPTIONS.map((option) => { - const isActive = activeFilter === option.key; - const chipColor = option.key === 'all' - ? 
colors.primary[400] - : TYPE_COLORS[option.key as ConnectionType]; + const statusColor = status ? STATUS_COLORS[status] : undefined; return ( setActiveFilter(option.key)} - testID={`filter-${option.key}`} + onPress={() => setActiveTab(tab.key)} + testID={`tab-${tab.key}`} > - {option.key !== 'all' && ( + - )} + {status && statusColor && ( + + )} + - {option.label} + {tab.label} ); @@ -215,12 +214,98 @@ export const ConnectionLogViewer: React.FC = ({ ); }; + const renderSessionItem = ({ item }: { item: StreamingSession }) => { + const duration = item.durationSeconds + ? `${Math.floor(item.durationSeconds / 60)}m ${item.durationSeconds % 60}s` + : 'In progress'; + const startTime = new Date(item.startTime).toLocaleString(); + const hasError = !!item.error; + + // Format end reason + const endReasonLabels = { + manual_stop: 'Stopped by user', + connection_lost: 'Connection lost', + error: 'Error occurred', + timeout: 'Connection timeout', + }; + const endReasonText = item.endReason ? endReasonLabels[item.endReason] : 'Unknown'; + + return ( + + + + + + + + {item.source.type === 'phone' ? 'Phone Microphone' : `OMI Device (${item.source.deviceName})`} + + {startTime} + + {hasError && ( + + )} + + + + Duration: + {duration} + + + Codec: + {item.codec.toUpperCase()} + + + Data: + + {(item.bytesTransferred / 1024).toFixed(1)} KB ({item.chunksTransferred} chunks) + + + {item.endTime && ( + + Ended: + + {endReasonText} + + + )} + {item.destinations && item.destinations.length > 0 && ( + + Destinations: + + {item.destinations.map(d => d.name).join(', ')} + + + )} + {hasError && ( + + {item.error} + + )} + + + ); + }; + const renderEmptyState = () => ( - - No log entries + + + {isSessionsView ? 'No sessions' : 'No log entries'} + - Connection events will appear here as they occur + {isSessionsView + ? 
'Streaming sessions will appear here as you use the app' + : 'Connection events will appear here as they occur' + } ); @@ -245,37 +330,68 @@ export const ConnectionLogViewer: React.FC = ({ - {/* Current Status Summary */} - {renderStatusSummary()} - - {/* Filters */} - {renderFilters()} + {/* Tabs */} + {renderTabs()} - {/* Log Count */} + {/* Count and Actions */} - {filteredEntries.length} {filteredEntries.length === 1 ? 'entry' : 'entries'} + {isSessionsView + ? `${sessions.length} ${sessions.length === 1 ? 'session' : 'sessions'}` + : `${filteredEntries.length} ${filteredEntries.length === 1 ? 'entry' : 'entries'}` + } - {entries.length > 0 && ( - - Clear All - - )} + + {!isSessionsView && activeTab !== 'all' && filteredEntries.length > 0 && ( + onClearLogsByType(activeTab as ConnectionType)} + testID={`clear-${activeTab}-logs-button`} + > + Clear {CONNECTION_TYPE_LABELS[activeTab as ConnectionType]} + + )} + {!isSessionsView && entries.length > 0 && ( + + Clear All + + )} + {isSessionsView && sessions.length > 0 && onClearSessions && ( + + Clear Sessions + + )} + - {/* Log List */} - item.id} - renderItem={renderLogEntry} - ListEmptyComponent={renderEmptyState} - contentContainerStyle={styles.listContent} - showsVerticalScrollIndicator={true} - /> + {/* Content List */} + {isSessionsView ? 
( + item.id} + renderItem={renderSessionItem} + ListEmptyComponent={renderEmptyState} + contentContainerStyle={styles.listContent} + showsVerticalScrollIndicator={true} + /> + ) : ( + item.id} + renderItem={renderLogEntry} + ListEmptyComponent={renderEmptyState} + contentContainerStyle={styles.listContent} + showsVerticalScrollIndicator={true} + /> + )} ); @@ -309,69 +425,41 @@ const styles = StyleSheet.create({ fontWeight: '600', color: colors.primary[400], }, - statusSummary: { + tabContainer: { flexDirection: 'row', justifyContent: 'space-around', - paddingVertical: spacing.md, - paddingHorizontal: spacing.sm, backgroundColor: theme.backgroundCard, borderBottomWidth: 1, borderBottomColor: theme.border, }, - statusItem: { + tab: { + flex: 1, alignItems: 'center', - gap: 4, + paddingVertical: spacing.md, + borderBottomWidth: 3, + borderBottomColor: 'transparent', }, - statusIconContainer: { - width: 40, - height: 40, - borderRadius: 20, - borderWidth: 2, - alignItems: 'center', - justifyContent: 'center', - backgroundColor: theme.backgroundInput, + tabActive: { + borderBottomWidth: 3, + }, + tabIconContainer: { position: 'relative', + marginBottom: 4, }, - statusDot: { + tabStatusDot: { position: 'absolute', - bottom: -2, + top: -2, right: -2, - width: 12, - height: 12, - borderRadius: 6, + width: 10, + height: 10, + borderRadius: 5, borderWidth: 2, borderColor: theme.backgroundCard, }, - statusLabel: { + tabLabel: { fontSize: fontSize.xs, fontWeight: '500', }, - filterContainer: { - flexDirection: 'row', - paddingHorizontal: spacing.lg, - paddingVertical: spacing.sm, - gap: spacing.sm, - borderBottomWidth: 1, - borderBottomColor: theme.border, - }, - filterChip: { - flexDirection: 'row', - alignItems: 'center', - gap: 4, - paddingHorizontal: spacing.md, - paddingVertical: spacing.xs, - borderRadius: borderRadius.full, - backgroundColor: theme.backgroundInput, - borderWidth: 2, - borderColor: 'transparent', - }, - filterChipActive: { - backgroundColor: 
theme.backgroundCard, - }, - filterText: { - fontSize: fontSize.sm, - color: theme.textMuted, - }, countContainer: { flexDirection: 'row', justifyContent: 'space-between', @@ -383,6 +471,10 @@ const styles = StyleSheet.create({ fontSize: fontSize.sm, color: theme.textMuted, }, + clearButtonsContainer: { + flexDirection: 'row', + gap: spacing.sm, + }, clearButton: { paddingHorizontal: spacing.sm, paddingVertical: spacing.xs, @@ -468,6 +560,69 @@ const styles = StyleSheet.create({ marginTop: spacing.xs, textAlign: 'center', }, + // Session styles + sessionEntry: { + backgroundColor: theme.backgroundCard, + marginHorizontal: spacing.lg, + marginVertical: spacing.sm, + borderRadius: borderRadius.md, + padding: spacing.md, + borderWidth: 1, + borderColor: theme.border, + }, + sessionHeader: { + flexDirection: 'row', + alignItems: 'center', + marginBottom: spacing.sm, + }, + sessionIconContainer: { + width: 36, + height: 36, + borderRadius: 18, + backgroundColor: colors.primary[400] + '20', + alignItems: 'center', + justifyContent: 'center', + marginRight: spacing.sm, + }, + sessionInfo: { + flex: 1, + }, + sessionSource: { + fontSize: fontSize.sm, + fontWeight: '600', + color: theme.textPrimary, + }, + sessionTime: { + fontSize: fontSize.xs, + color: theme.textMuted, + marginTop: 2, + }, + sessionDetails: { + gap: spacing.xs, + }, + sessionDetailRow: { + flexDirection: 'row', + justifyContent: 'space-between', + }, + sessionDetailLabel: { + fontSize: fontSize.sm, + color: theme.textSecondary, + }, + sessionDetailValue: { + fontSize: fontSize.sm, + color: theme.textPrimary, + fontWeight: '500', + }, + sessionErrorContainer: { + marginTop: spacing.sm, + padding: spacing.sm, + backgroundColor: colors.error.bgSolid, + borderRadius: borderRadius.sm, + }, + sessionErrorText: { + fontSize: fontSize.sm, + color: colors.error.light, + }, }); export default ConnectionLogViewer; diff --git a/ushadow/mobile/app/components/LoginScreen.tsx 
b/ushadow/mobile/app/components/LoginScreen.tsx index 3ebaccc3..d07a6c74 100644 --- a/ushadow/mobile/app/components/LoginScreen.tsx +++ b/ushadow/mobile/app/components/LoginScreen.tsx @@ -25,7 +25,7 @@ import { ScrollView, } from 'react-native'; import { colors, theme, spacing, borderRadius, fontSize } from '../theme'; -import { saveAuthToken, saveApiUrl } from '../_utils/authStorage'; +import { saveAuthToken, saveApiUrl, getDefaultServerUrl, setDefaultServerUrl } from '../_utils/authStorage'; interface LoginScreenProps { visible: boolean; diff --git a/ushadow/mobile/app/components/OmiDeviceCard.tsx b/ushadow/mobile/app/components/OmiDeviceCard.tsx index 061a2466..ba1308f2 100644 --- a/ushadow/mobile/app/components/OmiDeviceCard.tsx +++ b/ushadow/mobile/app/components/OmiDeviceCard.tsx @@ -23,6 +23,8 @@ import { useDeviceConnection, useAudioListener, useAudioStreamer, + useAppLifecycle, + useConnectionHealth, } from '../hooks'; import { useBluetooth, useOmiConnection } from '../contexts'; import { theme, colors, spacing, borderRadius, fontSize } from '../theme'; @@ -33,6 +35,11 @@ import { setActiveOmiDevice, } from '../_utils/omiDeviceStorage'; import { appendTokenToUrl } from '../_utils/authStorage'; +import { + registerBackgroundTask, + unregisterBackgroundTask, + updateConnectionState, +} from '../services/backgroundTasks'; interface OmiDeviceCardProps { device: SavedOmiDevice; @@ -105,20 +112,10 @@ export const OmiDeviceCard: React.FC = ({ const isConnected = connectedDeviceId === device.id; console.log('[OmiDeviceCard] All hooks done, isConnected:', isConnected); - // Notify parent of streaming state changes - // Use ref to avoid infinite loop from callback changing on every render - const onStreamingStateChangeRef = useRef(onStreamingStateChange); - onStreamingStateChangeRef.current = onStreamingStateChange; - - useEffect(() => { - console.log('[OmiDeviceCard] useEffect: streaming state changed, isStreaming:', isStreaming); - 
onStreamingStateChangeRef.current?.(isStreaming); - console.log('[OmiDeviceCard] useEffect: callback complete'); - }, [isStreaming]); - /** * Build WebSocket URL for OMI device streaming * Uses ws_omi endpoint with authentication + * Moved here to be available in lifecycle callbacks */ const buildOmiWebSocketUrl = useCallback((): string => { let url = webSocketUrl.trim(); @@ -141,6 +138,113 @@ export const OmiDeviceCard: React.FC = ({ return url; }, [webSocketUrl, authToken]); + // Connection health monitoring + const { checkHealth, bluetoothHealthy, websocketHealthy } = useConnectionHealth({ + omiConnection: isConnected ? omiConnection : undefined, + websocketReadyState: audioStreamer.getWebSocketReadyState(), + onUnhealthy: (type) => { + console.log(`[OmiDeviceCard] โš ๏ธ Connection unhealthy: ${type}`); + }, + }); + + // App lifecycle management - handle background/foreground transitions + const { appState, isActive: isAppActive } = useAppLifecycle({ + onForeground: async () => { + console.log('[OmiDeviceCard] ๐ŸŸข App returned to foreground'); + + // Only check health for THIS device if it's supposed to be active + if (!isActive) { + console.log('[OmiDeviceCard] Device not active, skipping health check'); + return; + } + + console.log('[OmiDeviceCard] Running connection health check...'); + const health = await checkHealth(); + + // Handle Bluetooth connection health + if (isConnected && !health.bluetooth) { + console.log('[OmiDeviceCard] โŒ Bluetooth connection lost in background, reconnecting...'); + try { + await connectToDevice(device.id); + } catch (error) { + console.error('[OmiDeviceCard] Failed to reconnect Bluetooth:', error); + } + } + + // Handle WebSocket connection health + if (isStreaming && !health.websocket) { + console.log('[OmiDeviceCard] โŒ WebSocket connection lost in background, reconnecting...'); + try { + const wsUrl = buildOmiWebSocketUrl(); + await audioStreamer.startStreaming(wsUrl); + + // Restart audio listener if Bluetooth is 
healthy + if (health.bluetooth) { + await startAudioListener(async (audioBytes: Uint8Array) => { + if (audioBytes.length > 0 && audioStreamer.getWebSocketReadyState() === WebSocket.OPEN) { + await audioStreamer.sendAudio(audioBytes); + } + }); + } + } catch (error) { + console.error('[OmiDeviceCard] Failed to reconnect WebSocket:', error); + setIsStreaming(false); + } + } + + console.log('[OmiDeviceCard] Foreground transition complete'); + }, + + onBackground: () => { + console.log('[OmiDeviceCard] ๐Ÿ”ด App moved to background'); + console.log('[OmiDeviceCard] Connection state:', { + isConnected, + isStreaming, + bluetoothHealthy, + websocketHealthy, + }); + // Note: We keep connections alive, OS may suspend them + // Reconnection will happen in onForeground if needed + }, + }); + + // Notify parent of streaming state changes + // Use ref to avoid infinite loop from callback changing on every render + const onStreamingStateChangeRef = useRef(onStreamingStateChange); + onStreamingStateChangeRef.current = onStreamingStateChange; + + useEffect(() => { + console.log('[OmiDeviceCard] useEffect: streaming state changed, isStreaming:', isStreaming); + onStreamingStateChangeRef.current?.(isStreaming); + console.log('[OmiDeviceCard] useEffect: callback complete'); + }, [isStreaming]); + + /** + * Cross-connection monitoring (Phase 3: Fix 5) + * Stop one connection if the other fails to save resources + */ + useEffect(() => { + // Only monitor if this device is supposed to be streaming + if (!isStreaming) return; + + const wsReady = audioStreamer.getWebSocketReadyState(); + + // If Bluetooth is listening but WebSocket disconnected + if (isListeningAudio && wsReady !== WebSocket.OPEN) { + console.log('[OmiDeviceCard] โš ๏ธ WebSocket disconnected while Bluetooth streaming, stopping Bluetooth'); + stopAudioListener().catch((err) => { + console.error('[OmiDeviceCard] Failed to stop audio listener:', err); + }); + } + + // If WebSocket is open but Bluetooth stopped listening + 
if (!isListeningAudio && wsReady === WebSocket.OPEN) { + console.log('[OmiDeviceCard] โš ๏ธ Bluetooth stopped while WebSocket open, stopping WebSocket'); + audioStreamer.stopStreaming(); + setIsStreaming(false); + } + }, [isListeningAudio, isStreaming, audioStreamer, stopAudioListener]); + /** * Handle device connection/disconnection */ @@ -151,8 +255,18 @@ export const OmiDeviceCard: React.FC = ({ await stopAudioListener(); audioStreamer.stopStreaming(); setIsStreaming(false); + + // Unregister background task when stopping streaming + await unregisterBackgroundTask(); } await disconnectFromDevice(); + + // Update connection state + await updateConnectionState({ + isConnected: false, + isStreaming: false, + deviceId: device.id, + }); } else { if (!isBluetoothOn) { Alert.alert('Bluetooth Required', 'Please enable Bluetooth to connect.'); @@ -168,6 +282,13 @@ export const OmiDeviceCard: React.FC = ({ // Set as active device when connected await setActiveOmiDevice(device.id); onSetActive(device.id); + + // Update connection state + await updateConnectionState({ + isConnected: true, + isStreaming: false, + deviceId: device.id, + }); } catch (error) { console.error('[OmiDeviceCard] Connection failed:', error); } @@ -205,6 +326,17 @@ export const OmiDeviceCard: React.FC = ({ await stopAudioListener(); audioStreamer.stopStreaming(); setIsStreaming(false); + + // Unregister background task + console.log('[OmiDeviceCard] Unregistering background task...'); + await unregisterBackgroundTask(); + + // Update connection state for background task + await updateConnectionState({ + isConnected, + isStreaming: false, + deviceId: device.id, + }); } else { // Start streaming try { @@ -222,10 +354,33 @@ export const OmiDeviceCard: React.FC = ({ }); setIsStreaming(true); + + // Register background task to keep connection alive + console.log('[OmiDeviceCard] Registering background task...'); + const registered = await registerBackgroundTask(60); // Check every 60s minimum + if 
(registered) { + console.log('[OmiDeviceCard] โœ… Background task registered'); + } else { + console.warn('[OmiDeviceCard] โš ๏ธ Failed to register background task'); + } + + // Update connection state for background task + await updateConnectionState({ + isConnected, + isStreaming: true, + deviceId: device.id, + }); } catch (error) { console.error('[OmiDeviceCard] Failed to start streaming:', error); Alert.alert('Streaming Error', 'Failed to start audio streaming.'); audioStreamer.stopStreaming(); + + // Clean up connection state + await updateConnectionState({ + isConnected, + isStreaming: false, + deviceId: device.id, + }); } } }, [ diff --git a/ushadow/mobile/app/components/index.ts b/ushadow/mobile/app/components/index.ts index 99829a66..e31c1325 100644 --- a/ushadow/mobile/app/components/index.ts +++ b/ushadow/mobile/app/components/index.ts @@ -16,6 +16,9 @@ export { OmiDeviceScanner } from './OmiDeviceScanner'; export { OmiDeviceCard } from './OmiDeviceCard'; export { OmiDeviceSection } from './OmiDeviceSection'; +// Debug Components +export { BackgroundTaskDebugPanel } from './BackgroundTaskDebugPanel'; + // Streaming Components (unified) export { StreamingDisplay, diff --git a/ushadow/mobile/app/components/streaming/UnifiedStreamingPage.tsx b/ushadow/mobile/app/components/streaming/UnifiedStreamingPage.tsx index 766dddd6..64bd1322 100644 --- a/ushadow/mobile/app/components/streaming/UnifiedStreamingPage.tsx +++ b/ushadow/mobile/app/components/streaming/UnifiedStreamingPage.tsx @@ -59,15 +59,29 @@ import { appendTokenToUrl, saveAuthToken } from '../../_utils/authStorage'; import { verifyUnodeAuth } from '../../services/chronicleApi'; import { AudioDestination } from '../../services/audioProviderApi'; +// Types +import { SessionSource as SessionSourceType } from '../../types/streamingSession'; +import { RelayStatus } from '../../hooks/useAudioStreamer'; + interface UnifiedStreamingPageProps { authToken: string | null; onAuthRequired?: () => void; + 
onWebSocketLog?: (status: 'connecting' | 'connected' | 'disconnected' | 'error', message: string, details?: string) => void; + onBluetoothLog?: (status: 'connecting' | 'connected' | 'disconnected' | 'error', message: string, details?: string) => void; + onSessionStart?: (source: SessionSourceType, codec: 'pcm' | 'opus') => Promise; + onSessionUpdate?: (sessionId: string, relayStatus: RelayStatus) => void; + onSessionEnd?: (sessionId: string, error?: string, endReason?: 'manual_stop' | 'connection_lost' | 'error' | 'timeout') => void; testID?: string; } export const UnifiedStreamingPage: React.FC = ({ authToken, onAuthRequired, + onWebSocketLog, + onBluetoothLog, + onSessionStart, + onSessionUpdate, + onSessionEnd, testID = 'unified-streaming', }) => { // Source state @@ -95,13 +109,16 @@ export const UnifiedStreamingPage: React.FC = ({ const streamingStartTime = useRef(null); const [startTime, setStartTime] = useState(undefined); + // Session tracking + const currentSessionIdRef = useRef(null); + // OMI connection context const omiConnection = useOmiConnection(); // Phone microphone streaming hook const phoneStreaming = useStreaming(); - // OMI device connection + // OMI device connection with bluetooth logging const { connectedDeviceId, isConnecting: isOmiConnecting, @@ -110,7 +127,9 @@ export const UnifiedStreamingPage: React.FC = ({ batteryLevel, getBatteryLevel, currentCodec, // BROKEN: codec detection unreliable, so we hardcode Opus for all OMI devices - } = useDeviceConnection(omiConnection); + } = useDeviceConnection(omiConnection, { + onLog: onBluetoothLog, + }); // Derive OMI connection status for SourceSelector const omiConnectionStatus: 'disconnected' | 'connecting' | 'connected' = @@ -131,7 +150,14 @@ export const UnifiedStreamingPage: React.FC = ({ } = useAudioListener(omiConnection, isOmiConnected); // WebSocket streamer for OMI - const omiStreamer = useAudioStreamer(); + const omiStreamer = useAudioStreamer({ + onLog: onWebSocketLog, + onRelayStatus: 
(status) => { + if (currentSessionIdRef.current && onSessionUpdate) { + onSessionUpdate(currentSessionIdRef.current, status); + } + }, + }); // Combined state const isStreaming = selectedSource.type === 'microphone' @@ -150,6 +176,30 @@ export const UnifiedStreamingPage: React.FC = ({ }); }, [selectedSource.type, isStreaming, phoneStreaming.isStreaming, phoneStreaming.isRecording, omiStreamer.isStreaming, isListeningAudio]); + // Monitor for permanent connection failures (when reconnection attempts exhausted) + useEffect(() => { + const currentError = selectedSource.type === 'microphone' ? phoneStreaming.error : omiStreamer.error; + const currentRetrying = selectedSource.type === 'microphone' ? phoneStreaming.isRetrying : omiStreamer.isRetrying; + const wasStreaming = selectedSource.type === 'microphone' ? phoneStreaming.isStreaming : omiStreamer.isStreaming; + + // If there's an error, not retrying anymore, and we have an active session, it means connection failed permanently + if (currentError && !currentRetrying && !wasStreaming && currentSessionIdRef.current && onSessionEnd) { + console.log('[UnifiedStreaming] Connection failed permanently, ending session'); + const endReason = currentError.toLowerCase().includes('timeout') ? 'timeout' : 'connection_lost'; + onSessionEnd(currentSessionIdRef.current, currentError, endReason); + currentSessionIdRef.current = null; + } + }, [ + selectedSource.type, + phoneStreaming.error, + phoneStreaming.isRetrying, + phoneStreaming.isStreaming, + omiStreamer.error, + omiStreamer.isRetrying, + omiStreamer.isStreaming, + onSessionEnd, + ]); + const isConnecting = selectedSource.type === 'microphone' ? 
phoneStreaming.isConnecting : (isOmiConnecting || omiStreamer.isConnecting); @@ -371,11 +421,15 @@ export const UnifiedStreamingPage: React.FC = ({ const { buildRelayUrl } = await import('../../services/audioProviderApi'); + // Determine audio source for codec parameter + // mic = pcm, omi = opus + const audioSource = selectedSource.type === 'microphone' ? 'mic' : 'omi'; + // Always use relay URL (even for single destination) // Mobile can't connect directly to internal Docker container names like "chronicle:5001" // The relay handles forwarding to internal services - const streamUrl = buildRelayUrl(selectedUNode.apiUrl, authToken, selectedDestinations); - console.log('[UnifiedStreaming] Built relay URL for', selectedDestinations.length, 'destination(s):', streamUrl); + const streamUrl = buildRelayUrl(selectedUNode.apiUrl, authToken, selectedDestinations, audioSource); + console.log('[UnifiedStreaming] Built relay URL for', selectedDestinations.length, 'destination(s) with codec:', audioSource === 'mic' ? 'pcm' : 'opus', streamUrl); return streamUrl; } catch (err) { @@ -383,7 +437,7 @@ export const UnifiedStreamingPage: React.FC = ({ Alert.alert('Error', err instanceof Error ? err.message : 'Failed to build stream URL'); return null; } - }, [selectedUNode, authToken, availableDestinations, selectedDestinationIds]); + }, [selectedUNode, authToken, availableDestinations, selectedDestinationIds, selectedSource.type]); // Handle source change const handleSourceChange = useCallback(async (source: StreamSource) => { @@ -441,6 +495,19 @@ export const UnifiedStreamingPage: React.FC = ({ } console.log('[UnifiedStreaming] Starting stream to:', streamUrl); + + // Start session tracking + if (onSessionStart) { + const sessionSource: SessionSourceType = selectedSource.type === 'omi' && selectedSource.deviceId + ? 
{ type: 'omi', deviceId: selectedSource.deviceId, deviceName: selectedSource.deviceName } + : { type: 'microphone' }; + + const codec = selectedSource.type === 'microphone' ? 'pcm' : 'opus'; + const sessionId = await onSessionStart(sessionSource, codec); + currentSessionIdRef.current = sessionId; + console.log('[UnifiedStreaming] Session started:', sessionId); + } + try { if (selectedSource.type === 'microphone') { // Phone microphone uses PCM @@ -467,8 +534,15 @@ export const UnifiedStreamingPage: React.FC = ({ }); } } catch (err) { + const errorMessage = (err as Error).message || 'Failed to start streaming'; console.error('[UnifiedStreaming] Failed to start streaming:', err); - Alert.alert('Streaming Error', (err as Error).message || 'Failed to start streaming'); + Alert.alert('Streaming Error', errorMessage); + + // End session with error + if (currentSessionIdRef.current && onSessionEnd) { + onSessionEnd(currentSessionIdRef.current, errorMessage, 'error'); + currentSessionIdRef.current = null; + } } }, [ selectedSource, @@ -478,6 +552,8 @@ export const UnifiedStreamingPage: React.FC = ({ connectOmiDevice, omiStreamer, startAudioListener, + onSessionStart, + onSessionEnd, ]); // Stop streaming @@ -489,10 +565,23 @@ export const UnifiedStreamingPage: React.FC = ({ await stopAudioListener(); omiStreamer.stopStreaming(); } + + // End session (clean stop via user button) + if (currentSessionIdRef.current && onSessionEnd) { + onSessionEnd(currentSessionIdRef.current, undefined, 'manual_stop'); + currentSessionIdRef.current = null; + } } catch (err) { + const errorMessage = (err as Error).message || 'Failed to stop streaming'; console.error('[UnifiedStreaming] Failed to stop streaming:', err); + + // End session with error + if (currentSessionIdRef.current && onSessionEnd) { + onSessionEnd(currentSessionIdRef.current, errorMessage, 'error'); + currentSessionIdRef.current = null; + } } - }, [selectedSource, phoneStreaming, stopAudioListener, omiStreamer]); + }, 
[selectedSource, phoneStreaming, stopAudioListener, omiStreamer, onSessionEnd]); // Toggle streaming const handleStreamingPress = useCallback(async () => { diff --git a/ushadow/mobile/app/contexts/FeatureFlagContext.tsx b/ushadow/mobile/app/contexts/FeatureFlagContext.tsx new file mode 100644 index 00000000..cced4128 --- /dev/null +++ b/ushadow/mobile/app/contexts/FeatureFlagContext.tsx @@ -0,0 +1,85 @@ +/** + * Feature Flag Context + * + * Provides global feature flag state management for the mobile app. + */ + +import React, { createContext, useContext, useState, useEffect, useCallback } from 'react'; +import { fetchFeatureFlags, FeatureFlagsResponse, clearFeatureFlagsCache } from '../services/featureFlagService'; + +interface FeatureFlagContextValue { + flags: FeatureFlagsResponse; + loading: boolean; + error: string | null; + refreshFlags: () => Promise; + isEnabled: (flagName: string) => boolean; +} + +const FeatureFlagContext = createContext(undefined); + +interface FeatureFlagProviderProps { + children: React.ReactNode; +} + +export function FeatureFlagProvider({ children }: FeatureFlagProviderProps) { + const [flags, setFlags] = useState({}); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + + const loadFlags = useCallback(async () => { + console.log('[FeatureFlagContext] Loading feature flags...'); + try { + setLoading(true); + setError(null); + const fetchedFlags = await fetchFeatureFlags(); + console.log('[FeatureFlagContext] Loaded flags:', Object.keys(fetchedFlags).length); + setFlags(fetchedFlags); + } catch (err) { + const message = err instanceof Error ? 
err.message : 'Failed to load feature flags'; + setError(message); + console.error('[FeatureFlagContext] Error loading flags:', err); + } finally { + setLoading(false); + } + }, []); + + const refreshFlags = useCallback(async () => { + // Clear cache to force fresh fetch + await clearFeatureFlagsCache(); + await loadFlags(); + }, [loadFlags]); + + const isEnabled = useCallback( + (flagName: string): boolean => { + return flags[flagName]?.enabled ?? false; + }, + [flags] + ); + + // Load flags on mount + useEffect(() => { + console.log('[FeatureFlagContext] Provider mounted, loading flags on startup...'); + loadFlags(); + }, [loadFlags]); + + const value: FeatureFlagContextValue = { + flags, + loading, + error, + refreshFlags, + isEnabled, + }; + + return {children}; +} + +/** + * Hook to access feature flags context. + */ +export function useFeatureFlagContext() { + const context = useContext(FeatureFlagContext); + if (context === undefined) { + throw new Error('useFeatureFlagContext must be used within a FeatureFlagProvider'); + } + return context; +} diff --git a/ushadow/mobile/app/hooks/index.ts b/ushadow/mobile/app/hooks/index.ts index ed902bf7..29d2212f 100644 --- a/ushadow/mobile/app/hooks/index.ts +++ b/ushadow/mobile/app/hooks/index.ts @@ -21,8 +21,17 @@ export type { UseStreaming } from './useStreaming'; export { useTailscaleDiscovery } from './useTailscaleDiscovery'; // Bluetooth and connection hooks -export { useBluetoothManager } from './useBluetoothManager'; +// NOTE: useBluetoothManager is deprecated - use useBluetooth from contexts instead +// export { useBluetoothManager } from './useBluetoothManager'; export { useConnectionLog } from './useConnectionLog'; +export { useSessionTracking } from './useSessionTracking'; + +// App lifecycle hooks +export { useAppLifecycle } from './useAppLifecycle'; +export type { UseAppLifecycle } from './useAppLifecycle'; + +export { useConnectionHealth } from './useConnectionHealth'; +export type { UseConnectionHealth 
} from './useConnectionHealth'; // OMI Device hooks (from chronicle) export { useDeviceConnection } from './useDeviceConnection'; diff --git a/ushadow/mobile/app/hooks/useAppLifecycle.ts b/ushadow/mobile/app/hooks/useAppLifecycle.ts new file mode 100644 index 00000000..8154be84 --- /dev/null +++ b/ushadow/mobile/app/hooks/useAppLifecycle.ts @@ -0,0 +1,107 @@ +/** + * useAppLifecycle Hook + * + * Monitors React Native AppState to detect when app moves to/from background. + * This is CRITICAL for maintaining Bluetooth and WebSocket connections. + * + * Usage: + * ```typescript + * const { appState, isActive } = useAppLifecycle({ + * onForeground: () => { + * console.log('App returned to foreground - check connection health'); + * // Verify connections are alive, trigger reconnection if needed + * }, + * onBackground: () => { + * console.log('App moved to background'); + * // Optional: prepare for potential connection suspension + * } + * }); + * ``` + */ + +import { useState, useEffect, useCallback, useRef } from 'react'; +import { AppState, AppStateStatus } from 'react-native'; +import { addPersistentLog } from '../services/persistentLogger'; + +interface UseAppLifecycleOptions { + onForeground?: () => void; + onBackground?: () => void; + onInactive?: () => void; +} + +interface UseAppLifecycle { + appState: AppStateStatus; + isActive: boolean; + isBackground: boolean; + isInactive: boolean; +} + +/** + * Hook to monitor app lifecycle state changes. 
+ * + * AppState values: + * - 'active': App is in foreground and receiving events + * - 'background': App is in background (user pressed Home) + * - 'inactive': Transition state (iOS only, e.g., during calls or app switcher) + */ +export const useAppLifecycle = (options?: UseAppLifecycleOptions): UseAppLifecycle => { + const { onForeground, onBackground, onInactive } = options || {}; + + const [appState, setAppState] = useState(AppState.currentState); + + // Use refs for callbacks to avoid recreating listener on every render + const onForegroundRef = useRef(onForeground); + const onBackgroundRef = useRef(onBackground); + const onInactiveRef = useRef(onInactive); + + useEffect(() => { + onForegroundRef.current = onForeground; + onBackgroundRef.current = onBackground; + onInactiveRef.current = onInactive; + }, [onForeground, onBackground, onInactive]); + + useEffect(() => { + console.log('[AppLifecycle] Setting up AppState listener, current state:', AppState.currentState); + + const handleAppStateChange = (nextAppState: AppStateStatus) => { + console.log(`[AppLifecycle] State change: ${appState} -> ${nextAppState}`); + + // Detect transitions + if (appState.match(/inactive|background/) && nextAppState === 'active') { + // App has come to foreground + console.log('[AppLifecycle] ๐ŸŸข App entered FOREGROUND'); + addPersistentLog('lifecycle', '๐ŸŸข App entered FOREGROUND', { from: appState, to: nextAppState }); + onForegroundRef.current?.(); + } else if (appState === 'active' && nextAppState === 'background') { + // App has gone to background + console.log('[AppLifecycle] ๐Ÿ”ด App entered BACKGROUND'); + addPersistentLog('lifecycle', '๐Ÿ”ด App entered BACKGROUND', { from: appState, to: nextAppState }); + onBackgroundRef.current?.(); + } else if (nextAppState === 'inactive') { + // App is in transition (iOS only) + console.log('[AppLifecycle] ๐ŸŸก App is INACTIVE'); + addPersistentLog('lifecycle', '๐ŸŸก App is INACTIVE', { from: appState, to: nextAppState }); + 
onInactiveRef.current?.(); + } + + setAppState(nextAppState); + }; + + // Subscribe to app state changes + const subscription = AppState.addEventListener('change', handleAppStateChange); + + return () => { + console.log('[AppLifecycle] Removing AppState listener'); + subscription.remove(); + }; + }, [appState]); + + return { + appState, + isActive: appState === 'active', + isBackground: appState === 'background', + isInactive: appState === 'inactive', + }; +}; + +export default useAppLifecycle; diff --git a/ushadow/mobile/app/hooks/useAudioManager.ts b/ushadow/mobile/app/hooks/useAudioManager.ts index 0b4e2588..396bd478 100644 --- a/ushadow/mobile/app/hooks/useAudioManager.ts +++ b/ushadow/mobile/app/hooks/useAudioManager.ts @@ -26,6 +26,7 @@ interface PhoneAudioRecorder { interface ConnectionEventHandlers { onWebSocketDisconnect?: (sessionId: string, conversationId: string | null) => void; onWebSocketReconnect?: () => void; + onWebSocketLog?: (status: 'connecting' | 'connected' | 'disconnected' | 'error', message: string, details?: string) => void; } // Optional offline mode integration interface @@ -153,6 +154,7 @@ export const useAudioManager = ({ sessionIdRef.current = sessionId; setCurrentSessionId(sessionId); + connectionHandlers?.onWebSocketLog?.('disconnected', 'Audio streaming disconnected, entering offline mode', `Session: ${sessionId}`); offlineMode.enterOfflineMode(sessionId, conversationIdRef.current); setIsOfflineBuffering(true); @@ -162,6 +164,7 @@ export const useAudioManager = ({ // Detect reconnect transition if (!wasConnected && isConnected && offlineMode?.isOffline) { console.log('[useAudioManager] WebSocket reconnected, exiting offline mode'); + connectionHandlers?.onWebSocketLog?.('connected', 'Audio streaming reconnected, exiting offline mode'); await offlineMode.exitOfflineMode(); setIsOfflineBuffering(false); diff --git a/ushadow/mobile/app/hooks/useAudioStreamer.ts b/ushadow/mobile/app/hooks/useAudioStreamer.ts index 3857e4ea..77ecfbfb 
100644 --- a/ushadow/mobile/app/hooks/useAudioStreamer.ts +++ b/ushadow/mobile/app/hooks/useAudioStreamer.ts @@ -7,9 +7,8 @@ * Wyoming Protocol: JSON header + binary payload for structured audio sessions. * * URL Format: - * - Streaming URL: wss://{tailscale-host}/chronicle/ws_pcm?token={jwt} - * - /chronicle prefix routes through Caddy to Chronicle backend - * - /ws_pcm is the Wyoming protocol PCM audio endpoint + * - Audio relay: wss://{tailscale-host}/ws/audio/relay?destinations=[...]&token={jwt} + * - The relay forwards to Chronicle/Mycelia backends internally * - Token is appended automatically via appendTokenToUrl() */ import { useState, useRef, useCallback, useEffect } from 'react'; @@ -29,6 +28,21 @@ export interface UseAudioStreamer { getWebSocketReadyState: () => number | undefined; } +export interface RelayStatus { + destinations: Array<{ + name: string; + connected: boolean; + errors: number; + }>; + bytes_relayed: number; + chunks_relayed: number; +} + +export interface UseAudioStreamerOptions { + onLog?: (status: 'connecting' | 'connected' | 'disconnected' | 'error', message: string, details?: string) => void; + onRelayStatus?: (status: RelayStatus) => void; +} + // Wyoming Protocol Types interface WyomingEvent { type: string; @@ -71,7 +85,8 @@ const BASE_RECONNECT_MS = 3000; const MAX_RECONNECT_MS = 30000; const HEARTBEAT_MS = 25000; -export const useAudioStreamer = (): UseAudioStreamer => { +export const useAudioStreamer = (options?: UseAudioStreamerOptions): UseAudioStreamer => { + const { onLog, onRelayStatus } = options || {}; const [isStreaming, setIsStreaming] = useState(false); const [isConnecting, setIsConnecting] = useState(false); const [isRetrying, setIsRetrying] = useState(false); @@ -153,12 +168,14 @@ export const useAudioStreamer = (): UseAudioStreamer => { websocketRef.current = null; } + onLog?.('disconnected', 'Manually stopped streaming'); + setStateSafe(setIsStreaming, false); setStateSafe(setIsConnecting, false); 
setStateSafe(setIsRetrying, false); setStateSafe(setRetryCount, 0); reconnectAttemptsRef.current = 0; - }, [sendWyomingEvent, setStateSafe]); + }, [sendWyomingEvent, setStateSafe, onLog]); // Cancel retry attempts const cancelRetry = useCallback(() => { @@ -186,6 +203,7 @@ export const useAudioStreamer = (): UseAudioStreamer => { if (reconnectAttemptsRef.current >= MAX_RECONNECT_ATTEMPTS) { console.log('[AudioStreamer] Reconnect attempts exhausted'); manuallyStoppedRef.current = true; + onLog?.('error', 'Failed to reconnect after multiple attempts', `Max attempts: ${MAX_RECONNECT_ATTEMPTS}`); setStateSafe(setIsStreaming, false); setStateSafe(setIsConnecting, false); setStateSafe(setIsRetrying, false); @@ -199,6 +217,7 @@ export const useAudioStreamer = (): UseAudioStreamer => { reconnectAttemptsRef.current = attempt; console.log(`[AudioStreamer] Reconnect attempt ${attempt}/${MAX_RECONNECT_ATTEMPTS} in ${delay}ms`); + onLog?.('connecting', `Reconnecting (attempt ${attempt}/${MAX_RECONNECT_ATTEMPTS})`, `Delay: ${delay}ms`); if (reconnectTimeoutRef.current) clearTimeout(reconnectTimeoutRef.current); setStateSafe(setIsConnecting, true); @@ -219,7 +238,7 @@ export const useAudioStreamer = (): UseAudioStreamer => { }); } }, delay); - }, [setStateSafe]); + }, [setStateSafe, onLog]); // Start streaming const startStreaming = useCallback(async ( @@ -249,6 +268,7 @@ export const useAudioStreamer = (): UseAudioStreamer => { console.log(`[AudioStreamer] Initializing WebSocket: ${trimmed}`); console.log(`[AudioStreamer] Network state:`, netState); + onLog?.('connecting', 'Initializing WebSocket connection', trimmed); if (websocketRef.current) await stopStreaming(); setStateSafe(setIsConnecting, true); @@ -262,6 +282,7 @@ export const useAudioStreamer = (): UseAudioStreamer => { ws.onopen = async () => { console.log('[AudioStreamer] WebSocket open'); + onLog?.('connected', 'WebSocket connected successfully', `Mode: ${currentModeRef.current}, Codec: ${currentCodecRef.current}`); 
// Set binary type to arraybuffer (matches web implementation) if (ws.binaryType !== 'arraybuffer') { @@ -304,20 +325,27 @@ export const useAudioStreamer = (): UseAudioStreamer => { ws.onmessage = (event) => { console.log('[AudioStreamer] Message:', event.data); - // Parse message to check for errors + // Parse message to check for errors and status updates try { const data = typeof event.data === 'string' ? JSON.parse(event.data) : null; if (data) { + // Handle relay_status message + if (data.type === 'relay_status' && data.data) { + console.log('[AudioStreamer] Relay status:', data.data); + onRelayStatus?.(data.data as RelayStatus); + } // Check for error responses from server - if (data.type === 'error' || data.error || data.status === 'error') { + else if (data.type === 'error' || data.error || data.status === 'error') { serverErrorCountRef.current += 1; const errorMsg = data.message || data.error || 'Server error'; console.error(`[AudioStreamer] Server error ${serverErrorCountRef.current}/${MAX_SERVER_ERRORS}: ${errorMsg}`); + onLog?.('error', `Server error (${serverErrorCountRef.current}/${MAX_SERVER_ERRORS})`, errorMsg); setStateSafe(setError, errorMsg); // Auto-stop after too many consecutive server errors if (serverErrorCountRef.current >= MAX_SERVER_ERRORS) { console.log('[AudioStreamer] Too many server errors, stopping stream'); + onLog?.('error', 'Too many server errors, stopped stream', `${MAX_SERVER_ERRORS} consecutive errors`); manuallyStoppedRef.current = true; ws.close(1000, 'too-many-errors'); setStateSafe(setError, `Stopped: ${errorMsg} (${MAX_SERVER_ERRORS} errors)`); @@ -335,6 +363,7 @@ export const useAudioStreamer = (): UseAudioStreamer => { ws.onerror = (e) => { const msg = (e as ErrorEvent).message || 'WebSocket connection error.'; console.error('[AudioStreamer] Error:', msg); + onLog?.('error', 'WebSocket connection error', msg); setStateSafe(setError, msg); setStateSafe(setIsConnecting, false); setStateSafe(setIsStreaming, false); @@ 
-346,6 +375,10 @@ export const useAudioStreamer = (): UseAudioStreamer => { console.log('[AudioStreamer] Closed. Code:', event.code, 'Reason:', event.reason); const isManual = event.code === 1000 && (event.reason === 'manual-stop' || event.reason === 'too-many-errors'); + if (!isManual) { + onLog?.('disconnected', 'WebSocket connection closed', `Code: ${event.code}, Reason: ${event.reason || 'none'}`); + } + setStateSafe(setIsConnecting, false); setStateSafe(setIsStreaming, false); diff --git a/ushadow/mobile/app/hooks/useConnectionHealth.ts b/ushadow/mobile/app/hooks/useConnectionHealth.ts new file mode 100644 index 00000000..9cbc473d --- /dev/null +++ b/ushadow/mobile/app/hooks/useConnectionHealth.ts @@ -0,0 +1,151 @@ +/** + * useConnectionHealth Hook + * + * Monitors health of Bluetooth and WebSocket connections. + * Useful for detecting "zombie" connections that appear connected but aren't working. + * + * Usage: + * ```typescript + * const { checkHealth, bluetoothHealthy, websocketHealthy } = useConnectionHealth({ + * omiConnection, + * websocketReadyState: audioStreamer.getWebSocketReadyState(), + * onUnhealthy: (type) => { + * console.log(`${type} connection is unhealthy, reconnecting...`); + * } + * }); + * + * // Check health when returning to foreground + * await checkHealth(); + * ``` + */ + +import { useState, useCallback } from 'react'; +import { OmiConnection } from 'friend-lite-react-native'; + +interface UseConnectionHealthOptions { + omiConnection?: OmiConnection; + websocketReadyState?: number; + onUnhealthy?: (type: 'bluetooth' | 'websocket' | 'both') => void; + autoCheck?: boolean; // Not implemented yet, for future periodic checks +} + +interface UseConnectionHealth { + bluetoothHealthy: boolean | null; // null = not checked yet + websocketHealthy: boolean | null; + lastHealthCheck: Date | null; + isChecking: boolean; + checkHealth: () => Promise<{ bluetooth: boolean; websocket: boolean }>; +} + +/** + * Hook to check connection health status. 
+ * + * Bluetooth health check: + * - Verify isConnected() returns true + * - Try to read battery level (low-cost operation that proves BLE is working) + * + * WebSocket health check: + * - Verify readyState === WebSocket.OPEN (1) + */ +export const useConnectionHealth = (options?: UseConnectionHealthOptions): UseConnectionHealth => { + const { omiConnection, websocketReadyState, onUnhealthy } = options || {}; + + const [bluetoothHealthy, setBluetoothHealthy] = useState(null); + const [websocketHealthy, setWebsocketHealthy] = useState(null); + const [lastHealthCheck, setLastHealthCheck] = useState(null); + const [isChecking, setIsChecking] = useState(false); + + /** + * Check if Bluetooth connection is actually working. + * + * We consider Bluetooth healthy if: + * 1. isConnected() returns true + * 2. We can successfully read battery level (proves BLE communication works) + */ + const checkBluetoothHealth = useCallback(async (): Promise => { + if (!omiConnection) { + console.log('[ConnectionHealth] No omiConnection provided, skipping Bluetooth check'); + return true; // Not applicable + } + + try { + const isConnected = omiConnection.isConnected(); + console.log('[ConnectionHealth] Bluetooth isConnected():', isConnected); + + if (!isConnected) { + console.log('[ConnectionHealth] โŒ Bluetooth: Not connected'); + return false; + } + + // Try to read battery level to verify BLE communication works + const batteryLevel = await omiConnection.getBatteryLevel(); + console.log('[ConnectionHealth] โœ… Bluetooth: Connected and responsive (battery:', batteryLevel, '%)'); + return true; + + } catch (error) { + console.error('[ConnectionHealth] โŒ Bluetooth: Error during health check:', error); + return false; + } + }, [omiConnection]); + + /** + * Check if WebSocket connection is actually working. + * + * We consider WebSocket healthy if readyState === OPEN (1). + * Future enhancement: Could send ping and wait for pong. 
+ */ + const checkWebSocketHealth = useCallback((): boolean => { + if (websocketReadyState === undefined) { + console.log('[ConnectionHealth] No websocketReadyState provided, skipping WebSocket check'); + return true; // Not applicable + } + + const isOpen = websocketReadyState === WebSocket.OPEN; // 1 + console.log('[ConnectionHealth]', isOpen ? 'โœ…' : 'โŒ', 'WebSocket: readyState =', websocketReadyState); + + return isOpen; + }, [websocketReadyState]); + + /** + * Check health of both connections. + * Returns { bluetooth: boolean, websocket: boolean } + */ + const checkHealth = useCallback(async (): Promise<{ bluetooth: boolean; websocket: boolean }> => { + console.log('[ConnectionHealth] Starting health check...'); + setIsChecking(true); + + const btHealthy = await checkBluetoothHealth(); + const wsHealthy = checkWebSocketHealth(); + + setBluetoothHealthy(btHealthy); + setWebsocketHealthy(wsHealthy); + setLastHealthCheck(new Date()); + setIsChecking(false); + + console.log('[ConnectionHealth] Health check complete:', { + bluetooth: btHealthy ? 'โœ…' : 'โŒ', + websocket: wsHealthy ? 
'โœ…' : 'โŒ', + }); + + // Trigger callback if unhealthy + if (!btHealthy && !wsHealthy) { + onUnhealthy?.('both'); + } else if (!btHealthy) { + onUnhealthy?.('bluetooth'); + } else if (!wsHealthy) { + onUnhealthy?.('websocket'); + } + + return { bluetooth: btHealthy, websocket: wsHealthy }; + }, [checkBluetoothHealth, checkWebSocketHealth, onUnhealthy]); + + return { + bluetoothHealthy, + websocketHealthy, + lastHealthCheck, + isChecking, + checkHealth, + }; +}; + +export default useConnectionHealth; diff --git a/ushadow/mobile/app/hooks/useConnectionLog.ts b/ushadow/mobile/app/hooks/useConnectionLog.ts index 63a17e87..f6217ac5 100644 --- a/ushadow/mobile/app/hooks/useConnectionLog.ts +++ b/ushadow/mobile/app/hooks/useConnectionLog.ts @@ -27,6 +27,7 @@ interface UseConnectionLogReturn { metadata?: Record ) => void; clearLogs: () => void; + clearLogsByType: (type: ConnectionType) => void; // Loading state isLoading: boolean; @@ -164,11 +165,22 @@ export const useConnectionLog = (): UseConnectionLogReturn => { } }, []); + // Clear logs for a specific connection type + const clearLogsByType = useCallback(async (type: ConnectionType) => { + setEntries(prev => prev.filter(entry => entry.type !== type)); + setConnectionState(prev => ({ + ...prev, + [type]: 'unknown', + })); + // Storage will be updated automatically via the useEffect + }, []); + return { entries, connectionState, logEvent, clearLogs, + clearLogsByType, isLoading, }; }; diff --git a/ushadow/mobile/app/hooks/useConversations.ts b/ushadow/mobile/app/hooks/useConversations.ts new file mode 100644 index 00000000..56131582 --- /dev/null +++ b/ushadow/mobile/app/hooks/useConversations.ts @@ -0,0 +1,245 @@ +/** + * Conversation Hooks + * + * React hooks for fetching conversations from Chronicle and Mycelia backends. + * Supports single-source and multi-source conversation fetching. 
+ */ + +import { useState, useEffect, useCallback } from 'react'; +import * as chronicleApi from '../services/chronicleApi'; +import * as myceliaApi from '../services/myceliaApi'; +import type { Conversation, ConversationsResponse } from '../services/chronicleApi'; + +export type ConversationSource = 'chronicle' | 'mycelia'; + +interface UseConversationsOptions { + enabled?: boolean; + page?: number; + limit?: number; + autoRefresh?: boolean; + refreshInterval?: number; // in milliseconds +} + +interface ConversationState { + data: Conversation[]; + loading: boolean; + error: string | null; + refetch: () => Promise; +} + +/** + * Hook to fetch conversations from Chronicle backend. + */ +export function useChronicleConversations( + options: UseConversationsOptions = {} +): ConversationState { + const { + enabled = true, + page = 1, + limit = 50, + autoRefresh = false, + refreshInterval = 30000, + } = options; + + const [data, setData] = useState([]); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + + const fetchData = useCallback(async () => { + if (!enabled) { + setLoading(false); + return; + } + + try { + setLoading(true); + setError(null); + + const response = await chronicleApi.fetchConversations(page, limit); + + // Handle both array and object response formats + const conversations = response.conversations || []; + setData(conversations); + + console.log(`[useChronicleConversations] Fetched ${conversations.length} conversations`); + } catch (err) { + const message = err instanceof Error ? 
err.message : 'Failed to fetch Chronicle conversations'; + setError(message); + console.error('[useChronicleConversations] Error:', err); + setData([]); + } finally { + setLoading(false); + } + }, [enabled, page, limit]); + + // Initial fetch + useEffect(() => { + fetchData(); + }, [fetchData]); + + // Auto-refresh if enabled + useEffect(() => { + if (!autoRefresh || !enabled) { + return; + } + + const intervalId = setInterval(fetchData, refreshInterval); + return () => clearInterval(intervalId); + }, [autoRefresh, enabled, refreshInterval, fetchData]); + + return { data, loading, error, refetch: fetchData }; +} + +/** + * Hook to fetch conversations from Mycelia backend. + */ +export function useMyceliaConversations( + options: UseConversationsOptions = {} +): ConversationState { + const { + enabled = true, + page = 1, + limit = 25, // Mycelia default + autoRefresh = false, + refreshInterval = 30000, + } = options; + + const [data, setData] = useState([]); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + + const fetchData = useCallback(async () => { + if (!enabled) { + setLoading(false); + return; + } + + try { + setLoading(true); + setError(null); + + const response = await myceliaApi.fetchConversations(page, limit); + + // Handle both array and object response formats + const conversations = response.conversations || []; + setData(conversations); + + console.log(`[useMyceliaConversations] Fetched ${conversations.length} conversations`); + } catch (err) { + const message = err instanceof Error ? 
err.message : 'Failed to fetch Mycelia conversations'; + setError(message); + console.error('[useMyceliaConversations] Error:', err); + + // Don't treat service unavailable as critical error + if (message.includes('not available') || message.includes('503')) { + console.log('[useMyceliaConversations] Mycelia service unavailable - returning empty data'); + } + + setData([]); + } finally { + setLoading(false); + } + }, [enabled, page, limit]); + + // Initial fetch + useEffect(() => { + fetchData(); + }, [fetchData]); + + // Auto-refresh if enabled + useEffect(() => { + if (!autoRefresh || !enabled) { + return; + } + + const intervalId = setInterval(fetchData, refreshInterval); + return () => clearInterval(intervalId); + }, [autoRefresh, enabled, refreshInterval, fetchData]); + + return { data, loading, error, refetch: fetchData }; +} + +/** + * Hook to fetch conversations from multiple sources. + * Merges and deduplicates conversations from Chronicle and Mycelia. + */ +export function useMultiSourceConversations( + enabledSources: ConversationSource[], + options: UseConversationsOptions = {} +) { + const chronicleEnabled = enabledSources.includes('chronicle'); + const myceliaEnabled = enabledSources.includes('mycelia'); + + const chronicle = useChronicleConversations({ + ...options, + enabled: chronicleEnabled, + }); + + const mycelia = useMyceliaConversations({ + ...options, + enabled: myceliaEnabled, + limit: options.limit || 25, // Mycelia default + }); + + // Merge and deduplicate conversations + const mergedData = useCallback(() => { + const conversationMap = new Map(); + + // Add Chronicle conversations + chronicle.data.forEach((conv) => { + conversationMap.set(conv.conversation_id, { + ...conv, + source: 'chronicle' as ConversationSource, + }); + }); + + // Add Mycelia conversations (may have duplicates) + mycelia.data.forEach((conv) => { + if (!conversationMap.has(conv.conversation_id)) { + conversationMap.set(conv.conversation_id, { + ...conv, + source: 
'mycelia' as ConversationSource, + }); + } + }); + + // Sort by created_at descending (most recent first) + return Array.from(conversationMap.values()).sort((a, b) => { + const dateA = new Date(a.created_at).getTime(); + const dateB = new Date(b.created_at).getTime(); + return dateB - dateA; + }); + }, [chronicle.data, mycelia.data]); + + const refetchAll = useCallback(async () => { + await Promise.all([ + chronicleEnabled ? chronicle.refetch() : Promise.resolve(), + myceliaEnabled ? mycelia.refetch() : Promise.resolve(), + ]); + }, [chronicleEnabled, myceliaEnabled, chronicle.refetch, mycelia.refetch]); + + return { + // Individual sources + chronicle: { + data: chronicle.data, + loading: chronicle.loading, + error: chronicle.error, + refetch: chronicle.refetch, + }, + mycelia: { + data: mycelia.data, + loading: mycelia.loading, + error: mycelia.error, + refetch: mycelia.refetch, + }, + // Merged data + data: mergedData(), + // Aggregate states + loading: chronicle.loading || mycelia.loading, + anyLoading: chronicle.loading || mycelia.loading, + allLoaded: (!chronicleEnabled || !chronicle.loading) && (!myceliaEnabled || !mycelia.loading), + error: chronicle.error || mycelia.error, + // Refetch all sources + refetch: refetchAll, + }; +} diff --git a/ushadow/mobile/app/hooks/useDeviceConnection.ts b/ushadow/mobile/app/hooks/useDeviceConnection.ts index 0ea9e69a..0a6a22e7 100644 --- a/ushadow/mobile/app/hooks/useDeviceConnection.ts +++ b/ushadow/mobile/app/hooks/useDeviceConnection.ts @@ -15,11 +15,17 @@ interface UseDeviceConnection { connectedDeviceId: string | null; } +interface UseDeviceConnectionOptions { + onDisconnect?: () => void; // Callback for when disconnection happens + onConnect?: () => void; // Callback for when connection happens + onLog?: (status: 'connecting' | 'connected' | 'disconnected' | 'error', message: string, details?: string) => void; +} + export const useDeviceConnection = ( omiConnection: OmiConnection, - onDisconnect?: () => void, // 
Callback for when disconnection happens, e.g., to stop audio listener - onConnect?: () => void // Callback for when connection happens + options?: UseDeviceConnectionOptions ): UseDeviceConnection => { + const { onDisconnect, onConnect, onLog } = options || {}; const [connectedDevice, setConnectedDevice] = useState(null); const [isConnecting, setIsConnecting] = useState(false); const [connectionError, setConnectionError] = useState(null); @@ -35,6 +41,7 @@ export const useDeviceConnection = ( if (isNowConnected) { setConnectedDeviceId(id); setConnectionError(null); // Clear any previous error on successful connection + onLog?.('connected', 'OMI device connected', `Device ID: ${id}`); // Potentially fetch the device details from omiConnection if needed to set connectedDevice // For now, we'll assume the app manages the full OmiDevice object elsewhere or doesn't need it here. if (onConnect) onConnect(); @@ -43,9 +50,10 @@ export const useDeviceConnection = ( setConnectedDevice(null); setCurrentCodec(null); setBatteryLevel(-1); + onLog?.('disconnected', 'OMI device disconnected', `Device ID: ${id}`); if (onDisconnect) onDisconnect(); } - }, [onDisconnect, onConnect]); + }, [onDisconnect, onConnect, onLog]); const connectToDevice = useCallback(async (deviceId: string) => { if (connectedDeviceId && connectedDeviceId !== deviceId) { @@ -62,6 +70,7 @@ export const useDeviceConnection = ( setConnectedDevice(null); // Clear previous device details setCurrentCodec(null); setBatteryLevel(-1); + onLog?.('connecting', 'Connecting to OMI device', `Device ID: ${deviceId}`); try { const success = await omiConnection.connect(deviceId, handleConnectionStateChange); @@ -72,6 +81,7 @@ export const useDeviceConnection = ( setIsConnecting(false); const errorMsg = 'Could not connect to the device. 
Please try again.'; setConnectionError(errorMsg); + onLog?.('error', 'Failed to connect to OMI device', errorMsg); Alert.alert('Connection Failed', errorMsg); } } catch (error) { @@ -81,13 +91,15 @@ export const useDeviceConnection = ( setConnectedDeviceId(null); const errorMsg = String(error); setConnectionError(errorMsg); + onLog?.('error', 'OMI connection error', errorMsg); Alert.alert('Connection Error', errorMsg); } - }, [omiConnection, handleConnectionStateChange, connectedDeviceId]); + }, [omiConnection, handleConnectionStateChange, connectedDeviceId, onLog]); const disconnectFromDevice = useCallback(async () => { console.log('Attempting to disconnect...'); setIsConnecting(false); // No longer attempting to connect if we are disconnecting + onLog?.('disconnected', 'Disconnecting from OMI device'); try { if (onDisconnect) { await onDisconnect(); // Call pre-disconnect cleanup (e.g., stop audio) @@ -101,6 +113,7 @@ export const useDeviceConnection = ( // The handleConnectionStateChange should also be triggered by the SDK upon disconnection } catch (error) { console.error('Disconnect error:', error); + onLog?.('error', 'OMI disconnect error', String(error)); Alert.alert('Disconnect Error', String(error)); // Even if disconnect fails, reset state as we intend to be disconnected setConnectedDevice(null); @@ -108,7 +121,7 @@ export const useDeviceConnection = ( setCurrentCodec(null); setBatteryLevel(-1); } - }, [omiConnection, onDisconnect]); + }, [omiConnection, onDisconnect, onLog]); const getAudioCodec = useCallback(async () => { if (!omiConnection.isConnected() || !connectedDeviceId) { diff --git a/ushadow/mobile/app/hooks/useFeatureFlags.ts b/ushadow/mobile/app/hooks/useFeatureFlags.ts new file mode 100644 index 00000000..f0b2370e --- /dev/null +++ b/ushadow/mobile/app/hooks/useFeatureFlags.ts @@ -0,0 +1,33 @@ +/** + * Feature Flag Hooks + * + * Convenience hooks for accessing feature flags in components. 
+ */ + +import { useFeatureFlagContext } from '../contexts/FeatureFlagContext'; + +/** + * Hook to check if a specific feature flag is enabled. + * + * @param flagName - The name of the feature flag to check + * @returns boolean indicating if the flag is enabled + * + * @example + * const isDualSourceEnabled = useFeatureFlag('mobile_dual_source_conversations'); + */ +export function useFeatureFlag(flagName: string): boolean { + const { isEnabled } = useFeatureFlagContext(); + return isEnabled(flagName); +} + +/** + * Hook to access all feature flags and their state. + * + * @returns Feature flags object, loading state, error, and refresh function + * + * @example + * const { flags, loading, error, refreshFlags } = useFeatureFlags(); + */ +export function useFeatureFlags() { + return useFeatureFlagContext(); +} diff --git a/ushadow/mobile/app/hooks/useSessionTracking.ts b/ushadow/mobile/app/hooks/useSessionTracking.ts new file mode 100644 index 00000000..7cceff37 --- /dev/null +++ b/ushadow/mobile/app/hooks/useSessionTracking.ts @@ -0,0 +1,198 @@ +/** + * useSessionTracking Hook + * + * Manages streaming session lifecycle and persistence. + * Tracks active sessions and maintains session history. 
+ */ + +import { useState, useCallback, useEffect } from 'react'; +import NetInfo from '@react-native-community/netinfo'; +import { + StreamingSession, + SessionSource, + SessionDestination, + generateSessionId, + getSessionDuration, +} from '../types/streamingSession'; +import { loadSessions, saveSessions, addSession, updateSession } from '../_utils/sessionStorage'; +import { RelayStatus } from './useAudioStreamer'; + +interface UseSessionTrackingReturn { + sessions: StreamingSession[]; + activeSession: StreamingSession | null; + startSession: (source: SessionSource, codec: 'pcm' | 'opus') => string; + updateSessionStatus: (sessionId: string, relayStatus: RelayStatus) => void; + endSession: (sessionId: string, error?: string) => void; + linkToConversation: (sessionId: string, conversationId: string) => void; + deleteSession: (sessionId: string) => void; + clearAllSessions: () => void; + isLoading: boolean; +} + +export const useSessionTracking = (): UseSessionTrackingReturn => { + const [sessions, setSessions] = useState([]); + const [activeSession, setActiveSession] = useState(null); + const [isLoading, setIsLoading] = useState(true); + + // Load sessions from storage on mount + useEffect(() => { + const loadData = async () => { + const loaded = await loadSessions(); + setSessions(loaded); + // Find any active session (shouldn't happen, but handle gracefully) + const active = loaded.find(s => !s.endTime); + if (active) { + setActiveSession(active); + } + setIsLoading(false); + }; + loadData(); + }, []); + + // Auto-save sessions when they change + useEffect(() => { + if (!isLoading && sessions.length > 0) { + const saveData = async () => { + await saveSessions(sessions); + }; + // Debounce saves + const timeout = setTimeout(saveData, 500); + return () => clearTimeout(timeout); + } + }, [sessions, isLoading]); + + /** + * Start a new streaming session + */ + const startSession = useCallback(async (source: SessionSource, codec: 'pcm' | 'opus'): Promise => { + const 
sessionId = generateSessionId(); + + // Get network info + const netInfo = await NetInfo.fetch(); + const networkType = netInfo.type; + + const newSession: StreamingSession = { + id: sessionId, + source, + destinations: [], // Will be populated when relay_status arrives + startTime: new Date(), + bytesTransferred: 0, + chunksTransferred: 0, + codec, + networkType, + }; + + setSessions(prev => [newSession, ...prev]); + setActiveSession(newSession); + + console.log('[SessionTracking] Started session:', sessionId); + return sessionId; + }, []); + + /** + * Update session with relay status from backend + */ + const updateSessionStatus = useCallback((sessionId: string, relayStatus: RelayStatus) => { + setSessions(prev => { + const updated = prev.map(session => { + if (session.id === sessionId) { + const updatedSession = { + ...session, + destinations: relayStatus.destinations, + bytesTransferred: relayStatus.bytes_relayed, + chunksTransferred: relayStatus.chunks_relayed, + }; + if (activeSession?.id === sessionId) { + setActiveSession(updatedSession); + } + return updatedSession; + } + return session; + }); + return updated; + }); + }, [activeSession]); + + /** + * End a streaming session + * + * Called when WebSocket connection drops and doesn't reconnect. + * Keeps ALL sessions (including failed ones) for debugging conversation drops. + */ + const endSession = useCallback((sessionId: string, error?: string, endReason?: 'manual_stop' | 'connection_lost' | 'error' | 'timeout') => { + const endTime = new Date(); + + setSessions(prev => prev.map(session => { + if (session.id === sessionId) { + const durationSeconds = Math.floor((endTime.getTime() - session.startTime.getTime()) / 1000); + // Infer end reason if not provided + let finalEndReason = endReason; + if (!finalEndReason) { + if (error) { + finalEndReason = error.toLowerCase().includes('timeout') ? 
'timeout' : 'error'; + } else { + finalEndReason = 'manual_stop'; + } + } + return { + ...session, + endTime, + durationSeconds, + error, + endReason: finalEndReason, + }; + } + return session; + })); + + if (activeSession?.id === sessionId) { + setActiveSession(null); + } + + console.log('[SessionTracking] Ended session:', sessionId, endReason || 'unknown', error ? `Error: ${error}` : ''); + }, [activeSession]); + + /** + * Link session to a Chronicle conversation + */ + const linkToConversation = useCallback((sessionId: string, conversationId: string) => { + setSessions(prev => { + const updated = prev.map(session => { + if (session.id === sessionId) { + return { ...session, conversationId }; + } + return session; + }); + return updated; + }); + // Also persist immediately + updateSession(sessionId, { conversationId }); + }, []); + + /** + * Delete a session from history + */ + const deleteSessionCallback = useCallback((sessionId: string) => { + setSessions(prev => prev.filter(s => s.id !== sessionId)); + }, []); + + /** + * Clear all session history + */ + const clearAllSessionsCallback = useCallback(() => { + setSessions([]); + setActiveSession(null); + }, []); + + return { + sessions, + activeSession, + startSession, + updateSessionStatus, + endSession, + linkToConversation, + deleteSession: deleteSessionCallback, + clearAllSessions: clearAllSessionsCallback, + isLoading, + }; +}; diff --git a/ushadow/mobile/app/services/audioProviderApi.ts b/ushadow/mobile/app/services/audioProviderApi.ts index 5f05e867..6d92c083 100644 --- a/ushadow/mobile/app/services/audioProviderApi.ts +++ b/ushadow/mobile/app/services/audioProviderApi.ts @@ -131,13 +131,13 @@ export function buildAudioStreamUrl( * // This API tells it WHERE to send audio (the consumer) * * const consumer = await getActiveAudioConsumer('https://ushadow.ts.net', jwtToken); - * // Returns: { provider_id: "chronicle", websocket_url: "ws://chronicle:5001/chronicle/ws_pcm", ... 
} + * // Returns: { provider_id: "chronicle", websocket_url: "wss://host/ws/audio/relay", ... } * * const wsUrl = buildAudioStreamUrl(consumer, jwtToken); - * // Result: "ws://chronicle:5001/chronicle/ws_pcm?token=JWT" + * // Result: "wss://host/ws/audio/relay?destinations=[...]&token=JWT" * * await audioStreamer.startStreaming(wsUrl, 'streaming'); - * // Mobile mic โ†’ Chronicle + * // Mobile mic โ†’ Audio Relay โ†’ Chronicle/Mycelia */ // ============================================================================= @@ -203,23 +203,37 @@ export async function getAvailableAudioDestinations( * Build relay WebSocket URL with multiple destinations. * Connects to relay endpoint which fans out to all selected destinations. * - * Note: Services should use unified /ws/audio endpoint that auto-detects format. - * No need to swap paths based on source type - the server detects Opus/PCM/float32. + * Adds codec parameter to destination URLs based on audio source: + * - mic (device microphone) โ†’ ?codec=pcm + * - omi (hardware device) โ†’ ?codec=opus */ export function buildRelayUrl( baseUrl: string, token: string, - selectedDestinations: AudioDestination[] + selectedDestinations: AudioDestination[], + audioSource: 'mic' | 'omi' = 'mic' ): string { // Convert http(s) to ws(s) const wsBaseUrl = baseUrl.replace(/^http/, 'ws'); - // Build destinations array for relay - use URLs as-is - // Services should expose /ws/audio unified endpoint that auto-detects format - const destinations = selectedDestinations.map(dest => ({ - name: dest.instance_name, - url: dest.url, - })); + // Determine codec based on audio source + const codec = audioSource === 'omi' ? 'opus' : 'pcm'; + + // Build destinations array for relay - add codec parameter if not present + const destinations = selectedDestinations.map(dest => { + let destUrl = dest.url; + + // Add codec parameter if the URL doesn't already have it + if (!destUrl.includes('codec=')) { + const separator = destUrl.includes('?') ? 
'&' : '?'; + destUrl = `${destUrl}${separator}codec=${codec}`; + } + + return { + name: dest.instance_name, + url: destUrl, + }; + }); // Create relay URL const url = new URL(`${wsBaseUrl}/ws/audio/relay`); diff --git a/ushadow/mobile/app/services/backgroundTasks.ts b/ushadow/mobile/app/services/backgroundTasks.ts new file mode 100644 index 00000000..c3e22057 --- /dev/null +++ b/ushadow/mobile/app/services/backgroundTasks.ts @@ -0,0 +1,274 @@ +/** + * Background Task Manager + * + * Keeps Bluetooth connections alive during background execution. + * Uses expo-task-manager and expo-background-fetch to run periodic health checks. + * + * Platform Behavior: + * - iOS: Runs every ~15 minutes in background (iOS limitation) + * - Android: Can run more frequently, configurable + * + * Usage: + * ```typescript + * import { registerBackgroundTask, unregisterBackgroundTask } from './services/backgroundTasks'; + * + * // Start background monitoring + * await registerBackgroundTask(); + * + * // Stop background monitoring + * await unregisterBackgroundTask(); + * ``` + */ + +import * as TaskManager from 'expo-task-manager'; +import * as BackgroundFetch from 'expo-background-fetch'; +import AsyncStorage from '@react-native-async-storage/async-storage'; + +// Task name constant +export const BACKGROUND_BLUETOOTH_TASK = 'BACKGROUND_BLUETOOTH_TASK'; + +// Storage keys for background task state +const STORAGE_KEYS = { + LAST_CHECK: '@background_task_last_check', + CHECK_COUNT: '@background_task_check_count', + CONNECTION_STATE: '@background_connection_state', + ERRORS: '@background_task_errors', +}; + +/** + * Background task definition. + * + * This function runs in the background when triggered by the OS. + * It has limited execution time (~30s on iOS, more on Android). + * + * IMPORTANT: Cannot use React hooks or access React components here. + * This runs in a separate JavaScript context. 
+ */ +TaskManager.defineTask(BACKGROUND_BLUETOOTH_TASK, async () => { + const now = new Date().toISOString(); + console.log(`[BackgroundTask] Task triggered at ${now}`); + + try { + // Increment check counter + const checkCountStr = await AsyncStorage.getItem(STORAGE_KEYS.CHECK_COUNT); + const checkCount = checkCountStr ? parseInt(checkCountStr, 10) : 0; + await AsyncStorage.setItem(STORAGE_KEYS.CHECK_COUNT, (checkCount + 1).toString()); + + // Store last check timestamp + await AsyncStorage.setItem(STORAGE_KEYS.LAST_CHECK, now); + + // Get current connection state (saved by foreground app) + const connectionStateStr = await AsyncStorage.getItem(STORAGE_KEYS.CONNECTION_STATE); + const connectionState = connectionStateStr ? JSON.parse(connectionStateStr) : null; + + console.log('[BackgroundTask] Connection state:', connectionState); + + if (connectionState?.isStreaming) { + console.log('[BackgroundTask] App is streaming, connections should be maintained'); + + // Note: We can't directly interact with BLE or WebSocket from here + // The main purpose is to: + // 1. Keep the JS thread alive + // 2. Log that we're monitoring + // 3. Store diagnostic data + // + // Actual reconnection happens in foreground (useAppLifecycle) + + // Future enhancement: Could use native modules to check BLE state + // For now, we just keep the task alive + } else { + console.log('[BackgroundTask] App not streaming, no action needed'); + } + + // Return success + return BackgroundFetch.BackgroundFetchResult.NewData; + + } catch (error) { + console.error('[BackgroundTask] Error:', error); + + // Store error for debugging + const errorLog = { + timestamp: now, + error: String(error), + }; + await AsyncStorage.setItem(STORAGE_KEYS.ERRORS, JSON.stringify(errorLog)); + + return BackgroundFetch.BackgroundFetchResult.Failed; + } +}); + +/** + * Register background task with the OS. + * + * Call this when the user starts streaming to enable background monitoring. 
+ * + * @param minimumInterval - Minimum seconds between task executions (default: 60) + * Note: iOS enforces minimum ~15 minutes regardless of this value + */ +export const registerBackgroundTask = async (minimumInterval: number = 60): Promise => { + try { + console.log('[BackgroundTask] Registering background task...'); + + // Check if task is already registered + const isRegistered = await TaskManager.isTaskRegisteredAsync(BACKGROUND_BLUETOOTH_TASK); + + if (isRegistered) { + console.log('[BackgroundTask] Task already registered'); + return true; + } + + // Register the task + await BackgroundFetch.registerTaskAsync(BACKGROUND_BLUETOOTH_TASK, { + minimumInterval, // seconds + stopOnTerminate: false, // Continue after app is killed + startOnBoot: true, // Start after device reboot + }); + + console.log('[BackgroundTask] โœ… Task registered successfully'); + + // Initialize counters + await AsyncStorage.setItem(STORAGE_KEYS.CHECK_COUNT, '0'); + await AsyncStorage.setItem(STORAGE_KEYS.LAST_CHECK, new Date().toISOString()); + + return true; + + } catch (error) { + console.error('[BackgroundTask] โŒ Failed to register task:', error); + return false; + } +}; + +/** + * Unregister background task. + * + * Call this when the user stops streaming or closes the app. + */ +export const unregisterBackgroundTask = async (): Promise => { + try { + console.log('[BackgroundTask] Unregistering background task...'); + + const isRegistered = await TaskManager.isTaskRegisteredAsync(BACKGROUND_BLUETOOTH_TASK); + + if (!isRegistered) { + console.log('[BackgroundTask] Task not registered, nothing to unregister'); + return true; + } + + await BackgroundFetch.unregisterTaskAsync(BACKGROUND_BLUETOOTH_TASK); + console.log('[BackgroundTask] โœ… Task unregistered successfully'); + + return true; + + } catch (error) { + console.error('[BackgroundTask] โŒ Failed to unregister task:', error); + return false; + } +}; + +/** + * Check if background task is currently registered. 
+ */ +export const isBackgroundTaskRegistered = async (): Promise => { + try { + return await TaskManager.isTaskRegisteredAsync(BACKGROUND_BLUETOOTH_TASK); + } catch (error) { + console.error('[BackgroundTask] Error checking registration:', error); + return false; + } +}; + +/** + * Get background task status. + * + * Returns diagnostic information about background task execution. + */ +export const getBackgroundTaskStatus = async (): Promise<{ + isRegistered: boolean; + lastCheck: string | null; + checkCount: number; + lastError: string | null; +}> => { + try { + const isRegistered = await isBackgroundTaskRegistered(); + const lastCheck = await AsyncStorage.getItem(STORAGE_KEYS.LAST_CHECK); + const checkCountStr = await AsyncStorage.getItem(STORAGE_KEYS.CHECK_COUNT); + const checkCount = checkCountStr ? parseInt(checkCountStr, 10) : 0; + const errorLogStr = await AsyncStorage.getItem(STORAGE_KEYS.ERRORS); + const errorLog = errorLogStr ? JSON.parse(errorLogStr) : null; + + return { + isRegistered, + lastCheck, + checkCount, + lastError: errorLog?.error || null, + }; + } catch (error) { + console.error('[BackgroundTask] Error getting status:', error); + return { + isRegistered: false, + lastCheck: null, + checkCount: 0, + lastError: String(error), + }; + } +}; + +/** + * Update connection state for background task. + * + * Call this from foreground app to tell background task about current state. + */ +export const updateConnectionState = async (state: { + isConnected: boolean; + isStreaming: boolean; + deviceId?: string; +}): Promise => { + try { + await AsyncStorage.setItem( + STORAGE_KEYS.CONNECTION_STATE, + JSON.stringify({ + ...state, + timestamp: new Date().toISOString(), + }) + ); + console.log('[BackgroundTask] Connection state updated:', state); + } catch (error) { + console.error('[BackgroundTask] Error updating connection state:', error); + } +}; + +/** + * Get stored connection state. 
+ */ +export const getStoredConnectionState = async (): Promise<{ + isConnected: boolean; + isStreaming: boolean; + deviceId?: string; + timestamp?: string; +} | null> => { + try { + const stateStr = await AsyncStorage.getItem(STORAGE_KEYS.CONNECTION_STATE); + return stateStr ? JSON.parse(stateStr) : null; + } catch (error) { + console.error('[BackgroundTask] Error getting connection state:', error); + return null; + } +}; + +/** + * Clear all background task data. + * Useful for debugging or resetting state. + */ +export const clearBackgroundTaskData = async (): Promise => { + try { + await Promise.all([ + AsyncStorage.removeItem(STORAGE_KEYS.LAST_CHECK), + AsyncStorage.removeItem(STORAGE_KEYS.CHECK_COUNT), + AsyncStorage.removeItem(STORAGE_KEYS.CONNECTION_STATE), + AsyncStorage.removeItem(STORAGE_KEYS.ERRORS), + ]); + console.log('[BackgroundTask] All background task data cleared'); + } catch (error) { + console.error('[BackgroundTask] Error clearing data:', error); + } +}; diff --git a/ushadow/mobile/app/services/chronicleApi.ts b/ushadow/mobile/app/services/chronicleApi.ts index 688cb918..28033443 100644 --- a/ushadow/mobile/app/services/chronicleApi.ts +++ b/ushadow/mobile/app/services/chronicleApi.ts @@ -4,7 +4,7 @@ * API client for fetching conversations and memories from the Chronicle backend. 
*/ -import { getAuthToken, getApiUrl } from '../_utils/authStorage'; +import { getAuthToken, getApiUrl, getDefaultServerUrl } from '../_utils/authStorage'; import { getActiveUnode } from '../_utils/unodeStorage'; // Types matching Chronicle backend responses @@ -292,19 +292,25 @@ export async function verifyUnodeAuth( const chronicleOk = chronicleResponse.ok; console.log(`[ChronicleAPI] Chronicle auth: ${chronicleResponse.status}`); - // Both must be OK for full auth - if (ushadowOk && chronicleOk) { - console.log('[ChronicleAPI] Auth verified successfully (both services)'); - return { valid: true, ushadowOk: true, chronicleOk: true }; + // Only ushadow auth is required for overall success + // Chronicle is optional since multiple audio sources are available + if (ushadowOk) { + if (chronicleOk) { + console.log('[ChronicleAPI] Auth verified successfully (both services)'); + return { valid: true, ushadowOk: true, chronicleOk: true }; + } else { + console.log('[ChronicleAPI] Auth verified (ushadow only, chronicle unavailable)'); + return { valid: true, ushadowOk: true, chronicleOk: false }; + } } - // Build error message based on what failed + // Build error message - only fail if ushadow failed const errors: string[] = []; if (!ushadowOk) { errors.push(`ushadow: ${ushadowResponse.status}`); } if (!chronicleOk) { - errors.push(`chronicle: ${chronicleResponse.status}`); + errors.push(`chronicle: ${chronicleResponse.status} (optional)`); } console.log(`[ChronicleAPI] Auth failed: ${errors.join(', ')}`); diff --git a/ushadow/mobile/app/services/featureFlagService.ts b/ushadow/mobile/app/services/featureFlagService.ts new file mode 100644 index 00000000..9a6e2716 --- /dev/null +++ b/ushadow/mobile/app/services/featureFlagService.ts @@ -0,0 +1,211 @@ +/** + * Feature Flag Service + * + * API client for fetching and managing feature flags from the backend. + * Caches flags in AsyncStorage for offline support. 
+ */ + +import AsyncStorage from '@react-native-async-storage/async-storage'; +import { getAuthToken, getApiUrl, getDefaultServerUrl } from '../_utils/authStorage'; +import { getActiveUnode } from '../_utils/unodeStorage'; + +// Storage key for cached feature flags +const STORAGE_KEY = '@ushadow_feature_flags'; +const CACHE_DURATION_MS = 5 * 60 * 1000; // 5 minutes + +// Types matching backend feature flag responses +export interface FeatureFlag { + enabled: boolean; + description?: string; + type?: 'release' | 'experiment' | 'ops'; +} + +export interface FeatureFlagsResponse { + [key: string]: FeatureFlag; +} + +interface CachedFlags { + flags: FeatureFlagsResponse; + timestamp: number; +} + +/** + * Get the backend API base URL. + */ +async function getBackendApiUrl(): Promise { + const activeUnode = await getActiveUnode(); + + // First, check if UNode has explicit API URL + if (activeUnode?.apiUrl) { + console.log(`[FeatureFlags] Using UNode apiUrl: ${activeUnode.apiUrl}`); + return activeUnode.apiUrl; + } + + // Fall back to global storage (legacy) + const storedUrl = await getApiUrl(); + if (storedUrl) { + console.log(`[FeatureFlags] Using stored URL: ${storedUrl}`); + return storedUrl; + } + + // Default fallback - use configured default server URL + const defaultUrl = await getDefaultServerUrl(); + console.log(`[FeatureFlags] Using default URL: ${defaultUrl}`); + return defaultUrl; +} + +/** + * Get the auth token from active UNode or global storage. + */ +async function getToken(): Promise { + // First, try to get token from active UNode + const activeUnode = await getActiveUnode(); + if (activeUnode?.authToken) { + return activeUnode.authToken; + } + + // Fall back to global storage (legacy) + return getAuthToken(); +} + +/** + * Load cached feature flags from AsyncStorage. 
+ */ +async function loadCachedFlags(): Promise { + try { + const cached = await AsyncStorage.getItem(STORAGE_KEY); + if (!cached) { + return null; + } + + const { flags, timestamp }: CachedFlags = JSON.parse(cached); + + // Check if cache is still valid + const now = Date.now(); + if (now - timestamp > CACHE_DURATION_MS) { + console.log('[FeatureFlags] Cache expired'); + return null; + } + + console.log('[FeatureFlags] Loaded from cache'); + return flags; + } catch (error) { + console.error('[FeatureFlags] Failed to load cached flags:', error); + return null; + } +} + +/** + * Save feature flags to AsyncStorage cache. + */ +async function saveCachedFlags(flags: FeatureFlagsResponse): Promise { + try { + const cached: CachedFlags = { + flags, + timestamp: Date.now(), + }; + await AsyncStorage.setItem(STORAGE_KEY, JSON.stringify(cached)); + console.log('[FeatureFlags] Saved to cache'); + } catch (error) { + console.error('[FeatureFlags] Failed to save cached flags:', error); + } +} + +/** + * Fetch feature flags from the backend API. + * Returns cached flags on network error. 
+ */ +export async function fetchFeatureFlags(): Promise { + try { + const apiUrl = await getBackendApiUrl(); + + // If no API URL configured yet, return cached flags or empty + if (!apiUrl) { + console.log('[FeatureFlags] No API URL configured yet - checking cache'); + const cached = await loadCachedFlags(); + if (cached) { + console.log('[FeatureFlags] Using cached flags'); + return cached; + } + console.log('[FeatureFlags] No cache available - returning empty flags'); + return {}; + } + + const url = `${apiUrl}/api/feature-flags/status`; + console.log(`[FeatureFlags] Fetching from: ${url}`); + + // Note: /api/feature-flags/status is a public endpoint (no auth required) + // This allows fetching flags before user has authenticated + const response = await fetch(url, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + }, + timeout: 5000, // 5 second timeout + }); + + if (!response.ok) { + console.error(`[FeatureFlags] Error ${response.status}: ${response.statusText}`); + + // On error, try to use cached flags + const cached = await loadCachedFlags(); + if (cached) { + console.log('[FeatureFlags] Using cached flags after API error'); + return cached; + } + + throw new Error(`Failed to fetch feature flags: ${response.status}`); + } + + const data = await response.json(); + console.log(`[FeatureFlags] Raw response:`, data); + + // Extract flags from response (backend returns { flags: {...}, enabled: true, ... 
}) + const flags: FeatureFlagsResponse = data.flags || data; + console.log(`[FeatureFlags] Fetched ${Object.keys(flags).length} flags`); + + // Cache the flags + await saveCachedFlags(flags); + + return flags; + } catch (error) { + console.error('[FeatureFlags] Failed to fetch feature flags:', error); + + // On network error, try to use cached flags + const cached = await loadCachedFlags(); + if (cached) { + console.log('[FeatureFlags] Using cached flags after network error'); + return cached; + } + + // If no cache available, return empty object + console.log('[FeatureFlags] No cache available - returning empty flags'); + return {}; + } +} + +/** + * Check if a specific feature flag is enabled. + */ +export async function isFeatureEnabled(flagName: string): Promise { + try { + const flags = await fetchFeatureFlags(); + return flags[flagName]?.enabled ?? false; + } catch (error) { + console.error(`[FeatureFlags] Failed to check flag '${flagName}':`, error); + return false; + } +} + +/** + * Clear cached feature flags. + * Useful for forcing a refresh or during logout. + */ +export async function clearFeatureFlagsCache(): Promise { + try { + await AsyncStorage.removeItem(STORAGE_KEY); + console.log('[FeatureFlags] Cache cleared'); + } catch (error) { + console.error('[FeatureFlags] Failed to clear cache:', error); + } +} diff --git a/ushadow/mobile/app/services/myceliaApi.ts b/ushadow/mobile/app/services/myceliaApi.ts new file mode 100644 index 00000000..6f74d7f0 --- /dev/null +++ b/ushadow/mobile/app/services/myceliaApi.ts @@ -0,0 +1,191 @@ +/** + * Mycelia API Service + * + * API client for fetching conversations and memories from the Mycelia backend. 
+ */ + +import { getAuthToken, getApiUrl, getDefaultServerUrl } from '../_utils/authStorage'; +import { getActiveUnode } from '../_utils/unodeStorage'; +import type { Conversation, ConversationsResponse } from './chronicleApi'; + +// Mycelia service name for proxy routing +const MYCELIA_SERVICE = 'mycelia-backend'; + +/** + * Get the backend API base URL. + */ +async function getBackendApiUrl(): Promise { + const activeUnode = await getActiveUnode(); + + // First, check if UNode has explicit API URL + if (activeUnode?.apiUrl) { + console.log(`[MyceliaAPI] Using UNode apiUrl: ${activeUnode.apiUrl}`); + return activeUnode.apiUrl; + } + + // Fall back to global storage (legacy) + const storedUrl = await getApiUrl(); + if (storedUrl) { + console.log(`[MyceliaAPI] Using stored URL: ${storedUrl}`); + return storedUrl; + } + + // Default fallback - use configured default server URL + const defaultUrl = await getDefaultServerUrl(); + console.log(`[MyceliaAPI] Using default URL: ${defaultUrl}`); + return defaultUrl; +} + +/** + * Get the auth token from active UNode or global storage. + */ +async function getToken(): Promise { + // First, try to get token from active UNode + const activeUnode = await getActiveUnode(); + if (activeUnode?.authToken) { + return activeUnode.authToken; + } + + // Fall back to global storage (legacy) + return getAuthToken(); +} + +/** + * Make an authenticated API request to Mycelia backend via generic proxy. + */ +async function apiRequest(endpoint: string, options: RequestInit = {}): Promise { + const [apiUrl, token] = await Promise.all([getBackendApiUrl(), getToken()]); + + if (!token) { + throw new Error('Not authenticated. 
Please log in first.'); + } + + // Use generic proxy pattern: /api/services/mycelia-backend/proxy + const url = `${apiUrl}/api/services/${MYCELIA_SERVICE}/proxy${endpoint}`; + console.log(`[MyceliaAPI] ${options.method || 'GET'} ${url}`); + + const response = await fetch(url, { + ...options, + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + ...options.headers, + }, + }); + + if (!response.ok) { + const errorText = await response.text(); + console.error(`[MyceliaAPI] Error ${response.status}:`, errorText); + + // Handle 401 Unauthorized - token is invalid or expired + if (response.status === 401) { + console.log('[MyceliaAPI] Token invalid or expired - clearing auth and prompting re-login'); + + // Clear the invalid token from storage + const { clearAuthToken } = await import('../_utils/authStorage'); + await clearAuthToken(); + + throw new Error('Authentication expired. Please scan QR code to reconnect.'); + } + + // Handle 503 Service Unavailable - Mycelia backend is not running + if (response.status === 503) { + throw new Error('Mycelia service is not available'); + } + + throw new Error(`API request failed: ${response.status} ${response.statusText}`); + } + + return response.json(); +} + +/** + * Normalize Mycelia conversation to match Chronicle format. + * Mycelia may use different field names (id vs conversation_id). 
+ */ +function normalizeConversation(conv: any): Conversation { + return { + conversation_id: conv.conversation_id || conv.id, + audio_uuid: conv.audio_uuid, + user_id: conv.user_id || '', + client_id: conv.client_id || '', + audio_path: conv.audio_path, + cropped_audio_path: conv.cropped_audio_path, + created_at: conv.created_at || conv.timeRanges?.[0]?.start || new Date().toISOString(), + deleted: conv.deleted, + title: conv.title, + summary: conv.summary, + detailed_summary: conv.detailed_summary, + active_transcript_version: conv.active_transcript_version, + segment_count: conv.segment_count, + has_memory: conv.has_memory, + memory_count: conv.memory_count, + transcript_version_count: conv.transcript_version_count, + status: conv.status, + duration_seconds: conv.duration_seconds, + }; +} + +/** + * Fetch user's conversations from Mycelia backend. + */ +export async function fetchConversations( + page: number = 1, + limit: number = 25 +): Promise { + try { + // Calculate skip for pagination (Mycelia uses skip, not page) + const skip = (page - 1) * limit; + + const response = await apiRequest( + `/data/conversations?limit=${limit}&skip=${skip}` + ); + + // Handle both array and object response formats + let conversations: Conversation[]; + if (Array.isArray(response)) { + conversations = response.map(normalizeConversation); + } else if (response && typeof response === 'object' && 'conversations' in response) { + conversations = (response.conversations || []).map(normalizeConversation); + } else { + console.warn('[MyceliaAPI] Unexpected response format:', response); + conversations = []; + } + + return { + conversations, + total: conversations.length, + page, + limit, + }; + } catch (error) { + console.error('[MyceliaAPI] Failed to fetch conversations:', error); + throw error; + } +} + +/** + * Fetch a single conversation by ID from Mycelia. 
+ */ +export async function fetchConversation(conversationId: string): Promise { + try { + const response = await apiRequest(`/data/conversations/${conversationId}`); + return normalizeConversation(response); + } catch (error) { + console.error('[MyceliaAPI] Failed to fetch conversation:', error); + throw error; + } +} + +/** + * Check if Mycelia service is available. + */ +export async function checkAvailability(): Promise { + try { + await apiRequest('/health'); + return true; + } catch (error) { + console.log('[MyceliaAPI] Mycelia service not available:', error); + return false; + } +} diff --git a/ushadow/mobile/app/services/persistentLogger.ts b/ushadow/mobile/app/services/persistentLogger.ts new file mode 100644 index 00000000..f118a19a --- /dev/null +++ b/ushadow/mobile/app/services/persistentLogger.ts @@ -0,0 +1,91 @@ +/** + * Persistent Logger + * + * Logs survive app reloads/refreshes - perfect for debugging background behavior + */ + +import AsyncStorage from '@react-native-async-storage/async-storage'; + +const LOG_STORAGE_KEY = '@persistent_logs'; +const MAX_LOGS = 100; + +export interface PersistentLogEntry { + timestamp: string; + type: 'lifecycle' | 'connection' | 'health' | 'background' | 'error'; + message: string; + details?: any; +} + +/** + * Add a log entry that persists across app reloads + */ +export const addPersistentLog = async ( + type: PersistentLogEntry['type'], + message: string, + details?: any +): Promise => { + try { + const entry: PersistentLogEntry = { + timestamp: new Date().toISOString(), + type, + message, + details, + }; + + // Get existing logs + const existingLogsStr = await AsyncStorage.getItem(LOG_STORAGE_KEY); + const existingLogs: PersistentLogEntry[] = existingLogsStr + ? 
JSON.parse(existingLogsStr) + : []; + + // Add new log at the beginning + const updatedLogs = [entry, ...existingLogs].slice(0, MAX_LOGS); + + // Save back + await AsyncStorage.setItem(LOG_STORAGE_KEY, JSON.stringify(updatedLogs)); + + // Also console log + console.log(`[PersistentLog] [${type}] ${message}`, details || ''); + } catch (error) { + console.error('[PersistentLog] Error saving log:', error); + } +}; + +/** + * Get all persistent logs + */ +export const getPersistentLogs = async (): Promise => { + try { + const logsStr = await AsyncStorage.getItem(LOG_STORAGE_KEY); + return logsStr ? JSON.parse(logsStr) : []; + } catch (error) { + console.error('[PersistentLog] Error reading logs:', error); + return []; + } +}; + +/** + * Clear all persistent logs + */ +export const clearPersistentLogs = async (): Promise => { + try { + await AsyncStorage.removeItem(LOG_STORAGE_KEY); + console.log('[PersistentLog] Logs cleared'); + } catch (error) { + console.error('[PersistentLog] Error clearing logs:', error); + } +}; + +/** + * Get logs as formatted text for sharing + */ +export const getPersistentLogsText = async (): Promise => { + const logs = await getPersistentLogs(); + return logs + .map(log => { + const time = new Date(log.timestamp).toLocaleTimeString(); + const details = log.details ? ` | ${JSON.stringify(log.details)}` : ''; + return `[${time}] [${log.type}] ${log.message}${details}`; + }) + .join('\n'); +}; diff --git a/ushadow/mobile/app/types/streamingSession.ts b/ushadow/mobile/app/types/streamingSession.ts new file mode 100644 index 00000000..7dc0f7e0 --- /dev/null +++ b/ushadow/mobile/app/types/streamingSession.ts @@ -0,0 +1,77 @@ +/** + * Streaming Session Types + * + * Tracks audio streaming sessions with metadata: + * - Source (OMI device or phone microphone) + * - Destinations (Chronicle, Mycelia, etc.) 
+ * - Duration and data volume + * - Connection to Chronicle conversation_id (if available) + */ + +export type SessionSource = + | { type: 'omi'; deviceId: string; deviceName?: string } + | { type: 'microphone' }; + +export interface SessionDestination { + name: string; + url: string; + connected: boolean; + errors: number; +} + +export interface StreamingSession { + id: string; // Client-generated session ID + source: SessionSource; // Audio source + destinations: SessionDestination[]; // Audio destinations + startTime: Date; // Session start timestamp + endTime?: Date; // Session end timestamp (null if active) + durationSeconds?: number; // Calculated duration + bytesTransferred: number; // Total bytes relayed + chunksTransferred: number; // Total audio chunks relayed + conversationId?: string; // Chronicle conversation_id (if available) + codec: 'pcm' | 'opus'; // Audio codec used + networkType?: string; // WiFi, cellular, etc. + error?: string; // Error message if session failed + endReason?: 'manual_stop' | 'connection_lost' | 'error' | 'timeout'; // How the session ended +} + +export interface SessionState { + activeSessions: Map; // Currently active sessions + recentSessions: StreamingSession[]; // Historical sessions +} + +// Helper to generate session ID +export const generateSessionId = (): string => { + return `session_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; +}; + +// Helper to format duration +export const formatDuration = (seconds: number): string => { + if (seconds < 60) return `${seconds}s`; + const minutes = Math.floor(seconds / 60); + const secs = seconds % 60; + if (minutes < 60) return `${minutes}m ${secs}s`; + const hours = Math.floor(minutes / 60); + const mins = minutes % 60; + return `${hours}h ${mins}m`; +}; + +// Helper to format bytes +export const formatBytes = (bytes: number): string => { + if (bytes < 1024) return `${bytes} B`; + if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`; + return `${(bytes / 
(1024 * 1024)).toFixed(1)} MB`; +}; + +// Helper to get session duration in seconds +export const getSessionDuration = (session: StreamingSession): number => { + if (session.durationSeconds !== undefined) return session.durationSeconds; + const start = new Date(session.startTime).getTime(); + const end = session.endTime ? new Date(session.endTime).getTime() : Date.now(); + return Math.floor((end - start) / 1000); +}; + +// Helper to check if session is active +export const isSessionActive = (session: StreamingSession): boolean => { + return !session.endTime; +}; diff --git a/ushadow/mobile/app/unode-details.tsx b/ushadow/mobile/app/unode-details.tsx index a4307d54..b479425b 100644 --- a/ushadow/mobile/app/unode-details.tsx +++ b/ushadow/mobile/app/unode-details.tsx @@ -471,8 +471,9 @@ export default function UNodeDetailsPage() { }; // Determine overall status for collapsed view - const isConnected = status.ushadow === 'connected' && status.chronicle === 'connected'; - const hasError = status.ushadow === 'error' || status.chronicle === 'error'; + // Only ushadow connection is required (chronicle is optional) + const isConnected = status.ushadow === 'connected'; + const hasError = status.ushadow === 'error'; // Only fail on ushadow error const isChecking = status.ushadow === 'checking' || status.chronicle === 'checking'; return ( @@ -741,8 +742,9 @@ export default function UNodeDetailsPage() { // Render other node item const renderOtherNode = (node: UNode) => { const status = statuses[node.id]; - const isConnected = status?.ushadow === 'connected' && status?.chronicle === 'connected'; - const hasError = status?.ushadow === 'error' || status?.chronicle === 'error'; + // Only ushadow connection is required (chronicle is optional) + const isConnected = status?.ushadow === 'connected'; + const hasError = status?.ushadow === 'error'; // Only fail on ushadow error return ( =6.9.0" } @@ -3279,6 +3283,7 @@ "resolved": 
"https://registry.npmjs.org/@react-navigation/native/-/native-7.1.26.tgz", "integrity": "sha512-RhKmeD0E2ejzKS6z8elAfdfwShpcdkYY8zJzvHYLq+wv183BBcElTeyMLcIX6wIn7QutXeI92Yi21t7aUWfqNQ==", "license": "MIT", + "peer": true, "dependencies": { "@react-navigation/core": "^7.13.7", "escape-string-regexp": "^4.0.0", @@ -3477,6 +3482,7 @@ "integrity": "sha512-Qec1E3mhALmaspIrhWt9jkQMNdw6bReVu64mjvhbhq2NFPftLPVr+l1SZgmw/66WwBNpDh7ao5AT6gF5v41PFA==", "devOptional": true, "license": "MIT", + "peer": true, "dependencies": { "csstype": "^3.0.2" } @@ -3547,6 +3553,7 @@ "integrity": "sha512-3xP4XzzDNQOIqBMWogftkwxhg5oMKApqY0BAflmLZiFYHqyhSOxv/cd/zPQLTcCXr4AkaKb25joocY0BD1WC6A==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "8.51.0", "@typescript-eslint/types": "8.51.0", @@ -4115,6 +4122,7 @@ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "license": "MIT", + "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -4815,6 +4823,7 @@ } ], "license": "MIT", + "peer": true, "dependencies": { "baseline-browser-mapping": "^2.9.0", "caniuse-lite": "^1.0.30001759", @@ -5798,6 +5807,7 @@ "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", @@ -5994,6 +6004,7 @@ "integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@rtsao/scc": "^1.1.0", "array-includes": "^3.1.9", @@ -6232,6 +6243,7 @@ "resolved": "https://registry.npmjs.org/expo/-/expo-54.0.31.tgz", "integrity": "sha512-kQ3RDqA/a59I7y+oqQGyrPbbYlgPMUdKBOgvFLpoHbD2bCM+F75i4N0mUijy7dG5F/CUCu2qHmGGUCXBbMDkCg==", "license": "MIT", + 
"peer": true, "dependencies": { "@babel/runtime": "^7.20.0", "@expo/cli": "54.0.21", @@ -6311,6 +6323,18 @@ } } }, + "node_modules/expo-background-fetch": { + "version": "14.0.9", + "resolved": "https://registry.npmjs.org/expo-background-fetch/-/expo-background-fetch-14.0.9.tgz", + "integrity": "sha512-IhdbjIu9EdsYaL7mCCvf/i48Qy4a5rpRy038/4KNUoa9xmsETRwFCdsoZj4VHg4dVt2D0kiDrgqVVlPBSSWt+Q==", + "license": "MIT", + "dependencies": { + "expo-task-manager": "~14.0.9" + }, + "peerDependencies": { + "expo": "*" + } + }, "node_modules/expo-build-properties": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/expo-build-properties/-/expo-build-properties-1.0.10.tgz", @@ -6383,6 +6407,7 @@ "resolved": "https://registry.npmjs.org/expo-constants/-/expo-constants-18.0.13.tgz", "integrity": "sha512-FnZn12E1dRYKDHlAdIyNFhBurKTS3F9CrfrBDJI5m3D7U17KBHMQ6JEfYlSj7LG7t+Ulr+IKaj58L1k5gBwTcQ==", "license": "MIT", + "peer": true, "dependencies": { "@expo/config": "~12.0.13", "@expo/env": "~2.0.8" @@ -6480,6 +6505,7 @@ "resolved": "https://registry.npmjs.org/expo-font/-/expo-font-14.0.10.tgz", "integrity": "sha512-UqyNaaLKRpj4pKAP4HZSLnuDQqueaO5tB1c/NWu5vh1/LF9ulItyyg2kF/IpeOp0DeOLk0GY0HrIXaKUMrwB+Q==", "license": "MIT", + "peer": true, "dependencies": { "fontfaceobserver": "^2.1.0" }, @@ -6547,6 +6573,7 @@ "resolved": "https://registry.npmjs.org/expo-linking/-/expo-linking-8.0.11.tgz", "integrity": "sha512-+VSaNL5om3kOp/SSKO5qe6cFgfSIWnnQDSbA7XLs3ECkYzXRquk5unxNS3pg7eK5kNUmQ4kgLI7MhTggAEUBLA==", "license": "MIT", + "peer": true, "dependencies": { "expo-constants": "~18.0.12", "invariant": "^2.2.4" @@ -6918,6 +6945,19 @@ } } }, + "node_modules/expo-task-manager": { + "version": "14.0.9", + "resolved": "https://registry.npmjs.org/expo-task-manager/-/expo-task-manager-14.0.9.tgz", + "integrity": "sha512-GKWtXrkedr4XChHfTm5IyTcSfMtCPxzx89y4CMVqKfyfROATibrE/8UI5j7UC/pUOfFoYlQvulQEvECMreYuUA==", + "license": "MIT", + "dependencies": { + "unimodules-app-loader": "~6.0.8" + }, + 
"peerDependencies": { + "expo": "*", + "react-native": "*" + } + }, "node_modules/expo-updates-interface": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/expo-updates-interface/-/expo-updates-interface-2.0.0.tgz", @@ -10740,6 +10780,7 @@ "resolved": "https://registry.npmjs.org/react/-/react-19.1.0.tgz", "integrity": "sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg==", "license": "MIT", + "peer": true, "engines": { "node": ">=0.10.0" } @@ -10759,6 +10800,7 @@ "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.1.0.tgz", "integrity": "sha512-Xs1hdnE+DyKgeHJeJznQmYMIBG3TKIHJJT95Q58nHLSrElKlGQqDTR2HQ9fx5CN/Gk6Vh/kupBTDLU11/nDk/g==", "license": "MIT", + "peer": true, "dependencies": { "scheduler": "^0.26.0" }, @@ -10795,6 +10837,7 @@ "resolved": "https://registry.npmjs.org/react-native/-/react-native-0.81.5.tgz", "integrity": "sha512-1w+/oSjEXZjMqsIvmkCRsOc8UBYv163bTWKTI8+1mxztvQPhCRYGTvZ/PL1w16xXHneIj/SLGfxWg2GWN2uexw==", "license": "MIT", + "peer": true, "dependencies": { "@jest/create-cache-key-function": "^29.7.0", "@react-native/assets-registry": "0.81.5", @@ -10877,6 +10920,7 @@ "resolved": "https://registry.npmjs.org/react-native-gesture-handler/-/react-native-gesture-handler-2.28.0.tgz", "integrity": "sha512-0msfJ1vRxXKVgTgvL+1ZOoYw3/0z1R+Ked0+udoJhyplC2jbVKIJ8Z1bzWdpQRCV3QcQ87Op0zJVE5DhKK2A0A==", "license": "MIT", + "peer": true, "dependencies": { "@egjs/hammerjs": "^2.0.17", "hoist-non-react-statics": "^3.3.0", @@ -10911,6 +10955,7 @@ "resolved": "https://registry.npmjs.org/react-native-safe-area-context/-/react-native-safe-area-context-5.6.2.tgz", "integrity": "sha512-4XGqMNj5qjUTYywJqpdWZ9IG8jgkS3h06sfVjfw5yZQZfWnRFXczi0GnYyFyCc2EBps/qFmoCH8fez//WumdVg==", "license": "MIT", + "peer": true, "peerDependencies": { "react": "*", "react-native": "*" @@ -10921,6 +10966,7 @@ "resolved": "https://registry.npmjs.org/react-native-screens/-/react-native-screens-4.16.0.tgz", "integrity": 
"sha512-yIAyh7F/9uWkOzCi1/2FqvNvK6Wb9Y1+Kzn16SuGfN9YFJDTbwlzGRvePCNTOX0recpLQF3kc2FmvMUhyTCH1Q==", "license": "MIT", + "peer": true, "dependencies": { "react-freeze": "^1.0.0", "react-native-is-edge-to-edge": "^1.2.1", @@ -10936,6 +10982,7 @@ "resolved": "https://registry.npmjs.org/react-native-web/-/react-native-web-0.21.2.tgz", "integrity": "sha512-SO2t9/17zM4iEnFvlu2DA9jqNbzNhoUP+AItkoCOyFmDMOhUnBBznBDCYN92fGdfAkfQlWzPoez6+zLxFNsZEg==", "license": "MIT", + "peer": true, "dependencies": { "@babel/runtime": "^7.18.6", "@react-native/normalize-colors": "^0.74.1", @@ -11042,6 +11089,7 @@ "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.14.2.tgz", "integrity": "sha512-jCvmsr+1IUSMUyzOkRcvnVbX3ZYC6g9TDrDbFuFmRDq7PD4yaGbLKNQL6k2jnArV8hjYxh7hVhAZB6s9HDGpZA==", "license": "MIT", + "peer": true, "engines": { "node": ">=0.10.0" } @@ -12353,6 +12401,7 @@ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, @@ -12569,6 +12618,7 @@ "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", "dev": true, "license": "Apache-2.0", + "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -12677,6 +12727,12 @@ "node": ">=4" } }, + "node_modules/unimodules-app-loader": { + "version": "6.0.8", + "resolved": "https://registry.npmjs.org/unimodules-app-loader/-/unimodules-app-loader-6.0.8.tgz", + "integrity": "sha512-fqS8QwT/MC/HAmw1NKCHdzsPA6WaLm0dNmoC5Pz6lL+cDGYeYCNdHMO9fy08aL2ZD7cVkNM0pSR/AoNRe+rslA==", + "license": "MIT" + }, "node_modules/unique-string": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", diff --git a/ushadow/mobile/package.json b/ushadow/mobile/package.json index b501d01b..438a3f13 100644 --- a/ushadow/mobile/package.json +++ 
b/ushadow/mobile/package.json @@ -21,6 +21,7 @@ "@react-navigation/native": "^7.1.8", "expo": "~54.0.30", "expo-av": "^16.0.8", + "expo-background-fetch": "~14.0.9", "expo-build-properties": "^1.0.10", "expo-camera": "~17.0.7", "expo-constants": "~18.0.12", @@ -35,6 +36,7 @@ "expo-status-bar": "~3.0.9", "expo-symbols": "~1.0.8", "expo-system-ui": "~6.0.9", + "expo-task-manager": "~14.0.9", "expo-web-browser": "~15.0.10", "friend-lite-react-native": "^1.0.2", "react": "19.1.0", diff --git a/vibe-kanban b/vibe-kanban deleted file mode 160000 index d54a4620..00000000 --- a/vibe-kanban +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d54a46209b99f7dab298e3adb607efc4e63116c7