From 9c38d4013e2f16f760de69004f238c7164a9a768 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Sun, 30 Nov 2025 16:30:48 +0000 Subject: [PATCH 01/31] moved memory and transcription to services --- .gitmodules | 3 + .../src/advanced_omi_backend/app_factory.py | 2 +- .../src/advanced_omi_backend/chat_service.py | 2 +- .../src/advanced_omi_backend/config.py | 6 +- .../controllers/memory_controller.py | 2 +- .../controllers/system_controller.py | 4 +- .../controllers/user_controller.py | 2 +- .../memory/providers/__init__.py | 41 --- .../models/conversation.py | 1 + .../routers/modules/health_routes.py | 2 +- .../services/audio_stream/producer.py | 2 +- .../{ => services}/memory/README.md | 0 .../{ => services}/memory/__init__.py | 10 +- .../{ => services}/memory/base.py | 0 .../{ => services}/memory/config.py | 19 ++ .../{ => services}/memory/prompts.py | 0 .../services/memory/providers/__init__.py | 30 ++ .../memory/providers}/compat_service.py | 8 +- .../memory/providers/friend_lite.py} | 14 +- .../memory/providers/llm_providers.py | 0 .../memory/providers/mcp_client.py | 0 .../services/memory/providers/mycelia.py | 277 ++++++++++++++++++ .../memory/providers/openmemory_mcp.py} | 0 .../memory/providers/vector_stores.py | 0 .../{ => services}/memory/service_factory.py | 24 +- .../memory/update_memory_utils.py | 0 .../{ => services}/memory/utils.py | 0 .../services/transcription/__init__.py | 2 +- .../transcription/base.py} | 0 .../services/transcription/deepgram.py | 2 +- .../services/transcription/parakeet.py | 2 +- .../utils/conversation_utils.py | 15 +- .../workers/memory_jobs.py | 4 +- .../workers/transcription_jobs.py | 69 +++++ extras/mycelia | 1 + 35 files changed, 462 insertions(+), 82 deletions(-) create mode 100644 .gitmodules delete mode 100644 backends/advanced/src/advanced_omi_backend/memory/providers/__init__.py rename backends/advanced/src/advanced_omi_backend/{ => services}/memory/README.md (100%) rename backends/advanced/src/advanced_omi_backend/{ 
=> services}/memory/__init__.py (92%) rename backends/advanced/src/advanced_omi_backend/{ => services}/memory/base.py (100%) rename backends/advanced/src/advanced_omi_backend/{ => services}/memory/config.py (95%) rename backends/advanced/src/advanced_omi_backend/{ => services}/memory/prompts.py (100%) create mode 100644 backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py rename backends/advanced/src/advanced_omi_backend/{memory => services/memory/providers}/compat_service.py (98%) rename backends/advanced/src/advanced_omi_backend/{memory/memory_service.py => services/memory/providers/friend_lite.py} (99%) rename backends/advanced/src/advanced_omi_backend/{ => services}/memory/providers/llm_providers.py (100%) rename backends/advanced/src/advanced_omi_backend/{ => services}/memory/providers/mcp_client.py (100%) create mode 100644 backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py rename backends/advanced/src/advanced_omi_backend/{memory/providers/openmemory_mcp_service.py => services/memory/providers/openmemory_mcp.py} (100%) rename backends/advanced/src/advanced_omi_backend/{ => services}/memory/providers/vector_stores.py (100%) rename backends/advanced/src/advanced_omi_backend/{ => services}/memory/service_factory.py (89%) rename backends/advanced/src/advanced_omi_backend/{ => services}/memory/update_memory_utils.py (100%) rename backends/advanced/src/advanced_omi_backend/{ => services}/memory/utils.py (100%) rename backends/advanced/src/advanced_omi_backend/{models/transcription.py => services/transcription/base.py} (100%) create mode 160000 extras/mycelia diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..ffffaa52 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "extras/mycelia"] + path = extras/mycelia + url = https://github.com/mycelia-tech/mycelia diff --git a/backends/advanced/src/advanced_omi_backend/app_factory.py 
b/backends/advanced/src/advanced_omi_backend/app_factory.py index 52a48093..8aa0c97a 100644 --- a/backends/advanced/src/advanced_omi_backend/app_factory.py +++ b/backends/advanced/src/advanced_omi_backend/app_factory.py @@ -30,7 +30,7 @@ register_client_to_user, ) from advanced_omi_backend.client_manager import get_client_manager -from advanced_omi_backend.memory import get_memory_service, shutdown_memory_service +from advanced_omi_backend.services.memory import get_memory_service, shutdown_memory_service from advanced_omi_backend.middleware.app_middleware import setup_middleware from advanced_omi_backend.routers.api_router import router as api_router from advanced_omi_backend.routers.modules.health_routes import router as health_router diff --git a/backends/advanced/src/advanced_omi_backend/chat_service.py b/backends/advanced/src/advanced_omi_backend/chat_service.py index 812f8af0..4ec5ecff 100644 --- a/backends/advanced/src/advanced_omi_backend/chat_service.py +++ b/backends/advanced/src/advanced_omi_backend/chat_service.py @@ -22,7 +22,7 @@ from advanced_omi_backend.database import get_database from advanced_omi_backend.llm_client import get_llm_client -from advanced_omi_backend.memory import get_memory_service +from advanced_omi_backend.services.memory import get_memory_service from advanced_omi_backend.users import User logger = logging.getLogger(__name__) diff --git a/backends/advanced/src/advanced_omi_backend/config.py b/backends/advanced/src/advanced_omi_backend/config.py index ceebcad0..f2168e6d 100644 --- a/backends/advanced/src/advanced_omi_backend/config.py +++ b/backends/advanced/src/advanced_omi_backend/config.py @@ -30,9 +30,9 @@ # Default speech detection settings DEFAULT_SPEECH_DETECTION_SETTINGS = { - "min_words": 5, # Minimum words to create conversation - "min_confidence": 0.5, # Word confidence threshold (unified) - "min_duration": 2.0, # Minimum speech duration (seconds) + "min_words": 10, # Minimum words to create conversation (increased from 
5) + "min_confidence": 0.7, # Word confidence threshold (increased from 0.5) + "min_duration": 10.0, # Minimum speech duration in seconds (increased from 2.0) } # Default conversation stop settings diff --git a/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py b/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py index e5f576c2..f6ca8387 100644 --- a/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py +++ b/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py @@ -8,7 +8,7 @@ from fastapi.responses import JSONResponse -from advanced_omi_backend.memory import get_memory_service +from advanced_omi_backend.services.memory import get_memory_service from advanced_omi_backend.users import User logger = logging.getLogger(__name__) diff --git a/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py b/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py index 5bc0b35d..a2afadbc 100644 --- a/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py +++ b/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py @@ -442,8 +442,8 @@ async def reload_memory_config(): async def delete_all_user_memories(user: User): """Delete all memories for the current user.""" try: - from advanced_omi_backend.memory import get_memory_service - + from advanced_omi_backend.services.memory import get_memory_service + memory_service = get_memory_service() # Delete all memories for the user diff --git a/backends/advanced/src/advanced_omi_backend/controllers/user_controller.py b/backends/advanced/src/advanced_omi_backend/controllers/user_controller.py index ba7dd753..a1b9c140 100644 --- a/backends/advanced/src/advanced_omi_backend/controllers/user_controller.py +++ b/backends/advanced/src/advanced_omi_backend/controllers/user_controller.py @@ -16,7 +16,7 @@ ) from advanced_omi_backend.client_manager import get_user_clients_all 
from advanced_omi_backend.database import db, users_col -from advanced_omi_backend.memory import get_memory_service +from advanced_omi_backend.services.memory import get_memory_service from advanced_omi_backend.models.conversation import Conversation from advanced_omi_backend.users import User, UserCreate, UserUpdate diff --git a/backends/advanced/src/advanced_omi_backend/memory/providers/__init__.py b/backends/advanced/src/advanced_omi_backend/memory/providers/__init__.py deleted file mode 100644 index 59ded58e..00000000 --- a/backends/advanced/src/advanced_omi_backend/memory/providers/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Memory service providers package. - -This package contains implementations of LLM providers, vector stores, -and complete memory service implementations for the memory service architecture. -""" - -from ..base import LLMProviderBase, VectorStoreBase, MemoryEntry -from .llm_providers import OpenAIProvider -from .vector_stores import QdrantVectorStore - -# Import complete memory service implementations -try: - from .openmemory_mcp_service import OpenMemoryMCPService -except ImportError: - OpenMemoryMCPService = None - -try: - from .mcp_client import MCPClient, MCPError -except ImportError: - MCPClient = None - MCPError = None - -__all__ = [ - # Base classes - "LLMProviderBase", - "VectorStoreBase", - "MemoryEntry", - - # LLM providers - "OpenAIProvider", - - # Vector stores - "QdrantVectorStore", - - # Complete memory service implementations - "OpenMemoryMCPService", - - # MCP client components - "MCPClient", - "MCPError", -] \ No newline at end of file diff --git a/backends/advanced/src/advanced_omi_backend/models/conversation.py b/backends/advanced/src/advanced_omi_backend/models/conversation.py index 7caf8a55..55c31244 100644 --- a/backends/advanced/src/advanced_omi_backend/models/conversation.py +++ b/backends/advanced/src/advanced_omi_backend/models/conversation.py @@ -30,6 +30,7 @@ class MemoryProvider(str, Enum): """Supported 
memory providers.""" FRIEND_LITE = "friend_lite" OPENMEMORY_MCP = "openmemory_mcp" + MYCELIA = "mycelia" class ConversationStatus(str, Enum): """Conversation processing status.""" diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py index 37913c48..1634bc3d 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py @@ -18,7 +18,7 @@ from advanced_omi_backend.controllers.queue_controller import redis_conn from advanced_omi_backend.client_manager import get_client_manager from advanced_omi_backend.llm_client import async_health_check -from advanced_omi_backend.memory import get_memory_service +from advanced_omi_backend.services.memory import get_memory_service from advanced_omi_backend.services.transcription import get_transcription_provider # Create router diff --git a/backends/advanced/src/advanced_omi_backend/services/audio_stream/producer.py b/backends/advanced/src/advanced_omi_backend/services/audio_stream/producer.py index 95bf25e1..66b0acf7 100644 --- a/backends/advanced/src/advanced_omi_backend/services/audio_stream/producer.py +++ b/backends/advanced/src/advanced_omi_backend/services/audio_stream/producer.py @@ -7,7 +7,7 @@ import redis.asyncio as redis -from advanced_omi_backend.models.transcription import TranscriptionProvider +from advanced_omi_backend.services.transcription.base import TranscriptionProvider logger = logging.getLogger(__name__) diff --git a/backends/advanced/src/advanced_omi_backend/memory/README.md b/backends/advanced/src/advanced_omi_backend/services/memory/README.md similarity index 100% rename from backends/advanced/src/advanced_omi_backend/memory/README.md rename to backends/advanced/src/advanced_omi_backend/services/memory/README.md diff --git a/backends/advanced/src/advanced_omi_backend/memory/__init__.py 
b/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py similarity index 92% rename from backends/advanced/src/advanced_omi_backend/memory/__init__.py rename to backends/advanced/src/advanced_omi_backend/services/memory/__init__.py index 1fcc786a..42cba194 100644 --- a/backends/advanced/src/advanced_omi_backend/memory/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py @@ -27,7 +27,7 @@ memory_logger.info("๐Ÿ†• Using NEW memory service implementation") try: - from .compat_service import ( + from .providers.compat_service import ( MemoryService, get_memory_service, migrate_from_mem0, @@ -35,7 +35,7 @@ ) # Also import core implementation for direct access - from .memory_service import MemoryService as CoreMemoryService + from .providers.friend_lite import MemoryService as CoreMemoryService test_new_memory_service = None # Will be implemented if needed except ImportError as e: memory_logger.error(f"Failed to import new memory service: {e}") @@ -55,8 +55,10 @@ create_openai_config, create_qdrant_config, ) - from .providers import OpenMemoryMCPService # New complete memory service - from .providers import MCPClient, MCPError, OpenAIProvider, QdrantVectorStore + from .providers.openmemory_mcp import OpenMemoryMCPService # New complete memory service + from .providers.mcp_client import MCPClient, MCPError + from .providers.llm_providers import OpenAIProvider + from .providers.vector_stores import QdrantVectorStore from .service_factory import create_memory_service from .service_factory import get_memory_service as get_core_memory_service from .service_factory import get_service_info as get_core_service_info diff --git a/backends/advanced/src/advanced_omi_backend/memory/base.py b/backends/advanced/src/advanced_omi_backend/services/memory/base.py similarity index 100% rename from backends/advanced/src/advanced_omi_backend/memory/base.py rename to backends/advanced/src/advanced_omi_backend/services/memory/base.py diff 
--git a/backends/advanced/src/advanced_omi_backend/memory/config.py b/backends/advanced/src/advanced_omi_backend/services/memory/config.py similarity index 95% rename from backends/advanced/src/advanced_omi_backend/memory/config.py rename to backends/advanced/src/advanced_omi_backend/services/memory/config.py index 99e79d38..ae03fcd8 100644 --- a/backends/advanced/src/advanced_omi_backend/memory/config.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/config.py @@ -36,6 +36,7 @@ class MemoryProvider(Enum): """Supported memory service providers.""" FRIEND_LITE = "friend_lite" # Default sophisticated implementation OPENMEMORY_MCP = "openmemory_mcp" # OpenMemory MCP backend + MYCELIA = "mycelia" # Mycelia memory backend @dataclass @@ -48,6 +49,7 @@ class MemoryConfig: vector_store_config: Dict[str, Any] = None embedder_config: Dict[str, Any] = None openmemory_config: Dict[str, Any] = None # Configuration for OpenMemory MCP + mycelia_config: Dict[str, Any] = None # Configuration for Mycelia extraction_prompt: str = None extraction_enabled: bool = True timeout_seconds: int = 1200 @@ -122,6 +124,23 @@ def create_openmemory_config( } +def create_mycelia_config( + api_url: str = "http://localhost:8080", + api_key: str = None, + timeout: int = 30, + **kwargs +) -> Dict[str, Any]: + """Create Mycelia configuration.""" + config = { + "api_url": api_url, + "timeout": timeout, + } + if api_key: + config["api_key"] = api_key + config.update(kwargs) + return config + + def build_memory_config_from_env() -> MemoryConfig: """Build memory configuration from environment variables and YAML config.""" try: diff --git a/backends/advanced/src/advanced_omi_backend/memory/prompts.py b/backends/advanced/src/advanced_omi_backend/services/memory/prompts.py similarity index 100% rename from backends/advanced/src/advanced_omi_backend/memory/prompts.py rename to backends/advanced/src/advanced_omi_backend/services/memory/prompts.py diff --git 
a/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py new file mode 100644 index 00000000..591fbc2b --- /dev/null +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py @@ -0,0 +1,30 @@ +"""Memory service provider implementations. + +This package contains all memory service provider implementations: +- friend_lite: Friend-Lite native implementation with LLM + vector store +- openmemory_mcp: OpenMemory MCP backend integration +- mycelia: Mycelia backend integration +- llm_providers: LLM provider implementations (OpenAI, Ollama) +- vector_stores: Vector store implementations (Qdrant) +- mcp_client: MCP client utilities +- compat_service: Backward compatibility wrapper +""" + +from .friend_lite import MemoryService as FriendLiteMemoryService +from .openmemory_mcp import OpenMemoryMCPService +from .mycelia import MyceliaMemoryService +from .llm_providers import OpenAIProvider +from .vector_stores import QdrantVectorStore +from .mcp_client import MCPClient, MCPError +from .compat_service import MemoryService + +__all__ = [ + "FriendLiteMemoryService", + "OpenMemoryMCPService", + "MyceliaMemoryService", + "OpenAIProvider", + "QdrantVectorStore", + "MCPClient", + "MCPError", + "MemoryService", +] diff --git a/backends/advanced/src/advanced_omi_backend/memory/compat_service.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/compat_service.py similarity index 98% rename from backends/advanced/src/advanced_omi_backend/memory/compat_service.py rename to backends/advanced/src/advanced_omi_backend/services/memory/providers/compat_service.py index 3814f29e..361f8bcd 100644 --- a/backends/advanced/src/advanced_omi_backend/memory/compat_service.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/compat_service.py @@ -10,8 +10,8 @@ import os from typing import Any, Dict, List, Optional, Tuple 
-from .config import build_memory_config_from_env -from .memory_service import MemoryService as CoreMemoryService +from ..config import build_memory_config_from_env +from .friend_lite import MemoryService as CoreMemoryService memory_logger = logging.getLogger("memory_service") @@ -395,8 +395,8 @@ def get_memory_service() -> MemoryService: global _memory_service if _memory_service is None: # Use the new service factory to create the appropriate service - from .service_factory import get_memory_service as get_core_service - + from ..service_factory import get_memory_service as get_core_service + core_service = get_core_service() # If it's already a compat service, use it directly diff --git a/backends/advanced/src/advanced_omi_backend/memory/memory_service.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py similarity index 99% rename from backends/advanced/src/advanced_omi_backend/memory/memory_service.py rename to backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py index 6460aa25..be91a5f5 100644 --- a/backends/advanced/src/advanced_omi_backend/memory/memory_service.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py @@ -11,15 +11,11 @@ import uuid from typing import Any, List, Optional, Tuple -from .base import MemoryEntry, MemoryServiceBase -from .config import LLMProvider as LLMProviderEnum -from .config import MemoryConfig, VectorStoreProvider -from .providers import ( - LLMProviderBase, - OpenAIProvider, - QdrantVectorStore, - VectorStoreBase, -) +from ..base import LLMProviderBase, MemoryEntry, MemoryServiceBase, VectorStoreBase +from ..config import LLMProvider as LLMProviderEnum +from ..config import MemoryConfig, VectorStoreProvider +from .llm_providers import OpenAIProvider +from .vector_stores import QdrantVectorStore memory_logger = logging.getLogger("memory_service") diff --git 
a/backends/advanced/src/advanced_omi_backend/memory/providers/llm_providers.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/llm_providers.py similarity index 100% rename from backends/advanced/src/advanced_omi_backend/memory/providers/llm_providers.py rename to backends/advanced/src/advanced_omi_backend/services/memory/providers/llm_providers.py diff --git a/backends/advanced/src/advanced_omi_backend/memory/providers/mcp_client.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py similarity index 100% rename from backends/advanced/src/advanced_omi_backend/memory/providers/mcp_client.py rename to backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py new file mode 100644 index 00000000..ccf30160 --- /dev/null +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py @@ -0,0 +1,277 @@ +"""Mycelia memory service implementation. + +This module provides a concrete implementation of the MemoryServiceBase interface +that uses Mycelia as the backend for all memory operations. +""" + +import logging +from typing import Any, List, Optional, Tuple + +from ..base import MemoryEntry, MemoryServiceBase + +memory_logger = logging.getLogger("memory_service") + + +class MyceliaMemoryService(MemoryServiceBase): + """Memory service implementation using Mycelia backend. + + This class implements the MemoryServiceBase interface by delegating memory + operations to a Mycelia server. 
+ + Args: + api_url: Mycelia API endpoint URL + api_key: Optional API key for authentication + timeout: Request timeout in seconds + **kwargs: Additional configuration parameters + """ + + def __init__( + self, + api_url: str = "http://localhost:8080", + api_key: Optional[str] = None, + timeout: int = 30, + **kwargs + ): + """Initialize Mycelia memory service. + + Args: + api_url: Mycelia API endpoint + api_key: Optional API key for authentication + timeout: Request timeout in seconds + **kwargs: Additional configuration parameters + """ + self.api_url = api_url + self.api_key = api_key + self.timeout = timeout + self.config = kwargs + self._initialized = False + + memory_logger.info(f"๐Ÿ„ Initializing Mycelia memory service at {api_url}") + + async def initialize(self) -> None: + """Initialize Mycelia client and verify connection.""" + try: + # TODO: Initialize your Mycelia client here + # Example: self.client = MyceliaClient(self.api_url, self.api_key) + + # Test connection + if not await self.test_connection(): + raise RuntimeError("Failed to connect to Mycelia service") + + self._initialized = True + memory_logger.info("โœ… Mycelia memory service initialized successfully") + + except Exception as e: + memory_logger.error(f"โŒ Failed to initialize Mycelia service: {e}") + raise RuntimeError(f"Mycelia initialization failed: {e}") + + async def add_memory( + self, + transcript: str, + client_id: str, + source_id: str, + user_id: str, + user_email: str, + allow_update: bool = False, + db_helper: Any = None, + ) -> Tuple[bool, List[str]]: + """Add memories from transcript using Mycelia. + + Args: + transcript: Raw transcript text to extract memories from + client_id: Client identifier + source_id: Unique identifier for the source (audio session, chat session, etc.) 
+ user_id: User identifier + user_email: User email address + allow_update: Whether to allow updating existing memories + db_helper: Optional database helper for tracking relationships + + Returns: + Tuple of (success: bool, created_memory_ids: List[str]) + """ + try: + # TODO: Implement your Mycelia API call to add memories + # Example implementation: + # response = await self.client.add_memories( + # transcript=transcript, + # user_id=user_id, + # metadata={ + # "client_id": client_id, + # "source_id": source_id, + # "user_email": user_email, + # } + # ) + # return (True, response.memory_ids) + + memory_logger.warning("Mycelia add_memory not yet implemented") + return (False, []) + + except Exception as e: + memory_logger.error(f"Failed to add memory via Mycelia: {e}") + return (False, []) + + async def search_memories( + self, query: str, user_id: str, limit: int = 10, score_threshold: float = 0.0 + ) -> List[MemoryEntry]: + """Search memories using Mycelia semantic search. + + Args: + query: Search query text + user_id: User identifier to filter memories + limit: Maximum number of results to return + score_threshold: Minimum similarity score (0.0 = no threshold) + + Returns: + List of matching MemoryEntry objects ordered by relevance + """ + try: + # TODO: Implement Mycelia search + # Example implementation: + # results = await self.client.search( + # query=query, + # user_id=user_id, + # limit=limit, + # threshold=score_threshold + # ) + # return [ + # MemoryEntry( + # id=r.id, + # memory=r.text, + # user_id=user_id, + # metadata=r.metadata, + # score=r.score + # ) + # for r in results + # ] + + memory_logger.warning("Mycelia search_memories not yet implemented") + return [] + + except Exception as e: + memory_logger.error(f"Failed to search memories via Mycelia: {e}") + return [] + + async def get_all_memories( + self, user_id: str, limit: int = 100 + ) -> List[MemoryEntry]: + """Get all memories for a user from Mycelia. 
+ + Args: + user_id: User identifier + limit: Maximum number of memories to return + + Returns: + List of MemoryEntry objects for the user + """ + try: + # TODO: Implement Mycelia get all + # Example implementation: + # results = await self.client.get_all(user_id=user_id, limit=limit) + # return [ + # MemoryEntry( + # id=r.id, + # memory=r.text, + # user_id=user_id, + # metadata=r.metadata + # ) + # for r in results + # ] + + memory_logger.warning("Mycelia get_all_memories not yet implemented") + return [] + + except Exception as e: + memory_logger.error(f"Failed to get memories via Mycelia: {e}") + return [] + + async def count_memories(self, user_id: str) -> Optional[int]: + """Count memories for a user. + + Args: + user_id: User identifier + + Returns: + Total count of memories for the user, or None if not supported + """ + try: + # TODO: Implement if Mycelia supports efficient counting + # Example: + # return await self.client.count(user_id=user_id) + + return None # Not implemented yet + + except Exception as e: + memory_logger.error(f"Failed to count memories via Mycelia: {e}") + return None + + async def delete_memory(self, memory_id: str) -> bool: + """Delete a specific memory from Mycelia. + + Args: + memory_id: Unique identifier of the memory to delete + + Returns: + True if successfully deleted, False otherwise + """ + try: + # TODO: Implement Mycelia delete + # Example: + # success = await self.client.delete(memory_id=memory_id) + # return success + + memory_logger.warning("Mycelia delete_memory not yet implemented") + return False + + except Exception as e: + memory_logger.error(f"Failed to delete memory via Mycelia: {e}") + return False + + async def delete_all_user_memories(self, user_id: str) -> int: + """Delete all memories for a user from Mycelia. 
+ + Args: + user_id: User identifier + + Returns: + Number of memories that were deleted + """ + try: + # TODO: Implement Mycelia bulk delete + # Example: + # count = await self.client.delete_all(user_id=user_id) + # return count + + memory_logger.warning("Mycelia delete_all_user_memories not yet implemented") + return 0 + + except Exception as e: + memory_logger.error(f"Failed to delete user memories via Mycelia: {e}") + return 0 + + async def test_connection(self) -> bool: + """Test connection to Mycelia service. + + Returns: + True if connection is healthy, False otherwise + """ + try: + # TODO: Implement health check + # Example: + # return await self.client.health_check() + + # For now, just check if URL is set + memory_logger.warning("Mycelia test_connection not fully implemented (stub)") + return self.api_url is not None + + except Exception as e: + memory_logger.error(f"Mycelia connection test failed: {e}") + return False + + def shutdown(self) -> None: + """Shutdown Mycelia client and cleanup resources.""" + memory_logger.info("Shutting down Mycelia memory service") + # TODO: Cleanup if needed + # Example: + # if hasattr(self, 'client'): + # self.client.close() + self._initialized = False diff --git a/backends/advanced/src/advanced_omi_backend/memory/providers/openmemory_mcp_service.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py similarity index 100% rename from backends/advanced/src/advanced_omi_backend/memory/providers/openmemory_mcp_service.py rename to backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py diff --git a/backends/advanced/src/advanced_omi_backend/memory/providers/vector_stores.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/vector_stores.py similarity index 100% rename from backends/advanced/src/advanced_omi_backend/memory/providers/vector_stores.py rename to 
backends/advanced/src/advanced_omi_backend/services/memory/providers/vector_stores.py diff --git a/backends/advanced/src/advanced_omi_backend/memory/service_factory.py b/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py similarity index 89% rename from backends/advanced/src/advanced_omi_backend/memory/service_factory.py rename to backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py index df2a23c9..a51f4edc 100644 --- a/backends/advanced/src/advanced_omi_backend/memory/service_factory.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py @@ -38,21 +38,33 @@ def create_memory_service(config: MemoryConfig) -> MemoryServiceBase: if config.memory_provider == MemoryProvider.FRIEND_LITE: # Use the sophisticated Friend-Lite implementation - from .memory_service import MemoryService as FriendLiteMemoryService + from .providers.friend_lite import MemoryService as FriendLiteMemoryService return FriendLiteMemoryService(config) - + elif config.memory_provider == MemoryProvider.OPENMEMORY_MCP: # Use OpenMemory MCP implementation try: - from .providers.openmemory_mcp_service import OpenMemoryMCPService + from .providers.openmemory_mcp import OpenMemoryMCPService except ImportError as e: raise RuntimeError(f"OpenMemory MCP service not available: {e}") - + if not config.openmemory_config: raise ValueError("OpenMemory configuration is required for OPENMEMORY_MCP provider") - + return OpenMemoryMCPService(**config.openmemory_config) - + + elif config.memory_provider == MemoryProvider.MYCELIA: + # Use Mycelia implementation + try: + from .providers.mycelia import MyceliaMemoryService + except ImportError as e: + raise RuntimeError(f"Mycelia memory service not available: {e}") + + if not config.mycelia_config: + raise ValueError("Mycelia configuration is required for MYCELIA provider") + + return MyceliaMemoryService(**config.mycelia_config) + else: raise ValueError(f"Unsupported memory provider: 
{config.memory_provider}") diff --git a/backends/advanced/src/advanced_omi_backend/memory/update_memory_utils.py b/backends/advanced/src/advanced_omi_backend/services/memory/update_memory_utils.py similarity index 100% rename from backends/advanced/src/advanced_omi_backend/memory/update_memory_utils.py rename to backends/advanced/src/advanced_omi_backend/services/memory/update_memory_utils.py diff --git a/backends/advanced/src/advanced_omi_backend/memory/utils.py b/backends/advanced/src/advanced_omi_backend/services/memory/utils.py similarity index 100% rename from backends/advanced/src/advanced_omi_backend/memory/utils.py rename to backends/advanced/src/advanced_omi_backend/services/memory/utils.py diff --git a/backends/advanced/src/advanced_omi_backend/services/transcription/__init__.py b/backends/advanced/src/advanced_omi_backend/services/transcription/__init__.py index 9036aa61..06d5b57f 100644 --- a/backends/advanced/src/advanced_omi_backend/services/transcription/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/services/transcription/__init__.py @@ -10,7 +10,7 @@ import os from typing import Optional -from advanced_omi_backend.models.transcription import BaseTranscriptionProvider +from .base import BaseTranscriptionProvider from advanced_omi_backend.services.transcription.deepgram import ( DeepgramProvider, DeepgramStreamingProvider, diff --git a/backends/advanced/src/advanced_omi_backend/models/transcription.py b/backends/advanced/src/advanced_omi_backend/services/transcription/base.py similarity index 100% rename from backends/advanced/src/advanced_omi_backend/models/transcription.py rename to backends/advanced/src/advanced_omi_backend/services/transcription/base.py diff --git a/backends/advanced/src/advanced_omi_backend/services/transcription/deepgram.py b/backends/advanced/src/advanced_omi_backend/services/transcription/deepgram.py index e9261955..ee7e23fa 100644 --- 
a/backends/advanced/src/advanced_omi_backend/services/transcription/deepgram.py +++ b/backends/advanced/src/advanced_omi_backend/services/transcription/deepgram.py @@ -13,7 +13,7 @@ import httpx import websockets -from advanced_omi_backend.models.transcription import ( +from .base import ( BatchTranscriptionProvider, StreamingTranscriptionProvider, ) diff --git a/backends/advanced/src/advanced_omi_backend/services/transcription/parakeet.py b/backends/advanced/src/advanced_omi_backend/services/transcription/parakeet.py index 5b11e094..97b5b751 100644 --- a/backends/advanced/src/advanced_omi_backend/services/transcription/parakeet.py +++ b/backends/advanced/src/advanced_omi_backend/services/transcription/parakeet.py @@ -17,7 +17,7 @@ from easy_audio_interfaces.audio_interfaces import AudioChunk from easy_audio_interfaces.filesystem import LocalFileSink -from advanced_omi_backend.models.transcription import ( +from .base import ( BatchTranscriptionProvider, StreamingTranscriptionProvider, ) diff --git a/backends/advanced/src/advanced_omi_backend/utils/conversation_utils.py b/backends/advanced/src/advanced_omi_backend/utils/conversation_utils.py index 416c1fb1..b2cddf4c 100644 --- a/backends/advanced/src/advanced_omi_backend/utils/conversation_utils.py +++ b/backends/advanced/src/advanced_omi_backend/utils/conversation_utils.py @@ -52,8 +52,9 @@ def analyze_speech(transcript_data: dict) -> dict: Analyze transcript for meaningful speech to determine if conversation should be created. 
Uses configurable thresholds from environment: - - SPEECH_DETECTION_MIN_WORDS (default: 5) - - SPEECH_DETECTION_MIN_CONFIDENCE (default: 0.5) + - SPEECH_DETECTION_MIN_WORDS (default: 10) + - SPEECH_DETECTION_MIN_CONFIDENCE (default: 0.7) + - SPEECH_DETECTION_MIN_DURATION (default: 10.0) Args: transcript_data: Dictionary with: @@ -99,6 +100,16 @@ def analyze_speech(transcript_data: dict) -> dict: speech_end = valid_words[-1].get("end", 0) speech_duration = speech_end - speech_start + # Check minimum duration threshold + min_duration = settings.get("min_duration", 10.0) + if speech_duration < min_duration: + return { + "has_speech": False, + "reason": f"Speech too short ({speech_duration:.1f}s < {min_duration}s)", + "word_count": len(valid_words), + "duration": speech_duration, + } + return { "has_speech": True, "word_count": len(valid_words), diff --git a/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py b/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py index fe4b1c19..6b8da757 100644 --- a/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py +++ b/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py @@ -46,7 +46,7 @@ async def process_memory_job( Dict with processing results """ from advanced_omi_backend.models.conversation import Conversation - from advanced_omi_backend.memory import get_memory_service + from advanced_omi_backend.services.memory import get_memory_service from advanced_omi_backend.users import get_user_by_id start_time = time.time() @@ -142,7 +142,7 @@ async def process_memory_job( # Determine memory provider from memory service memory_provider = conversation_model.MemoryProvider.FRIEND_LITE # Default try: - from advanced_omi_backend.memory import get_memory_service + from advanced_omi_backend.services.memory import get_memory_service memory_service_obj = get_memory_service() provider_name = memory_service_obj.__class__.__name__ if "OpenMemory" in provider_name: diff --git 
a/backends/advanced/src/advanced_omi_backend/workers/transcription_jobs.py b/backends/advanced/src/advanced_omi_backend/workers/transcription_jobs.py index 9690f286..2fc4c5ab 100644 --- a/backends/advanced/src/advanced_omi_backend/workers/transcription_jobs.py +++ b/backends/advanced/src/advanced_omi_backend/workers/transcription_jobs.py @@ -193,6 +193,75 @@ async def transcribe_full_audio_job( logger.info(f"๐Ÿ“Š Transcription complete: {len(transcript_text)} chars, {len(segments)} segments, {len(words)} words") + # Validate meaningful speech BEFORE any further processing + from advanced_omi_backend.utils.conversation_utils import analyze_speech, mark_conversation_deleted + + transcript_data = {"text": transcript_text, "words": words} + speech_analysis = analyze_speech(transcript_data) + + if not speech_analysis.get("has_speech", False): + logger.warning( + f"โš ๏ธ Transcription found no meaningful speech for conversation {conversation_id}: " + f"{speech_analysis.get('reason', 'unknown')}" + ) + + # Mark conversation as deleted + await mark_conversation_deleted( + conversation_id=conversation_id, + deletion_reason="no_meaningful_speech_batch_transcription" + ) + + # Cancel all dependent jobs (cropping, speaker recognition, memory, title/summary) + from rq import get_current_job + from rq.job import Job + + current_job = get_current_job() + if current_job: + # Get all jobs that depend on this transcription job + from advanced_omi_backend.controllers.queue_controller import redis_conn + + # Find dependent jobs by searching for jobs with this job as dependency + try: + # Cancel jobs based on conversation_id pattern + job_patterns = [ + f"crop_{conversation_id[:12]}", + f"speaker_{conversation_id[:12]}", + f"memory_{conversation_id[:12]}", + f"title_summary_{conversation_id[:12]}" + ] + + cancelled_jobs = [] + for job_id in job_patterns: + try: + dependent_job = Job.fetch(job_id, connection=redis_conn) + if dependent_job and dependent_job.get_status() in ['queued', 
'deferred', 'scheduled']: + dependent_job.cancel() + cancelled_jobs.append(job_id) + logger.info(f"โœ… Cancelled dependent job: {job_id}") + except Exception as e: + logger.debug(f"Job {job_id} not found or already completed: {e}") + + if cancelled_jobs: + logger.info(f"๐Ÿšซ Cancelled {len(cancelled_jobs)} dependent jobs due to no meaningful speech") + except Exception as cancel_error: + logger.warning(f"Failed to cancel some dependent jobs: {cancel_error}") + + # Return early with failure status + return { + "success": False, + "conversation_id": conversation_id, + "error": "no_meaningful_speech", + "reason": speech_analysis.get("reason"), + "word_count": speech_analysis.get("word_count", 0), + "duration": speech_analysis.get("duration", 0.0), + "deleted": True + } + + logger.info( + f"โœ… Meaningful speech validated: {speech_analysis.get('word_count')} words, " + f"{speech_analysis.get('duration', 0):.1f}s" + ) + # Calculate processing time (transcription only) processing_time = time.time() - start_time diff --git a/extras/mycelia b/extras/mycelia new file mode 160000 index 00000000..ca7b177b --- /dev/null +++ b/extras/mycelia @@ -0,0 +1 @@ +Subproject commit ca7b177b1e9228b63399da557a1ddbf696cf6762 From 74d548271c618a7a8f33306919314817b02a0692 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Mon, 1 Dec 2025 14:14:33 +0000 Subject: [PATCH 02/31] Added support for mycelia --- Makefile | 52 ++- backends/advanced/.env.template | 24 +- backends/advanced/docker-compose-test.yml | 58 +++ .../scripts/create_mycelia_api_key.py | 112 +++++ .../scripts/sync_friendlite_mycelia.py | 382 ++++++++++++++++++ .../src/advanced_omi_backend/app_factory.py | 7 + .../advanced/src/advanced_omi_backend/auth.py | 35 ++ .../controllers/memory_controller.py | 54 ++- .../routers/modules/health_routes.py | 36 ++ .../routers/modules/memory_routes.py | 18 +- .../services/memory/base.py | 13 + .../services/memory/config.py | 19 +- .../services/memory/providers/mycelia.py | 378 
++++++++++++----- .../services/mycelia_sync.py | 248 ++++++++++++ backends/advanced/webui/src/App.tsx | 4 +- .../webui/src/contexts/AuthContext.tsx | 3 + .../webui/src/pages/MemoriesRouter.tsx | 22 + 17 files changed, 1359 insertions(+), 106 deletions(-) create mode 100755 backends/advanced/scripts/create_mycelia_api_key.py create mode 100644 backends/advanced/scripts/sync_friendlite_mycelia.py create mode 100644 backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py create mode 100644 backends/advanced/webui/src/pages/MemoriesRouter.tsx diff --git a/Makefile b/Makefile index 1a5a3829..3d03a180 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ export $(shell sed 's/=.*//' config.env | grep -v '^\s*$$' | grep -v '^\s*\#') SCRIPTS_DIR := scripts K8S_SCRIPTS_DIR := $(SCRIPTS_DIR)/k8s -.PHONY: help menu setup-k8s setup-infrastructure setup-rbac setup-storage-pvc config config-docker config-k8s config-all clean deploy deploy-docker deploy-k8s deploy-k8s-full deploy-infrastructure deploy-apps check-infrastructure check-apps build-backend up-backend down-backend k8s-status k8s-cleanup k8s-purge audio-manage test-robot test-robot-integration test-robot-unit test-robot-endpoints test-robot-specific test-robot-clean +.PHONY: help menu setup-k8s setup-infrastructure setup-rbac setup-storage-pvc config config-docker config-k8s config-all clean deploy deploy-docker deploy-k8s deploy-k8s-full deploy-infrastructure deploy-apps check-infrastructure check-apps build-backend up-backend down-backend k8s-status k8s-cleanup k8s-purge audio-manage mycelia-sync-status mycelia-sync-all mycelia-sync-user mycelia-check-orphans mycelia-reassign-orphans test-robot test-robot-integration test-robot-unit test-robot-endpoints test-robot-specific test-robot-clean # Default target .DEFAULT_GOAL := menu @@ -57,6 +57,13 @@ menu: ## Show interactive menu (default) @echo " check-apps ๐Ÿ” Check application services" @echo " clean ๐Ÿงน Clean up generated files" @echo + @echo "๐Ÿ”„ 
Mycelia Sync:" + @echo " mycelia-sync-status ๐Ÿ“Š Show Mycelia OAuth sync status" + @echo " mycelia-sync-all ๐Ÿ”„ Sync all Friend-Lite users to Mycelia" + @echo " mycelia-sync-user ๐Ÿ‘ค Sync specific user (EMAIL=user@example.com)" + @echo " mycelia-check-orphans ๐Ÿ” Find orphaned Mycelia objects" + @echo " mycelia-reassign-orphans โ™ป๏ธ Reassign orphans (EMAIL=admin@example.com)" + @echo @echo "Current configuration:" @echo " DOMAIN: $(DOMAIN)" @echo " DEPLOYMENT_MODE: $(DEPLOYMENT_MODE)" @@ -101,6 +108,13 @@ help: ## Show detailed help for all targets @echo "๐ŸŽต AUDIO MANAGEMENT:" @echo " audio-manage Interactive audio file management" @echo + @echo "๐Ÿ”„ MYCELIA SYNC:" + @echo " mycelia-sync-status Show Mycelia OAuth sync status for all users" + @echo " mycelia-sync-all Sync all Friend-Lite users to Mycelia OAuth" + @echo " mycelia-sync-user Sync specific user (EMAIL=user@example.com)" + @echo " mycelia-check-orphans Find Mycelia objects without Friend-Lite owner" + @echo " mycelia-reassign-orphans Reassign orphaned objects (EMAIL=admin@example.com)" + @echo @echo "๐Ÿงช ROBOT FRAMEWORK TESTING:" @echo " test-robot Run all Robot Framework tests" @echo " test-robot-integration Run integration tests only" @@ -333,6 +347,42 @@ audio-manage: ## Interactive audio file management @echo "๐ŸŽต Starting audio file management..." @$(SCRIPTS_DIR)/manage-audio-files.sh +# ======================================== +# MYCELIA SYNC +# ======================================== + +mycelia-sync-status: ## Show Mycelia OAuth sync status for all users + @echo "๐Ÿ“Š Checking Mycelia OAuth sync status..." + @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --status + +mycelia-sync-all: ## Sync all Friend-Lite users to Mycelia OAuth + @echo "๐Ÿ”„ Syncing all Friend-Lite users to Mycelia OAuth..." + @echo "โš ๏ธ This will create OAuth credentials for users without them" + @read -p "Continue? 
(y/N): " confirm && [ "$$confirm" = "y" ] || exit 1 + @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --sync-all + +mycelia-sync-user: ## Sync specific user to Mycelia OAuth (usage: make mycelia-sync-user EMAIL=user@example.com) + @echo "๐Ÿ‘ค Syncing specific user to Mycelia OAuth..." + @if [ -z "$(EMAIL)" ]; then \ + echo "โŒ EMAIL parameter is required. Usage: make mycelia-sync-user EMAIL=user@example.com"; \ + exit 1; \ + fi + @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --email $(EMAIL) + +mycelia-check-orphans: ## Find Mycelia objects without Friend-Lite owner + @echo "๐Ÿ” Checking for orphaned Mycelia objects..." + @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --check-orphans + +mycelia-reassign-orphans: ## Reassign orphaned objects to user (usage: make mycelia-reassign-orphans EMAIL=admin@example.com) + @echo "โ™ป๏ธ Reassigning orphaned Mycelia objects..." + @if [ -z "$(EMAIL)" ]; then \ + echo "โŒ EMAIL parameter is required. Usage: make mycelia-reassign-orphans EMAIL=admin@example.com"; \ + exit 1; \ + fi + @echo "โš ๏ธ This will reassign all orphaned objects to: $(EMAIL)" + @read -p "Continue? 
(y/N): " confirm && [ "$$confirm" = "y" ] || exit 1 + @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --reassign-orphans --target-email $(EMAIL) + # ======================================== # TESTING TARGETS # ======================================== diff --git a/backends/advanced/.env.template b/backends/advanced/.env.template index 01724f19..60d2c99e 100644 --- a/backends/advanced/.env.template +++ b/backends/advanced/.env.template @@ -99,8 +99,8 @@ QDRANT_BASE_URL=qdrant # MEMORY PROVIDER CONFIGURATION # ======================================== -# Memory Provider: "friend_lite" (default) or "openmemory_mcp" -# +# Memory Provider: "friend_lite" (default), "openmemory_mcp", or "mycelia" +# # Friend-Lite (default): In-house memory system with full control # - Custom LLM-powered extraction with individual fact storage # - Smart deduplication and memory updates (ADD/UPDATE/DELETE) @@ -113,6 +113,13 @@ QDRANT_BASE_URL=qdrant # - Web UI at http://localhost:8765 # - Requires external server setup # +# Mycelia: Full-featured personal memory timeline +# - Voice, screenshots, and text capture +# - Timeline UI with waveform playback +# - Conversation extraction and semantic search +# - OAuth federation for cross-instance sharing +# - Requires Mycelia server setup (extras/mycelia) +# # See MEMORY_PROVIDERS.md for detailed comparison MEMORY_PROVIDER=friend_lite @@ -128,6 +135,19 @@ MEMORY_PROVIDER=friend_lite # OPENMEMORY_USER_ID=openmemory # OPENMEMORY_TIMEOUT=30 +# ---------------------------------------- +# Mycelia Configuration +# (Only needed if MEMORY_PROVIDER=mycelia) +# ---------------------------------------- +# First start Mycelia: +# cd extras/mycelia && docker compose up -d redis mongo mongo-search +# cd extras/mycelia/backend && deno task dev +# +# IMPORTANT: JWT_SECRET in Mycelia backend/.env must match AUTH_SECRET_KEY above +# MYCELIA_URL=http://host.docker.internal:5173 +# MYCELIA_DB=mycelia # Database name (use mycelia_test for test 
environment) +# MYCELIA_TIMEOUT=30 + # ======================================== # OPTIONAL FEATURES # ======================================== diff --git a/backends/advanced/docker-compose-test.yml b/backends/advanced/docker-compose-test.yml index 029d0238..1dde7c55 100644 --- a/backends/advanced/docker-compose-test.yml +++ b/backends/advanced/docker-compose-test.yml @@ -38,6 +38,8 @@ services: - MEMORY_PROVIDER=${MEMORY_PROVIDER:-friend_lite} - OPENMEMORY_MCP_URL=${OPENMEMORY_MCP_URL:-http://host.docker.internal:8765} - OPENMEMORY_USER_ID=${OPENMEMORY_USER_ID:-openmemory} + - MYCELIA_URL=http://mycelia-backend-test:5173 + - MYCELIA_DB=mycelia_test # Disable speaker recognition in test environment to prevent segment duplication - DISABLE_SPEAKER_RECOGNITION=false - SPEAKER_SERVICE_URL=https://localhost:8085 @@ -146,6 +148,8 @@ services: - MEMORY_PROVIDER=${MEMORY_PROVIDER:-friend_lite} - OPENMEMORY_MCP_URL=${OPENMEMORY_MCP_URL:-http://host.docker.internal:8765} - OPENMEMORY_USER_ID=${OPENMEMORY_USER_ID:-openmemory} + - MYCELIA_URL=http://mycelia-backend-test:5173 + - MYCELIA_DB=mycelia_test - DISABLE_SPEAKER_RECOGNITION=false - SPEAKER_SERVICE_URL=https://localhost:8085 # Set low inactivity timeout for tests (2 seconds instead of 60) @@ -163,6 +167,60 @@ services: condition: service_started restart: unless-stopped + # Mycelia - AI memory and timeline service (test environment) + mycelia-backend-test: + build: + context: ../../extras/mycelia/backend + dockerfile: Dockerfile.simple + ports: + - "5100:5173" # Test backend port + environment: + # Shared JWT secret for Friend-Lite authentication (test key) + - JWT_SECRET=test-jwt-signing-key-for-integration-tests + - SECRET_KEY=test-jwt-signing-key-for-integration-tests + # MongoDB connection (test database) + - MONGO_URL=mongodb://mongo-test:27017 + - MONGO_DB=mycelia_test + - DATABASE_NAME=mycelia_test + # Redis connection (ioredis uses individual host/port, not URL) + - REDIS_HOST=redis-test + - REDIS_PORT=6379 + 
volumes: + - ../../extras/mycelia/backend/app:/app/app # Mount source for development + depends_on: + mongo-test: + condition: service_healthy + redis-test: + condition: service_started + healthcheck: + test: ["CMD", "deno", "eval", "fetch('http://localhost:5173/health').then(r => r.ok ? Deno.exit(0) : Deno.exit(1))"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 5s + restart: unless-stopped + profiles: + - mycelia + + mycelia-frontend-test: + build: + context: ../../extras/mycelia + dockerfile: frontend/Dockerfile.simple + args: + - VITE_API_URL=http://localhost:5100 + ports: + - "3002:8080" # Nginx serves on 8080 internally + environment: + - VITE_API_URL=http://localhost:5100 + volumes: + - ../../extras/mycelia/frontend/src:/app/src # Mount source for development + depends_on: + mycelia-backend-test: + condition: service_healthy + restart: unless-stopped + profiles: + - mycelia + # caddy: # image: caddy:2-alpine # ports: diff --git a/backends/advanced/scripts/create_mycelia_api_key.py b/backends/advanced/scripts/create_mycelia_api_key.py new file mode 100755 index 00000000..ac2149e8 --- /dev/null +++ b/backends/advanced/scripts/create_mycelia_api_key.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python3 +"""Create a proper Mycelia API key (not OAuth client) for Friend-Lite user.""" + +import os +import sys +import secrets +import hashlib +from pymongo import MongoClient +from bson import ObjectId +from datetime import datetime + +# MongoDB configuration +MONGO_URL = os.getenv("MONGO_URL", "mongodb://localhost:27018") +MYCELIA_DB = os.getenv("MYCELIA_DB", os.getenv("DATABASE_NAME", "mycelia_test")) + +# User ID from JWT or argument +USER_ID = os.getenv("USER_ID", "692c7727c7b16bdf58d23cd1") # test user + + +def hash_api_key_with_salt(api_key: str, salt: bytes) -> str: + """Hash API key with salt (matches Mycelia's hashApiKey function).""" + # SHA256(salt + apiKey) in base64 + import base64 + h = hashlib.sha256() + h.update(salt) + 
h.update(api_key.encode('utf-8')) + return base64.b64encode(h.digest()).decode('utf-8') # Use base64 like Mycelia + + +def main(): + print(f"๐Ÿ“Š MongoDB Configuration:") + print(f" URL: {MONGO_URL}") + print(f" Database: {MYCELIA_DB}\n") + + print("๐Ÿ” Creating Mycelia API Key\n") + + # Generate API key in Mycelia format: mycelia_{random_base64url} + random_part = secrets.token_urlsafe(32) + api_key = f"mycelia_{random_part}" + + # Generate salt (32 bytes) + salt = secrets.token_bytes(32) + + # Hash the API key with salt + hashed_key = hash_api_key_with_salt(api_key, salt) + + # Open prefix (first 16 chars for fast lookup) + open_prefix = api_key[:16] + + print(f"โœ… Generated API Key:") + print(f" Key: {api_key}") + print(f" Open Prefix: {open_prefix}") + print(f" Owner: {USER_ID}\n") + + # Connect to MongoDB + client = MongoClient(MONGO_URL) + db = client[MYCELIA_DB] + api_keys = db["api_keys"] + + # Check for existing active keys for this user + existing = api_keys.find_one({"owner": USER_ID, "isActive": True}) + if existing: + print(f"โ„น๏ธ Existing active API key found: {existing['_id']}") + print(f" Deactivating old key...\n") + api_keys.update_one( + {"_id": existing["_id"]}, + {"$set": {"isActive": False}} + ) + + # Create API key document (matches Mycelia's format) + import base64 + api_key_doc = { + "hashedKey": hashed_key, # Note: hashedKey, not hash! 
+ "salt": base64.b64encode(salt).decode('utf-8'), # Store as base64 like Mycelia + "owner": USER_ID, + "name": "Friend-Lite Integration", + "policies": [ + { + "resource": "**", + "action": "*", + "effect": "allow" + } + ], + "openPrefix": open_prefix, + "createdAt": datetime.now(), + "isActive": True, + } + + # Insert into database + result = api_keys.insert_one(api_key_doc) + client_id = str(result.inserted_id) + + print(f"๐ŸŽ‰ API Key Created Successfully!") + print(f" Client ID: {client_id}") + print(f" API Key: {api_key}") + print(f"\n" + "=" * 70) + print("๐Ÿ“‹ MYCELIA CONFIGURATION (Test Environment)") + print("=" * 70) + print(f"\n1๏ธโƒฃ Configure Mycelia Frontend Settings:") + print(f" โ€ข Go to: http://localhost:3002/settings") + print(f" โ€ข API Endpoint: http://localhost:5100") + print(f" โ€ข Client ID: {client_id}") + print(f" โ€ข Client Secret: {api_key}") + print(f" โ€ข Click 'Save' and then 'Test Token'") + print(f"\nโœ… This API key uses the proper Mycelia format with salt!") + print("=" * 70 + "\n") + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/backends/advanced/scripts/sync_friendlite_mycelia.py b/backends/advanced/scripts/sync_friendlite_mycelia.py new file mode 100644 index 00000000..c7051f2c --- /dev/null +++ b/backends/advanced/scripts/sync_friendlite_mycelia.py @@ -0,0 +1,382 @@ +#!/usr/bin/env python3 +""" +Sync Friend-Lite users with Mycelia OAuth credentials. + +This script helps migrate existing Friend-Lite installations to use Mycelia, +or sync existing Mycelia installations with Friend-Lite users. 
+ +Usage: + # Dry run (preview changes) + python scripts/sync_friendlite_mycelia.py --dry-run + + # Sync all users + python scripts/sync_friendlite_mycelia.py --sync-all + + # Sync specific user + python scripts/sync_friendlite_mycelia.py --email admin@example.com + + # Check for orphaned Mycelia objects + python scripts/sync_friendlite_mycelia.py --check-orphans + + # Reassign orphaned objects to a user + python scripts/sync_friendlite_mycelia.py --reassign-orphans --target-email admin@example.com + +Environment Variables: + MONGODB_URI or MONGO_URL - MongoDB connection string + MYCELIA_DB - Mycelia database name (default: mycelia) +""" + +import os +import sys +import argparse +import secrets +import hashlib +import base64 +from datetime import datetime +from typing import List, Dict, Tuple, Optional +from pymongo import MongoClient +from bson import ObjectId + +# Add parent directory to path for imports +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src')) + + +class FriendLiteMyceliaSync: + """Sync Friend-Lite users with Mycelia OAuth credentials.""" + + def __init__(self, mongo_url: str, mycelia_db: str, friendlite_db: str): + self.mongo_url = mongo_url + self.mycelia_db = mycelia_db + self.friendlite_db = friendlite_db + self.client = MongoClient(mongo_url) + + print(f"๐Ÿ“Š Connected to MongoDB:") + print(f" URL: {mongo_url}") + print(f" Friend-Lite DB: {friendlite_db}") + print(f" Mycelia DB: {mycelia_db}\n") + + def _hash_api_key_with_salt(self, api_key: str, salt: bytes) -> str: + """Hash API key with salt (matches Mycelia's implementation).""" + h = hashlib.sha256() + h.update(salt) + h.update(api_key.encode('utf-8')) + return base64.b64encode(h.digest()).decode('utf-8') + + def get_all_friendlite_users(self) -> List[Dict]: + """Get all users from Friend-Lite database.""" + db = self.client[self.friendlite_db] + users = list(db["users"].find({})) + return users + + def get_all_mycelia_objects(self) -> List[Dict]: + """Get all objects 
from Mycelia database.""" + db = self.client[self.mycelia_db] + objects = list(db["objects"].find({})) + return objects + + def get_mycelia_api_key_for_user(self, user_id: str) -> Optional[Dict]: + """Check if user already has a Mycelia API key.""" + db = self.client[self.mycelia_db] + api_key = db["api_keys"].find_one({ + "owner": user_id, + "isActive": True + }) + return api_key + + def create_mycelia_api_key(self, user_id: str, user_email: str, dry_run: bool = False) -> Tuple[str, str]: + """Create a Mycelia API key for a Friend-Lite user.""" + # Generate API key + random_part = secrets.token_urlsafe(32) + api_key = f"mycelia_{random_part}" + salt = secrets.token_bytes(32) + hashed_key = self._hash_api_key_with_salt(api_key, salt) + open_prefix = api_key[:16] + + api_key_doc = { + "hashedKey": hashed_key, + "salt": base64.b64encode(salt).decode('utf-8'), + "owner": user_id, + "name": f"Friend-Lite Auto ({user_email})", + "policies": [{"resource": "**", "action": "*", "effect": "allow"}], + "openPrefix": open_prefix, + "createdAt": datetime.utcnow(), + "isActive": True, + } + + if dry_run: + print(f" [DRY RUN] Would create API key with owner={user_id}") + return "dry-run-client-id", "dry-run-api-key" + + db = self.client[self.mycelia_db] + result = db["api_keys"].insert_one(api_key_doc) + client_id = str(result.inserted_id) + + # Update Friend-Lite user document + fl_db = self.client[self.friendlite_db] + fl_db["users"].update_one( + {"_id": ObjectId(user_id)}, + { + "$set": { + "mycelia_oauth": { + "client_id": client_id, + "created_at": datetime.utcnow(), + "synced": True + } + } + } + ) + + return client_id, api_key + + def sync_user(self, user: Dict, dry_run: bool = False) -> bool: + """Sync a single user to Mycelia OAuth.""" + user_id = str(user["_id"]) + user_email = user.get("email", "unknown") + + # Check if already synced + existing = self.get_mycelia_api_key_for_user(user_id) + if existing: + print(f"โœ“ {user_email:40} Already synced (Client ID: 
{existing['_id']})") + return False + + # Create new API key + try: + client_id, api_key = self.create_mycelia_api_key(user_id, user_email, dry_run) + + if dry_run: + print(f"โ†’ {user_email:40} [DRY RUN] Would create OAuth credentials") + else: + print(f"โœ“ {user_email:40} Created OAuth credentials") + print(f" Client ID: {client_id}") + print(f" Client Secret: {api_key}") + + return True + except Exception as e: + print(f"โœ— {user_email:40} Failed: {e}") + return False + + def sync_all_users(self, dry_run: bool = False): + """Sync all Friend-Lite users to Mycelia OAuth.""" + users = self.get_all_friendlite_users() + + print(f"{'='*80}") + print(f"SYNC ALL USERS") + print(f"{'='*80}") + print(f"Found {len(users)} Friend-Lite users\n") + + if dry_run: + print("๐Ÿ” DRY RUN MODE - No changes will be made\n") + + synced_count = 0 + for user in users: + if self.sync_user(user, dry_run): + synced_count += 1 + + print(f"\n{'='*80}") + if dry_run: + print(f"DRY RUN SUMMARY: Would sync {synced_count} users") + else: + print(f"SUMMARY: Synced {synced_count} new users, {len(users) - synced_count} already synced") + print(f"{'='*80}\n") + + def check_orphaned_objects(self): + """Find Mycelia objects with userId not matching any Friend-Lite user.""" + users = self.get_all_friendlite_users() + user_ids = {str(user["_id"]) for user in users} + + objects = self.get_all_mycelia_objects() + + print(f"{'='*80}") + print(f"ORPHANED OBJECTS CHECK") + print(f"{'='*80}") + print(f"Friend-Lite users: {len(user_ids)}") + print(f"Mycelia objects: {len(objects)}\n") + + orphaned = [] + user_object_counts = {} + + for obj in objects: + obj_user_id = obj.get("userId") + if obj_user_id: + # Count objects per user + user_object_counts[obj_user_id] = user_object_counts.get(obj_user_id, 0) + 1 + + # Check if orphaned + if obj_user_id not in user_ids: + orphaned.append(obj) + + # Display object distribution + print("Object distribution by userId:") + for user_id, count in 
sorted(user_object_counts.items(), key=lambda x: x[1], reverse=True): + status = "โœ“" if user_id in user_ids else "โœ— ORPHANED" + print(f" {user_id}: {count:4} objects {status}") + + # Display orphaned objects + if orphaned: + print(f"\nโš ๏ธ Found {len(orphaned)} orphaned objects:") + for obj in orphaned[:10]: # Show first 10 + obj_id = obj.get("_id") + obj_name = obj.get("name", "Unnamed")[:50] + obj_user_id = obj.get("userId") + print(f" {obj_id} - {obj_name} (userId: {obj_user_id})") + + if len(orphaned) > 10: + print(f" ... and {len(orphaned) - 10} more") + else: + print("\nโœ“ No orphaned objects found!") + + print(f"{'='*80}\n") + return orphaned + + def reassign_orphaned_objects(self, target_email: str, dry_run: bool = False): + """Reassign all orphaned objects to a specific Friend-Lite user.""" + # Get target user + fl_db = self.client[self.friendlite_db] + target_user = fl_db["users"].find_one({"email": target_email}) + + if not target_user: + print(f"โœ— User with email '{target_email}' not found in Friend-Lite") + return + + target_user_id = str(target_user["_id"]) + print(f"Target user: {target_email} (ID: {target_user_id})\n") + + # Find orphaned objects + users = self.get_all_friendlite_users() + user_ids = {str(user["_id"]) for user in users} + objects = self.get_all_mycelia_objects() + + orphaned = [obj for obj in objects if obj.get("userId") and obj.get("userId") not in user_ids] + + if not orphaned: + print("โœ“ No orphaned objects to reassign") + return + + print(f"{'='*80}") + print(f"REASSIGN ORPHANED OBJECTS") + print(f"{'='*80}") + print(f"Found {len(orphaned)} orphaned objects") + + if dry_run: + print("๐Ÿ” DRY RUN MODE - No changes will be made\n") + else: + print(f"Will reassign to: {target_email}\n") + + mycelia_db = self.client[self.mycelia_db] + + for obj in orphaned: + obj_id = obj["_id"] + old_user_id = obj.get("userId") + obj_name = obj.get("name", "Unnamed")[:50] + + if dry_run: + print(f"โ†’ [DRY RUN] Would reassign: 
{obj_name}") + print(f" From: {old_user_id} โ†’ To: {target_user_id}") + else: + result = mycelia_db["objects"].update_one( + {"_id": obj_id}, + {"$set": {"userId": target_user_id}} + ) + if result.modified_count > 0: + print(f"โœ“ Reassigned: {obj_name}") + else: + print(f"โœ— Failed to reassign: {obj_name}") + + print(f"\n{'='*80}") + if dry_run: + print(f"DRY RUN SUMMARY: Would reassign {len(orphaned)} objects to {target_email}") + else: + print(f"SUMMARY: Reassigned {len(orphaned)} objects to {target_email}") + print(f"{'='*80}\n") + + def display_sync_status(self): + """Display current sync status.""" + users = self.get_all_friendlite_users() + + print(f"{'='*80}") + print(f"SYNC STATUS") + print(f"{'='*80}\n") + + synced_count = 0 + unsynced_count = 0 + + print(f"{'Email':<40} {'User ID':<30} {'Status'}") + print(f"{'-'*40} {'-'*30} {'-'*20}") + + for user in users: + user_id = str(user["_id"]) + user_email = user.get("email", "unknown") + + existing = self.get_mycelia_api_key_for_user(user_id) + if existing: + status = f"โœ“ Synced (Client ID: {existing['_id']})" + synced_count += 1 + else: + status = "โœ— Not synced" + unsynced_count += 1 + + print(f"{user_email:<40} {user_id:<30} {status}") + + print(f"\n{'='*80}") + print(f"Total users: {len(users)}") + print(f"Synced: {synced_count}") + print(f"Not synced: {unsynced_count}") + print(f"{'='*80}\n") + + +def main(): + parser = argparse.ArgumentParser( + description="Sync Friend-Lite users with Mycelia OAuth credentials", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=__doc__ + ) + + parser.add_argument("--dry-run", action="store_true", help="Preview changes without making them") + parser.add_argument("--sync-all", action="store_true", help="Sync all Friend-Lite users") + parser.add_argument("--email", type=str, help="Sync specific user by email") + parser.add_argument("--check-orphans", action="store_true", help="Check for orphaned Mycelia objects") + 
parser.add_argument("--reassign-orphans", action="store_true", help="Reassign orphaned objects to target user") + parser.add_argument("--target-email", type=str, help="Target user email for reassigning orphans") + parser.add_argument("--status", action="store_true", help="Display current sync status") + + args = parser.parse_args() + + # Get configuration from environment + mongo_url = os.getenv("MONGODB_URI") or os.getenv("MONGO_URL", "mongodb://localhost:27017") + + # Extract database name from MONGODB_URI if present + if "/" in mongo_url and mongo_url.count("/") >= 3: + friendlite_db = mongo_url.split("/")[-1].split("?")[0] or "friend-lite" + else: + friendlite_db = "friend-lite" + + mycelia_db = os.getenv("MYCELIA_DB", os.getenv("DATABASE_NAME", "mycelia")) + + # Create sync service + sync = FriendLiteMyceliaSync(mongo_url, mycelia_db, friendlite_db) + + # Execute requested action + if args.status: + sync.display_sync_status() + elif args.sync_all: + sync.sync_all_users(dry_run=args.dry_run) + elif args.email: + fl_db = sync.client[friendlite_db] + user = fl_db["users"].find_one({"email": args.email}) + if user: + sync.sync_user(user, dry_run=args.dry_run) + else: + print(f"โœ— User with email '{args.email}' not found") + elif args.check_orphans: + sync.check_orphaned_objects() + elif args.reassign_orphans: + if not args.target_email: + print("โœ— --target-email required for --reassign-orphans") + sys.exit(1) + sync.reassign_orphaned_objects(args.target_email, dry_run=args.dry_run) + else: + parser.print_help() + + +if __name__ == "__main__": + main() diff --git a/backends/advanced/src/advanced_omi_backend/app_factory.py b/backends/advanced/src/advanced_omi_backend/app_factory.py index 8aa0c97a..65b1adbf 100644 --- a/backends/advanced/src/advanced_omi_backend/app_factory.py +++ b/backends/advanced/src/advanced_omi_backend/app_factory.py @@ -73,6 +73,13 @@ async def lifespan(app: FastAPI): application_logger.error(f"Failed to create admin user: {e}") # Don't 
raise here as this is not critical for startup + # Sync admin user with Mycelia OAuth (if using Mycelia memory provider) + try: + from advanced_omi_backend.services.mycelia_sync import sync_admin_on_startup + await sync_admin_on_startup() + except Exception as e: + application_logger.error(f"Failed to sync admin with Mycelia OAuth: {e}") + # Don't raise here as this is not critical for startup # Initialize Redis connection for RQ try: diff --git a/backends/advanced/src/advanced_omi_backend/auth.py b/backends/advanced/src/advanced_omi_backend/auth.py index a39637f1..8b489988 100644 --- a/backends/advanced/src/advanced_omi_backend/auth.py +++ b/backends/advanced/src/advanced_omi_backend/auth.py @@ -98,6 +98,41 @@ def get_jwt_strategy() -> JWTStrategy: ) # 24 hours for device compatibility +def generate_jwt_for_user(user_id: str, user_email: str) -> str: + """Generate a JWT token for a user to authenticate with external services. + + This function creates a JWT token that can be used to authenticate with + services that share the same AUTH_SECRET_KEY, such as Mycelia. 
+ + Args: + user_id: User's unique identifier (MongoDB ObjectId as string) + user_email: User's email address + + Returns: + JWT token string valid for 24 hours + + Example: + >>> token = generate_jwt_for_user("507f1f77bcf86cd799439011", "user@example.com") + >>> # Use token to call Mycelia API + """ + from datetime import datetime, timedelta + import jwt + + # Create JWT payload matching Friend-Lite's standard format + payload = { + "sub": user_id, # Subject = user ID + "email": user_email, + "iss": "friend-lite", # Issuer + "aud": "friend-lite", # Audience + "exp": datetime.utcnow() + timedelta(hours=24), # 24 hour expiration + "iat": datetime.utcnow(), # Issued at + } + + # Sign the token with the same secret key + token = jwt.encode(payload, SECRET_KEY, algorithm="HS256") + return token + + # Authentication backends cookie_backend = AuthenticationBackend( name="cookie", diff --git a/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py b/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py index f6ca8387..d917ec18 100644 --- a/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py +++ b/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py @@ -27,13 +27,16 @@ async def get_memories(user: User, limit: int, user_id: Optional[str] = None): # Execute memory retrieval directly (now async) memories = await memory_service.get_all_memories(target_user_id, limit) - + # Get total count (service returns None on failure) total_count = await memory_service.count_memories(target_user_id) + # Convert MemoryEntry objects to dicts for JSON serialization + memories_dicts = [mem.to_dict() if hasattr(mem, 'to_dict') else mem for mem in memories] + return { - "memories": memories, - "count": len(memories), + "memories": memories_dicts, + "count": len(memories), "total_count": total_count, "user_id": target_user_id } @@ -87,9 +90,12 @@ async def search_memories(query: str, user: User, limit: int, 
score_threshold: f # Execute search directly (now async) search_results = await memory_service.search_memories(query, target_user_id, limit, score_threshold) + # Convert MemoryEntry objects to dicts for JSON serialization + results_dicts = [result.to_dict() if hasattr(result, 'to_dict') else result for result in search_results] + return { "query": query, - "results": search_results, + "results": results_dicts, "count": len(search_results), "user_id": target_user_id, } @@ -157,6 +163,46 @@ async def get_memories_unfiltered(user: User, limit: int, user_id: Optional[str] ) +async def add_memory(content: str, user: User, source_id: Optional[str] = None): + """Add a memory directly from content text. Extracts structured memories from the provided content.""" + try: + memory_service = get_memory_service() + + # Use source_id or generate a unique one + memory_source_id = source_id or f"manual_{user.user_id}_{int(asyncio.get_event_loop().time())}" + + # Extract memories from content + success, memory_ids = await memory_service.add_memory( + transcript=content, + client_id=f"{user.user_id[:8]}-manual", + source_id=memory_source_id, + user_id=user.user_id, + user_email=user.email, + allow_update=False, + db_helper=None + ) + + if success: + return { + "success": True, + "memory_ids": memory_ids, + "count": len(memory_ids), + "source_id": memory_source_id, + "message": f"Successfully created {len(memory_ids)} memory/memories" + } + else: + return JSONResponse( + status_code=500, + content={"success": False, "message": "Failed to create memories"} + ) + + except Exception as e: + audio_logger.error(f"Error adding memory: {e}", exc_info=True) + return JSONResponse( + status_code=500, content={"success": False, "message": f"Error adding memory: {str(e)}"} + ) + + async def get_all_memories_admin(user: User, limit: int): """Get all memories across all users for admin review. 
Admin only.""" try: diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py index 1634bc3d..06e0da1e 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py @@ -273,6 +273,42 @@ async def health_check(): "provider": "openmemory_mcp", "critical": False, } + elif memory_provider == "mycelia": + # Mycelia memory service check + try: + # Test Mycelia memory service connection with timeout + test_success = await asyncio.wait_for(memory_service.test_connection(), timeout=8.0) + if test_success: + health_status["services"]["memory_service"] = { + "status": "โœ… Mycelia Memory Connected", + "healthy": True, + "provider": "mycelia", + "critical": False, + } + else: + health_status["services"]["memory_service"] = { + "status": "โš ๏ธ Mycelia Memory Test Failed", + "healthy": False, + "provider": "mycelia", + "critical": False, + } + overall_healthy = False + except asyncio.TimeoutError: + health_status["services"]["memory_service"] = { + "status": "โš ๏ธ Mycelia Memory Timeout (8s) - Check Mycelia service", + "healthy": False, + "provider": "mycelia", + "critical": False, + } + overall_healthy = False + except Exception as e: + health_status["services"]["memory_service"] = { + "status": f"โš ๏ธ Mycelia Memory Failed: {str(e)}", + "healthy": False, + "provider": "mycelia", + "critical": False, + } + overall_healthy = False else: health_status["services"]["memory_service"] = { "status": f"โŒ Unknown memory provider: {memory_provider}", diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py index 4d71ce6d..c9bc75e3 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py +++ 
b/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py @@ -7,7 +7,8 @@ import logging from typing import Optional -from fastapi import APIRouter, Depends, Query +from fastapi import APIRouter, Depends, Query, Body +from pydantic import BaseModel from advanced_omi_backend.auth import current_active_user, current_superuser from advanced_omi_backend.controllers import memory_controller @@ -18,6 +19,12 @@ router = APIRouter(prefix="/memories", tags=["memories"]) +class AddMemoryRequest(BaseModel): + """Request model for adding a memory.""" + content: str + source_id: Optional[str] = None + + @router.get("") async def get_memories( current_user: User = Depends(current_active_user), @@ -50,6 +57,15 @@ async def search_memories( return await memory_controller.search_memories(query, current_user, limit, score_threshold, user_id) +@router.post("") +async def add_memory( + request: AddMemoryRequest, + current_user: User = Depends(current_active_user) +): + """Add a memory directly from content text. The service will extract structured memories from the provided content.""" + return await memory_controller.add_memory(request.content, current_user, request.source_id) + + @router.delete("/{memory_id}") async def delete_memory(memory_id: str, current_user: User = Depends(current_active_user)): """Delete a memory by ID. 
Users can only delete their own memories, admins can delete any.""" diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/base.py b/backends/advanced/src/advanced_omi_backend/services/memory/base.py index 65d39d75..f205ecdb 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/base.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/base.py @@ -49,6 +49,19 @@ def __post_init__(self): if self.created_at is None: self.created_at = str(int(time.time())) + def to_dict(self) -> Dict[str, Any]: + """Convert MemoryEntry to dictionary for JSON serialization.""" + return { + "id": self.id, + "memory": self.content, # Frontend expects 'memory' key + "content": self.content, # Also provide 'content' for consistency + "metadata": self.metadata, + "embedding": self.embedding, + "score": self.score, + "created_at": self.created_at, + "user_id": (self.metadata or {}).get("user_id") # Extract user_id from metadata; guard against None metadata + } + class MemoryServiceBase(ABC): """Abstract base class defining the core memory service interface. 
diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/config.py b/backends/advanced/src/advanced_omi_backend/services/memory/config.py index ae03fcd8..9d5c8324 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/config.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/config.py @@ -159,14 +159,29 @@ def build_memory_config_from_env() -> MemoryConfig: user_id=os.getenv("OPENMEMORY_USER_ID", "default"), timeout=int(os.getenv("OPENMEMORY_TIMEOUT", "30")) ) - + memory_logger.info(f"๐Ÿ”ง Memory config: Provider=OpenMemory MCP, URL={openmemory_config['server_url']}") - + return MemoryConfig( memory_provider=memory_provider_enum, openmemory_config=openmemory_config, timeout_seconds=int(os.getenv("OPENMEMORY_TIMEOUT", "30")) ) + + # For Mycelia provider, configuration is simple - just URL + if memory_provider_enum == MemoryProvider.MYCELIA: + mycelia_config = create_mycelia_config( + api_url=os.getenv("MYCELIA_URL", "http://localhost:5173"), + timeout=int(os.getenv("MYCELIA_TIMEOUT", "30")) + ) + + memory_logger.info(f"๐Ÿ”ง Memory config: Provider=Mycelia, URL={mycelia_config['api_url']}") + + return MemoryConfig( + memory_provider=memory_provider_enum, + mycelia_config=mycelia_config, + timeout_seconds=int(os.getenv("MYCELIA_TIMEOUT", "30")) + ) # For Friend-Lite provider, use existing complex configuration # Import config loader diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py index ccf30160..3033c307 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py @@ -5,7 +5,9 @@ """ import logging -from typing import Any, List, Optional, Tuple +from datetime import datetime +from typing import Any, Dict, List, Optional, Tuple +import httpx from ..base import MemoryEntry, 
MemoryServiceBase @@ -16,11 +18,10 @@ class MyceliaMemoryService(MemoryServiceBase): """Memory service implementation using Mycelia backend. This class implements the MemoryServiceBase interface by delegating memory - operations to a Mycelia server. + operations to a Mycelia server using JWT authentication from Friend-Lite. Args: api_url: Mycelia API endpoint URL - api_key: Optional API key for authentication timeout: Request timeout in seconds **kwargs: Additional configuration parameters """ @@ -28,7 +29,6 @@ class MyceliaMemoryService(MemoryServiceBase): def __init__( self, api_url: str = "http://localhost:8080", - api_key: Optional[str] = None, timeout: int = 30, **kwargs ): @@ -36,27 +36,34 @@ def __init__( Args: api_url: Mycelia API endpoint - api_key: Optional API key for authentication timeout: Request timeout in seconds **kwargs: Additional configuration parameters """ - self.api_url = api_url - self.api_key = api_key + self.api_url = api_url.rstrip("/") self.timeout = timeout self.config = kwargs self._initialized = False + self._client: Optional[httpx.AsyncClient] = None memory_logger.info(f"๐Ÿ„ Initializing Mycelia memory service at {api_url}") async def initialize(self) -> None: """Initialize Mycelia client and verify connection.""" try: - # TODO: Initialize your Mycelia client here - # Example: self.client = MyceliaClient(self.api_url, self.api_key) - - # Test connection - if not await self.test_connection(): - raise RuntimeError("Failed to connect to Mycelia service") + # Initialize HTTP client + self._client = httpx.AsyncClient( + base_url=self.api_url, + timeout=self.timeout, + headers={"Content-Type": "application/json"} + ) + + # Test connection directly (without calling test_connection to avoid recursion) + try: + response = await self._client.get("/health") + if response.status_code != 200: + raise RuntimeError(f"Health check failed with status {response.status_code}") + except httpx.HTTPError as e: + raise RuntimeError(f"Failed to connect to 
Mycelia service: {e}") self._initialized = True memory_logger.info("โœ… Mycelia memory service initialized successfully") @@ -65,6 +72,109 @@ async def initialize(self) -> None: memory_logger.error(f"โŒ Failed to initialize Mycelia service: {e}") raise RuntimeError(f"Mycelia initialization failed: {e}") + async def _get_user_jwt(self, user_id: str, user_email: Optional[str] = None) -> str: + """Get JWT token for a user (with optional user lookup). + + Args: + user_id: User ID + user_email: Optional user email (will lookup if not provided) + + Returns: + JWT token string + + Raises: + ValueError: If user not found + """ + from advanced_omi_backend.auth import generate_jwt_for_user + + # If email not provided, lookup user + if not user_email: + from advanced_omi_backend.users import User + user = await User.get(user_id) + if not user: + raise ValueError(f"User {user_id} not found") + user_email = user.email + + return generate_jwt_for_user(user_id, user_email) + + @staticmethod + def _extract_bson_id(raw_id: Any) -> str: + """Extract ID from Mycelia BSON format {"$oid": "..."} or plain string.""" + if isinstance(raw_id, dict) and "$oid" in raw_id: + return raw_id["$oid"] + return str(raw_id) + + @staticmethod + def _extract_bson_date(date_obj: Any) -> Any: + """Extract date from Mycelia BSON format {"$date": "..."} or plain value.""" + if isinstance(date_obj, dict) and "$date" in date_obj: + return date_obj["$date"] + return date_obj + + def _mycelia_object_to_memory_entry(self, obj: Dict, user_id: str) -> MemoryEntry: + """Convert Mycelia object to MemoryEntry. 
+ + Args: + obj: Mycelia object from API + user_id: User ID for metadata + + Returns: + MemoryEntry object + """ + memory_id = self._extract_bson_id(obj.get("_id", "")) + memory_content = obj.get("details", "") + + return MemoryEntry( + id=memory_id, + content=memory_content, + metadata={ + "user_id": user_id, + "name": obj.get("name", ""), + "aliases": obj.get("aliases", []), + "created_at": self._extract_bson_date(obj.get("createdAt")), + "updated_at": self._extract_bson_date(obj.get("updatedAt")), + }, + created_at=self._extract_bson_date(obj.get("createdAt")) + ) + + async def _call_resource( + self, + action: str, + jwt_token: str, + **params + ) -> Dict[str, Any]: + """Call Mycelia objects resource with JWT authentication. + + Args: + action: Action to perform (create, list, get, delete, etc.) + jwt_token: User's JWT token from Friend-Lite + **params: Additional parameters for the action + + Returns: + Response data from Mycelia + + Raises: + RuntimeError: If API call fails + """ + if not self._client: + raise RuntimeError("Mycelia client not initialized") + + try: + response = await self._client.post( + "/api/resource/tech.mycelia.objects", + json={"action": action, **params}, + headers={"Authorization": f"Bearer {jwt_token}"} + ) + response.raise_for_status() + return response.json() + + except httpx.HTTPStatusError as e: + memory_logger.error(f"Mycelia API error: {e.response.status_code} - {e.response.text}") + raise RuntimeError(f"Mycelia API error: {e.response.status_code}") + except Exception as e: + memory_logger.error(f"Failed to call Mycelia resource: {e}") + raise RuntimeError(f"Mycelia API call failed: {e}") + async def add_memory( self, transcript: str, @@ -90,21 +200,37 @@ async def add_memory( Tuple of (success: bool, created_memory_ids: List[str]) """ try: - # TODO: Implement your Mycelia API call to add memories - # Example implementation: - # response = await self.client.add_memories( - # transcript=transcript, - # user_id=user_id, - # 
metadata={ - # "client_id": client_id, - # "source_id": source_id, - # "user_email": user_email, - # } - # ) - # return (True, response.memory_ids) - - memory_logger.warning("Mycelia add_memory not yet implemented") - return (False, []) + # Generate JWT token for this user + jwt_token = await self._get_user_jwt(user_id, user_email) + + # Create a Mycelia object for this memory + # Memory content is stored in the 'details' field + memory_preview = transcript[:50] + ("..." if len(transcript) > 50 else "") + + object_data = { + "name": f"Memory: {memory_preview}", + "details": transcript, + "aliases": [source_id, client_id], # Searchable by source or client + "isPerson": False, + "isPromise": False, + "isEvent": False, + "isRelationship": False, + # Note: userId is auto-injected by Mycelia from JWT + } + + result = await self._call_resource( + action="create", + jwt_token=jwt_token, + object=object_data + ) + + memory_id = result.get("insertedId") + if memory_id: + memory_logger.info(f"โœ… Created Mycelia memory object: {memory_id}") + return (True, [memory_id]) + else: + memory_logger.error("Failed to create Mycelia memory: no insertedId returned") + return (False, []) except Exception as e: memory_logger.error(f"Failed to add memory via Mycelia: {e}") @@ -124,28 +250,39 @@ async def search_memories( Returns: List of matching MemoryEntry objects ordered by relevance """ + if not self._initialized: + await self.initialize() + try: - # TODO: Implement Mycelia search - # Example implementation: - # results = await self.client.search( - # query=query, - # user_id=user_id, - # limit=limit, - # threshold=score_threshold - # ) - # return [ - # MemoryEntry( - # id=r.id, - # memory=r.text, - # user_id=user_id, - # metadata=r.metadata, - # score=r.score - # ) - # for r in results - # ] - - memory_logger.warning("Mycelia search_memories not yet implemented") - return [] + # Generate JWT token for this user + jwt_token = await self._get_user_jwt(user_id) + + # Search using 
Mycelia's list action with searchTerm option + result = await self._call_resource( + action="list", + jwt_token=jwt_token, + filters={}, # Auto-scoped by userId in Mycelia + options={ + "searchTerm": query, + "limit": limit, + "sort": {"updatedAt": -1} # Most recent first + } + ) + + # Convert Mycelia objects to MemoryEntry objects + memories = [] + for i, obj in enumerate(result): + # Calculate a simple relevance score (0-1) based on position + # (Mycelia doesn't provide semantic similarity scores yet) + score = 1.0 - (i * 0.1) # Decaying score + if score < score_threshold: + continue + + entry = self._mycelia_object_to_memory_entry(obj, user_id) + entry.score = score # Override score + memories.append(entry) + + return memories except Exception as e: memory_logger.error(f"Failed to search memories via Mycelia: {e}") @@ -163,22 +300,27 @@ async def get_all_memories( Returns: List of MemoryEntry objects for the user """ + if not self._initialized: + await self.initialize() + try: - # TODO: Implement Mycelia get all - # Example implementation: - # results = await self.client.get_all(user_id=user_id, limit=limit) - # return [ - # MemoryEntry( - # id=r.id, - # memory=r.text, - # user_id=user_id, - # metadata=r.metadata - # ) - # for r in results - # ] - - memory_logger.warning("Mycelia get_all_memories not yet implemented") - return [] + # Generate JWT token for this user + jwt_token = await self._get_user_jwt(user_id) + + # List all objects for this user (auto-scoped by Mycelia) + result = await self._call_resource( + action="list", + jwt_token=jwt_token, + filters={}, # Auto-scoped by userId + options={ + "limit": limit, + "sort": {"updatedAt": -1} # Most recent first + } + ) + + # Convert Mycelia objects to MemoryEntry objects + memories = [self._mycelia_object_to_memory_entry(obj, user_id) for obj in result] + return memories except Exception as e: memory_logger.error(f"Failed to get memories via Mycelia: {e}") @@ -193,34 +335,67 @@ async def count_memories(self, 
user_id: str) -> Optional[int]: Returns: Total count of memories for the user, or None if not supported """ - try: - # TODO: Implement if Mycelia supports efficient counting - # Example: - # return await self.client.count(user_id=user_id) + if not self._initialized: + await self.initialize() - return None # Not implemented yet + try: + # Generate JWT token for this user + jwt_token = await self._get_user_jwt(user_id) + + # Use Mycelia's mongo resource to count objects for this user + if not self._client: + raise RuntimeError("Mycelia client not initialized") + + response = await self._client.post( + "/api/resource/tech.mycelia.mongo", + json={ + "action": "count", + "collection": "objects", + "query": {"userId": user_id} + }, + headers={"Authorization": f"Bearer {jwt_token}"} + ) + response.raise_for_status() + return response.json() except Exception as e: memory_logger.error(f"Failed to count memories via Mycelia: {e}") return None - async def delete_memory(self, memory_id: str) -> bool: + async def delete_memory(self, memory_id: str, user_id: Optional[str] = None, user_email: Optional[str] = None) -> bool: """Delete a specific memory from Mycelia. 
Args: memory_id: Unique identifier of the memory to delete + user_id: Optional user identifier for authentication + user_email: Optional user email for authentication Returns: True if successfully deleted, False otherwise """ try: - # TODO: Implement Mycelia delete - # Example: - # success = await self.client.delete(memory_id=memory_id) - # return success - - memory_logger.warning("Mycelia delete_memory not yet implemented") - return False + # Need user credentials for JWT - if not provided, we can't delete + if not user_id: + memory_logger.error("User ID required for Mycelia delete operation") + return False + + # Generate JWT token for this user + jwt_token = await self._get_user_jwt(user_id, user_email) + + # Delete the object (auto-scoped by userId in Mycelia) + result = await self._call_resource( + action="delete", + jwt_token=jwt_token, + id=memory_id + ) + + deleted_count = result.get("deletedCount", 0) + if deleted_count > 0: + memory_logger.info(f"โœ… Deleted Mycelia memory object: {memory_id}") + return True + else: + memory_logger.warning(f"No memory deleted with ID: {memory_id}") + return False except Exception as e: memory_logger.error(f"Failed to delete memory via Mycelia: {e}") @@ -236,13 +411,26 @@ async def delete_all_user_memories(self, user_id: str) -> int: Number of memories that were deleted """ try: - # TODO: Implement Mycelia bulk delete - # Example: - # count = await self.client.delete_all(user_id=user_id) - # return count - - memory_logger.warning("Mycelia delete_all_user_memories not yet implemented") - return 0 + # Generate JWT token for this user + jwt_token = await self._get_user_jwt(user_id) + + # First, get all memory IDs for this user + result = await self._call_resource( + action="list", + jwt_token=jwt_token, + filters={}, # Auto-scoped by userId + options={"limit": 10000} # Large limit to get all + ) + + # Delete each memory individually + deleted_count = 0 + for obj in result: + memory_id = self._extract_bson_id(obj.get("_id", 
"")) + if await self.delete_memory(memory_id, user_id): + deleted_count += 1 + + memory_logger.info(f"โœ… Deleted {deleted_count} Mycelia memories for user {user_id}") + return deleted_count except Exception as e: memory_logger.error(f"Failed to delete user memories via Mycelia: {e}") @@ -255,13 +443,15 @@ async def test_connection(self) -> bool: True if connection is healthy, False otherwise """ try: - # TODO: Implement health check - # Example: - # return await self.client.health_check() + if not self._initialized: + await self.initialize() + + if not self._client: + return False - # For now, just check if URL is set - memory_logger.warning("Mycelia test_connection not fully implemented (stub)") - return self.api_url is not None + # Test connection by hitting a lightweight endpoint + response = await self._client.get("/health") + return response.status_code == 200 except Exception as e: memory_logger.error(f"Mycelia connection test failed: {e}") @@ -270,8 +460,8 @@ async def test_connection(self) -> bool: def shutdown(self) -> None: """Shutdown Mycelia client and cleanup resources.""" memory_logger.info("Shutting down Mycelia memory service") - # TODO: Cleanup if needed - # Example: - # if hasattr(self, 'client'): - # self.client.close() + if self._client: + # Note: httpx AsyncClient should be closed in an async context + # In practice, this will be called during shutdown so we log a warning + memory_logger.warning("HTTP client should be closed with await client.aclose()") self._initialized = False diff --git a/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py new file mode 100644 index 00000000..dd94bf63 --- /dev/null +++ b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py @@ -0,0 +1,248 @@ +""" +Mycelia OAuth Synchronization Service. 
+ +This module synchronizes Friend-Lite users with Mycelia OAuth API keys, +ensuring that when users access Mycelia directly, they use credentials +that map to their Friend-Lite user ID. +""" + +import logging +import os +import secrets +import hashlib +import base64 +from typing import Optional, Tuple +from pymongo import MongoClient +from datetime import datetime + +logger = logging.getLogger(__name__) + + +class MyceliaSyncService: + """Synchronize Friend-Lite users with Mycelia OAuth API keys.""" + + def __init__(self): + """Initialize the sync service.""" + # MongoDB configuration + # MONGODB_URI format: mongodb://host:port/database_name + self.mongo_url = os.getenv("MONGODB_URI", os.getenv("MONGO_URL", "mongodb://localhost:27017")) + + # Determine Mycelia database from environment + # Test environment uses mycelia_test, production uses mycelia + self.mycelia_db = os.getenv("MYCELIA_DB", os.getenv("DATABASE_NAME", "mycelia")) + + # Friend-Lite database - extract from MONGODB_URI or use default + # Test env: test_db, Production: friend-lite + if "/" in self.mongo_url and self.mongo_url.count("/") >= 3: + # Extract database name from mongodb://host:port/database + self.friendlite_db = self.mongo_url.split("/")[-1].split("?")[0] or "friend-lite" + else: + self.friendlite_db = "friend-lite" + + logger.info(f"MyceliaSyncService initialized: {self.mongo_url}, Mycelia DB: {self.mycelia_db}, Friend-Lite DB: {self.friendlite_db}") + + def _hash_api_key_with_salt(self, api_key: str, salt: bytes) -> str: + """Hash API key with salt (matches Mycelia's implementation).""" + h = hashlib.sha256() + h.update(salt) + h.update(api_key.encode('utf-8')) + return base64.b64encode(h.digest()).decode('utf-8') + + def _create_mycelia_api_key( + self, + user_id: str, + user_email: str + ) -> Tuple[str, str]: + """ + Create a Mycelia API key for a Friend-Lite user. 
+ + Args: + user_id: Friend-Lite user ID (MongoDB ObjectId as string) + user_email: User email address + + Returns: + Tuple of (client_id, api_key) + """ + # Generate API key in Mycelia format + random_part = secrets.token_urlsafe(32) + api_key = f"mycelia_{random_part}" + + # Generate salt + salt = secrets.token_bytes(32) + + # Hash the API key + hashed_key = self._hash_api_key_with_salt(api_key, salt) + + # Open prefix for fast lookup + open_prefix = api_key[:16] + + # Connect to Mycelia database + client = MongoClient(self.mongo_url) + db = client[self.mycelia_db] + api_keys_collection = db["api_keys"] + + # Check if user already has an active API key + existing = api_keys_collection.find_one({ + "owner": user_id, + "isActive": True, + "name": f"Friend-Lite Auto ({user_email})" + }) + + if existing: + logger.info(f"User {user_email} already has Mycelia API key: {existing['_id']}") + # Return existing credentials (we can't retrieve the original API key) + # User will need to use the stored credentials + return str(existing["_id"]), None + + # Create new API key document + api_key_doc = { + "hashedKey": hashed_key, + "salt": base64.b64encode(salt).decode('utf-8'), + "owner": user_id, # CRITICAL: owner = Friend-Lite user ID + "name": f"Friend-Lite Auto ({user_email})", + "policies": [ + { + "resource": "**", + "action": "*", + "effect": "allow" + } + ], + "openPrefix": open_prefix, + "createdAt": datetime.utcnow(), + "isActive": True, + } + + # Insert into Mycelia database + result = api_keys_collection.insert_one(api_key_doc) + client_id = str(result.inserted_id) + + logger.info(f"โœ… Created Mycelia API key for {user_email}: {client_id}") + + return client_id, api_key + + def sync_user_to_mycelia( + self, + user_id: str, + user_email: str + ) -> Optional[Tuple[str, str]]: + """ + Sync a Friend-Lite user to Mycelia OAuth. 
+ + Args: + user_id: Friend-Lite user ID + user_email: User email + + Returns: + Tuple of (client_id, api_key) or None if sync fails + """ + try: + # Create Mycelia API key + client_id, api_key = self._create_mycelia_api_key(user_id, user_email) + + # Store credentials in Friend-Lite user document (if new key was created) + if api_key: + client = MongoClient(self.mongo_url) + db = client[self.friendlite_db] + users_collection = db["users"] + + from bson import ObjectId + users_collection.update_one( + {"_id": ObjectId(user_id)}, + { + "$set": { + "mycelia_oauth": { + "client_id": client_id, + "created_at": datetime.utcnow(), + "synced": True + } + } + } + ) + + logger.info(f"โœ… Synced {user_email} with Mycelia OAuth") + return client_id, api_key + else: + logger.info(f"โ„น๏ธ {user_email} already synced with Mycelia") + return client_id, None + + except Exception as e: + logger.error(f"Failed to sync {user_email} to Mycelia: {e}", exc_info=True) + return None + + def sync_admin_user(self) -> Optional[Tuple[str, str]]: + """ + Sync the admin user on startup. 
+ + Returns: + Tuple of (client_id, api_key) if new key created, or None + """ + try: + admin_email = os.getenv("ADMIN_EMAIL") + if not admin_email: + logger.warning("ADMIN_EMAIL not set, skipping Mycelia sync") + return None + + # Get admin user from Friend-Lite database + client = MongoClient(self.mongo_url) + db = client[self.friendlite_db] + users_collection = db["users"] + + admin_user = users_collection.find_one({"email": admin_email}) + if not admin_user: + logger.warning(f"Admin user {admin_email} not found in database") + return None + + user_id = str(admin_user["_id"]) + + # Sync to Mycelia + result = self.sync_user_to_mycelia(user_id, admin_email) + + if result: + client_id, api_key = result + if api_key: + logger.info("="*70) + logger.info("๐Ÿ”‘ MYCELIA OAUTH CREDENTIALS (Save these!)") + logger.info("="*70) + logger.info(f"User: {admin_email}") + logger.info(f"Client ID: {client_id}") + logger.info(f"Client Secret: {api_key}") + logger.info("="*70) + logger.info("Configure Mycelia frontend at http://localhost:3002/settings") + logger.info("="*70) + + return result + + except Exception as e: + logger.error(f"Failed to sync admin user: {e}", exc_info=True) + return None + + +# Global instance +_sync_service: Optional[MyceliaSyncService] = None + + +def get_mycelia_sync_service() -> MyceliaSyncService: + """Get or create the global Mycelia sync service instance.""" + global _sync_service + if _sync_service is None: + _sync_service = MyceliaSyncService() + return _sync_service + + +async def sync_admin_on_startup(): + """Run admin user sync on application startup.""" + logger.info("๐Ÿ”„ Starting Mycelia OAuth synchronization...") + + # Check if Mycelia sync is enabled + memory_provider = os.getenv("MEMORY_PROVIDER", "friend_lite") + if memory_provider != "mycelia": + logger.info("Mycelia sync skipped (MEMORY_PROVIDER != mycelia)") + return + + sync_service = get_mycelia_sync_service() + result = sync_service.sync_admin_user() + + if result: + 
logger.info("โœ… Mycelia OAuth sync completed") + else: + logger.warning("โš ๏ธ Mycelia OAuth sync completed with warnings") diff --git a/backends/advanced/webui/src/App.tsx b/backends/advanced/webui/src/App.tsx index 39605087..6e497dff 100644 --- a/backends/advanced/webui/src/App.tsx +++ b/backends/advanced/webui/src/App.tsx @@ -5,7 +5,7 @@ import Layout from './components/layout/Layout' import LoginPage from './pages/LoginPage' import Chat from './pages/Chat' import Conversations from './pages/Conversations' -import Memories from './pages/Memories' +import MemoriesRouter from './pages/MemoriesRouter' import Users from './pages/Users' import System from './pages/System' import Upload from './pages/Upload' @@ -51,7 +51,7 @@ function App() { } /> - + } /> { + // Store JWT in localStorage for potential direct Mycelia access + if (token) { + localStorage.setItem('mycelia_jwt_token', token) + } + }, [token]) + + // Always show the native Memories page (works for all providers) + // Friend-Lite backend will proxy to Mycelia when needed + return +} From 37912789cd8dfefdc38a0c318bf36612e29ad5d0 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Mon, 1 Dec 2025 14:26:41 +0000 Subject: [PATCH 03/31] Fixed zombie jobs where a worker could get stuck --- .../advanced_omi_backend/utils/job_utils.py | 44 +++++++++++++++++++ .../workers/audio_jobs.py | 11 +++++ .../workers/conversation_jobs.py | 5 +++ .../workers/transcription_jobs.py | 5 +++ 4 files changed, 65 insertions(+) create mode 100644 backends/advanced/src/advanced_omi_backend/utils/job_utils.py diff --git a/backends/advanced/src/advanced_omi_backend/utils/job_utils.py b/backends/advanced/src/advanced_omi_backend/utils/job_utils.py new file mode 100644 index 00000000..6200af82 --- /dev/null +++ b/backends/advanced/src/advanced_omi_backend/utils/job_utils.py @@ -0,0 +1,44 @@ +""" +Job utility functions for RQ workers. + +This module provides common utilities for long-running RQ jobs. 
+""" + +import logging +from typing import Optional + +logger = logging.getLogger(__name__) + + +async def check_job_alive(redis_client, current_job) -> bool: + """ + Check if current RQ job still exists in Redis. + + Long-running jobs should call this periodically to detect zombie state + (when the job has been deleted from Redis but the worker is still running). + + Args: + redis_client: Async Redis client + current_job: RQ job instance from get_current_job() + + Returns: + False if job is zombie (caller should exit), True otherwise + + Example: + from rq import get_current_job + from advanced_omi_backend.utils.job_utils import check_job_alive + + current_job = get_current_job() + + while True: + # Check for zombie state each iteration + if not await check_job_alive(redis_client, current_job): + break + # ... do work ... + """ + if current_job: + job_exists = await redis_client.exists(f"rq:job:{current_job.id}") + if not job_exists: + logger.error(f"๐ŸงŸ Zombie job detected - job {current_job.id} deleted from Redis, exiting") + return False + return True diff --git a/backends/advanced/src/advanced_omi_backend/workers/audio_jobs.py b/backends/advanced/src/advanced_omi_backend/workers/audio_jobs.py index 7fc3f323..56df7149 100644 --- a/backends/advanced/src/advanced_omi_backend/workers/audio_jobs.py +++ b/backends/advanced/src/advanced_omi_backend/workers/audio_jobs.py @@ -260,7 +260,18 @@ async def audio_streaming_persistence_job( max_empty_reads = 3 # Exit after 3 consecutive empty reads (deterministic check) conversation_count = 0 + # Get current job for zombie detection + from rq import get_current_job + from advanced_omi_backend.utils.job_utils import check_job_alive + current_job = get_current_job() + while True: + # Check if job still exists in Redis (detect zombie state) + if not await check_job_alive(redis_client, current_job): + if file_sink: + await file_sink.close() + break + # Check timeout if time.time() - start_time > max_runtime: 
logger.warning(f"โฑ๏ธ Timeout reached for audio persistence {session_id}") diff --git a/backends/advanced/src/advanced_omi_backend/workers/conversation_jobs.py b/backends/advanced/src/advanced_omi_backend/workers/conversation_jobs.py index 8bc6a205..1d4bd985 100644 --- a/backends/advanced/src/advanced_omi_backend/workers/conversation_jobs.py +++ b/backends/advanced/src/advanced_omi_backend/workers/conversation_jobs.py @@ -281,6 +281,11 @@ async def open_conversation_job( logger.info("๐Ÿงช Test mode: Waiting for audio queue to drain before timeout") while True: + # Check if job still exists in Redis (detect zombie state) + from advanced_omi_backend.utils.job_utils import check_job_alive + if not await check_job_alive(redis_client, current_job): + break + # Check if session is finalizing (set by producer when recording stops) if not finalize_received: status = await redis_client.hget(session_key, "status") diff --git a/backends/advanced/src/advanced_omi_backend/workers/transcription_jobs.py b/backends/advanced/src/advanced_omi_backend/workers/transcription_jobs.py index 2fc4c5ab..4e340319 100644 --- a/backends/advanced/src/advanced_omi_backend/workers/transcription_jobs.py +++ b/backends/advanced/src/advanced_omi_backend/workers/transcription_jobs.py @@ -499,6 +499,11 @@ async def stream_speech_detection_job( # Main loop: Listen for speech while True: + # Check if job still exists in Redis (detect zombie state) + from advanced_omi_backend.utils.job_utils import check_job_alive + if not await check_job_alive(redis_client, current_job): + break + # Exit conditions session_status = await redis_client.hget(session_key, "status") if session_status and session_status.decode() in ["complete", "closed"]: From eb4df567768f5c6c471dadf8b9520479290fd4c6 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Mon, 1 Dec 2025 16:14:22 +0000 Subject: [PATCH 04/31] fixed delete for mycelia with needing user_id --- .../advanced_omi_backend/controllers/memory_controller.py | 8 +++++--- 
.../src/advanced_omi_backend/services/memory/base.py | 8 +++++--- .../services/memory/providers/__init__.py | 3 --- .../services/memory/providers/friend_lite.py | 2 +- .../services/memory/providers/mcp_client.py | 4 ++-- .../services/memory/providers/openmemory_mcp.py | 2 +- .../services/memory/providers/vector_stores.py | 4 ++-- extras/mycelia | 2 +- 8 files changed, 17 insertions(+), 16 deletions(-) diff --git a/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py b/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py index d917ec18..220ba815 100644 --- a/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py +++ b/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py @@ -117,12 +117,14 @@ async def delete_memory(memory_id: str, user: User): # Check if memory belongs to current user user_memories = await memory_service.get_all_memories(user.user_id, 1000) - memory_ids = [str(mem.get("id", mem.get("memory_id", ""))) for mem in user_memories] + # MemoryEntry is a dataclass, access id attribute directly + memory_ids = [str(mem.id) for mem in user_memories] if memory_id not in memory_ids: return JSONResponse(status_code=404, content={"message": "Memory not found"}) - # Delete the memory - success = await memory_service.delete_memory(memory_id) + # Delete the memory (pass user_id and user_email for Mycelia authentication) + audio_logger.info(f"Deleting memory {memory_id} for user_id={user.user_id}, email={user.email}") + success = await memory_service.delete_memory(memory_id, user_id=user.user_id, user_email=user.email) if success: return JSONResponse(content={"message": f"Memory {memory_id} deleted successfully"}) diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/base.py b/backends/advanced/src/advanced_omi_backend/services/memory/base.py index f205ecdb..f557c9af 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/base.py +++ 
b/backends/advanced/src/advanced_omi_backend/services/memory/base.py @@ -163,12 +163,14 @@ async def count_memories(self, user_id: str) -> Optional[int]: return None @abstractmethod - async def delete_memory(self, memory_id: str) -> bool: + async def delete_memory(self, memory_id: str, user_id: Optional[str] = None, user_email: Optional[str] = None) -> bool: """Delete a specific memory by ID. - + Args: memory_id: Unique identifier of the memory to delete - + user_id: Optional user ID for authentication (required for Mycelia provider) + user_email: Optional user email for authentication (required for Mycelia provider) + Returns: True if successfully deleted, False otherwise """ diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py index 591fbc2b..43d438cf 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py @@ -7,7 +7,6 @@ - llm_providers: LLM provider implementations (OpenAI, Ollama) - vector_stores: Vector store implementations (Qdrant) - mcp_client: MCP client utilities -- compat_service: Backward compatibility wrapper """ from .friend_lite import MemoryService as FriendLiteMemoryService @@ -16,7 +15,6 @@ from .llm_providers import OpenAIProvider from .vector_stores import QdrantVectorStore from .mcp_client import MCPClient, MCPError -from .compat_service import MemoryService __all__ = [ "FriendLiteMemoryService", @@ -26,5 +24,4 @@ "QdrantVectorStore", "MCPClient", "MCPError", - "MemoryService", ] diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py index be91a5f5..b3909a65 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py +++ 
b/backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py @@ -291,7 +291,7 @@ async def count_memories(self, user_id: str) -> Optional[int]: memory_logger.error(f"Count memories failed: {e}") return None - async def delete_memory(self, memory_id: str) -> bool: + async def delete_memory(self, memory_id: str, user_id: Optional[str] = None, user_email: Optional[str] = None) -> bool: """Delete a specific memory by ID. Args: diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py index 7942a17a..a1b9876f 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py @@ -6,7 +6,7 @@ import logging import uuid -from typing import List, Dict, Any +from typing import List, Dict, Any, Optional import httpx memory_logger = logging.getLogger("memory_service") @@ -339,7 +339,7 @@ async def delete_all_memories(self) -> int: memory_logger.error(f"Error deleting all memories: {e}") return 0 - async def delete_memory(self, memory_id: str) -> bool: + async def delete_memory(self, memory_id: str, user_id: Optional[str] = None, user_email: Optional[str] = None) -> bool: """Delete a specific memory by ID. 
Args: diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py index d5f8acd9..04b8fd67 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py @@ -283,7 +283,7 @@ async def get_all_memories( # Restore original user_id self.mcp_client.user_id = original_user_id - async def delete_memory(self, memory_id: str) -> bool: + async def delete_memory(self, memory_id: str, user_id: Optional[str] = None, user_email: Optional[str] = None) -> bool: """Delete a specific memory by ID. Args: diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/vector_stores.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/vector_stores.py index a3d04100..cf153472 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/vector_stores.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/vector_stores.py @@ -9,7 +9,7 @@ import logging import time import uuid -from typing import Any, Dict, List +from typing import Any, Dict, List, Optional from qdrant_client import AsyncQdrantClient from qdrant_client.models import ( @@ -240,7 +240,7 @@ async def get_memories(self, user_id: str, limit: int) -> List[MemoryEntry]: memory_logger.error(f"Qdrant get memories failed: {e}") return [] - async def delete_memory(self, memory_id: str) -> bool: + async def delete_memory(self, memory_id: str, user_id: Optional[str] = None, user_email: Optional[str] = None) -> bool: """Delete a specific memory from Qdrant.""" try: # Convert memory_id to proper format for Qdrant diff --git a/extras/mycelia b/extras/mycelia index ca7b177b..6c27e2cc 160000 --- a/extras/mycelia +++ b/extras/mycelia @@ -1 +1 @@ -Subproject commit ca7b177b1e9228b63399da557a1ddbf696cf6762 
+Subproject commit 6c27e2ccafd6d22933d35b5399f62552097a36b3 From 224982e30bd6a3765ae81e16dcf39540b8c41adb Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Mon, 1 Dec 2025 17:42:03 +0000 Subject: [PATCH 05/31] removed friend or would be cicular --- extras/mycelia | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extras/mycelia b/extras/mycelia index 6c27e2cc..47ea1966 160000 --- a/extras/mycelia +++ b/extras/mycelia @@ -1 +1 @@ -Subproject commit 6c27e2ccafd6d22933d35b5399f62552097a36b3 +Subproject commit 47ea1966dd8a8c10662c91c7a3f907798f6a7dbc From 8e5612ce83f8f6428e7e5145caa01b87c3ca7f41 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Mon, 1 Dec 2025 17:59:36 +0000 Subject: [PATCH 06/31] added temporal memopries --- .../services/memory/__init__.py | 139 +------------- .../services/memory/config.py | 18 +- .../services/memory/prompts.py | 171 +++++++++++++++++- .../services/memory/service_factory.py | 3 +- 4 files changed, 198 insertions(+), 133 deletions(-) diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py b/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py index 42cba194..c2413ff2 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py @@ -1,151 +1,30 @@ """Memory service package. This package provides memory management functionality with support for -multiple LLM providers and vector stores for the Omi backend. +multiple memory providers (Friend-Lite, Mycelia, OpenMemory MCP). The memory service handles extraction, storage, and retrieval of memories from user conversations and interactions. 
Architecture: - base.py: Abstract base classes and interfaces -- memory_service.py: Core implementation -- compat_service.py: Backward compatibility wrapper -- providers/: LLM and vector store implementations - config.py: Configuration management +- service_factory.py: Provider selection and instantiation +- providers/friend_lite.py: Friend-Lite native provider (LLM + Qdrant) +- providers/mycelia.py: Mycelia backend provider +- providers/openmemory_mcp.py: OpenMemory MCP provider +- providers/llm_providers.py: LLM implementations (OpenAI, Ollama) +- providers/vector_stores.py: Vector store implementations (Qdrant) """ import logging memory_logger = logging.getLogger("memory_service") -# Initialize core functions to None -get_memory_service = None -MemoryService = None -shutdown_memory_service = None -test_new_memory_service = None -migrate_from_mem0 = None - -memory_logger.info("๐Ÿ†• Using NEW memory service implementation") -try: - from .providers.compat_service import ( - MemoryService, - get_memory_service, - migrate_from_mem0, - shutdown_memory_service, - ) - - # Also import core implementation for direct access - from .providers.friend_lite import MemoryService as CoreMemoryService - test_new_memory_service = None # Will be implemented if needed -except ImportError as e: - memory_logger.error(f"Failed to import new memory service: {e}") - raise - -# Also export the new architecture components for direct access when needed -try: - from .base import LLMProviderBase, MemoryEntry, MemoryServiceBase, VectorStoreBase - from .config import MemoryProvider # New memory provider enum - from .config import create_openmemory_config # New OpenMemory config function - from .config import ( - LLMProvider, - MemoryConfig, - VectorStoreProvider, - build_memory_config_from_env, - create_ollama_config, - create_openai_config, - create_qdrant_config, - ) - from .providers.openmemory_mcp import OpenMemoryMCPService # New complete memory service - from .providers.mcp_client import 
MCPClient, MCPError - from .providers.llm_providers import OpenAIProvider - from .providers.vector_stores import QdrantVectorStore - from .service_factory import create_memory_service - from .service_factory import get_memory_service as get_core_memory_service - from .service_factory import get_service_info as get_core_service_info - from .service_factory import reset_memory_service - from .service_factory import shutdown_memory_service as shutdown_core_memory_service - - # Keep backward compatibility alias - AbstractMemoryService = CoreMemoryService -except ImportError as e: - memory_logger.warning(f"Some advanced memory service components not available: {e}") - MemoryServiceBase = None - LLMProviderBase = None - VectorStoreBase = None - AbstractMemoryService = None - MemoryConfig = None - LLMProvider = None - VectorStoreProvider = None - MemoryProvider = None - build_memory_config_from_env = None - create_openai_config = None - create_ollama_config = None - create_qdrant_config = None - create_openmemory_config = None - MemoryEntry = None - OpenAIProvider = None - QdrantVectorStore = None - OpenMemoryMCPService = None - MCPClient = None - MCPError = None - get_core_memory_service = None - create_memory_service = None - shutdown_core_memory_service = None - reset_memory_service = None - get_core_service_info = None +# Import the main interface functions from service_factory +from .service_factory import get_memory_service, shutdown_memory_service __all__ = [ - # Main interface (compatible with legacy) "get_memory_service", - "MemoryService", "shutdown_memory_service", - - # New service specific (may be None if not available) - "test_new_memory_service", - "migrate_from_mem0", - "CoreMemoryService", - - # Base classes (new architecture) - "MemoryServiceBase", - "LLMProviderBase", - "VectorStoreBase", - - # Advanced components (may be None if not available) - "AbstractMemoryService", # Backward compatibility alias - "MemoryConfig", - "MemoryEntry", - "LLMProvider", 
- "VectorStoreProvider", - "MemoryProvider", # New enum - "build_memory_config_from_env", - "create_openai_config", - "create_ollama_config", - "create_qdrant_config", - "create_openmemory_config", # New function - "OpenAIProvider", - "QdrantVectorStore", - - # Complete memory service implementations - "OpenMemoryMCPService", - - # MCP client components - "MCPClient", - "MCPError", - - # Service factory functions - "get_core_memory_service", - "create_memory_service", - "shutdown_core_memory_service", - "reset_memory_service", - "get_core_service_info" ] - -def get_service_info(): - """Get information about which service is currently active.""" - return { - "active_service": "new", # Always use new service - "new_service_available": CoreMemoryService is not None, - "legacy_service_available": True, # Assume always available - "base_classes_available": MemoryServiceBase is not None, - "core_service_available": CoreMemoryService is not None - } \ No newline at end of file diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/config.py b/backends/advanced/src/advanced_omi_backend/services/memory/config.py index 9d5c8324..3946deae 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/config.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/config.py @@ -168,18 +168,34 @@ def build_memory_config_from_env() -> MemoryConfig: timeout_seconds=int(os.getenv("OPENMEMORY_TIMEOUT", "30")) ) - # For Mycelia provider, configuration is simple - just URL + # For Mycelia provider, build mycelia_config + llm_config (for temporal extraction) if memory_provider_enum == MemoryProvider.MYCELIA: mycelia_config = create_mycelia_config( api_url=os.getenv("MYCELIA_URL", "http://localhost:5173"), timeout=int(os.getenv("MYCELIA_TIMEOUT", "30")) ) + # Build LLM config for temporal extraction (Mycelia provider uses OpenAI directly) + openai_api_key = os.getenv("OPENAI_API_KEY") + if not openai_api_key: + memory_logger.warning("OPENAI_API_KEY 
not set - temporal extraction will be disabled") + llm_config = None + else: + model = os.getenv("OPENAI_MODEL", "gpt-4o-mini") + base_url = os.getenv("OPENAI_BASE_URL", "https://api.openai.com/v1") + llm_config = create_openai_config( + api_key=openai_api_key, + model=model, + base_url=base_url + ) + memory_logger.info(f"๐Ÿ”ง Mycelia temporal extraction: LLM={model}") + memory_logger.info(f"๐Ÿ”ง Memory config: Provider=Mycelia, URL={mycelia_config['api_url']}") return MemoryConfig( memory_provider=memory_provider_enum, mycelia_config=mycelia_config, + llm_config=llm_config, timeout_seconds=int(os.getenv("MYCELIA_TIMEOUT", "30")) ) diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/prompts.py b/backends/advanced/src/advanced_omi_backend/services/memory/prompts.py index f655752e..b022e39c 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/prompts.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/prompts.py @@ -5,10 +5,13 @@ 2. Updating memory with new facts (DEFAULT_UPDATE_MEMORY_PROMPT) 3. Answering questions from memory (MEMORY_ANSWER_PROMPT) 4. Procedural memory for task tracking (PROCEDURAL_MEMORY_SYSTEM_PROMPT) +5. Temporal and entity extraction (TEMPORAL_ENTITY_EXTRACTION_PROMPT) """ -from datetime import datetime +from datetime import datetime, timedelta import json +from typing import List, Optional +from pydantic import BaseModel, Field MEMORY_ANSWER_PROMPT = """ You are an expert at answering questions based on the provided memories. Your task is to provide accurate and concise answers to the questions by leveraging the information given in the memories. @@ -383,3 +386,169 @@ def get_update_memory_messages(retrieved_old_memory_dict, response_content, cust Do not return anything except the JSON format. 
""" + + +# ===== Temporal and Entity Extraction ===== + +class TimeRange(BaseModel): + """Represents a time range with start and end timestamps.""" + start: datetime = Field(description="ISO 8601 timestamp when the event/activity starts") + end: datetime = Field(description="ISO 8601 timestamp when the event/activity ends") + name: Optional[str] = Field(default=None, description="Optional name/label for this time range (e.g., 'wedding ceremony', 'party')") + + +class TemporalEntity(BaseModel): + """Structured temporal and entity information extracted from a memory fact.""" + isEvent: bool = Field(description="Whether this memory describes a scheduled event or time-bound activity") + isPerson: bool = Field(description="Whether this memory is primarily about a person or people") + isPlace: bool = Field(description="Whether this memory is primarily about a location or place") + isPromise: bool = Field(description="Whether this memory contains a commitment, promise, or agreement") + isRelationship: bool = Field(description="Whether this memory describes a relationship between people") + entities: List[str] = Field(default_factory=list, description="List of people, places, or things mentioned (e.g., ['John', 'Botanical Gardens', 'wedding'])") + timeRanges: List[TimeRange] = Field(default_factory=list, description="List of time ranges if this is a temporal memory") + emoji: Optional[str] = Field(default=None, description="Single emoji that best represents this memory") + + +def build_temporal_extraction_prompt(current_date: datetime) -> str: + """Build the temporal extraction prompt with the current date context.""" + return f"""You are an expert at extracting temporal and entity information from memory facts. + +Your task is to analyze a memory fact and extract structured information in JSON format: +1. **Entity Types**: Determine if the memory is about events, people, places, promises, or relationships +2. 
**Temporal Information**: Extract and resolve any time references to actual ISO 8601 timestamps +3. **Named Entities**: List all people, places, and things mentioned +4. **Representation**: Choose a single emoji that captures the essence of the memory + +You must return a valid JSON object with the following structure. + +**Current Date Context:** +- Today's date: {current_date.strftime("%Y-%m-%d")} +- Current time: {current_date.strftime("%H:%M:%S")} +- Day of week: {current_date.strftime("%A")} + +**Time Resolution Guidelines:** + +Relative Time References: +- "tomorrow" โ†’ Add 1 day to current date +- "next week" โ†’ Add 7 days to current date +- "in X days/weeks/months" โ†’ Add X time units to current date +- "yesterday" โ†’ Subtract 1 day from current date + +Time of Day: +- "4pm" or "16:00" โ†’ Use current date with that time +- "tomorrow at 4pm" โ†’ Use tomorrow's date at 16:00 +- "morning" โ†’ 09:00 on the referenced day +- "afternoon" โ†’ 14:00 on the referenced day +- "evening" โ†’ 18:00 on the referenced day +- "night" โ†’ 21:00 on the referenced day + +Duration Estimation (when only start time is mentioned): +- Events like "wedding", "meeting", "party" โ†’ Default 2 hours duration +- "lunch", "dinner", "breakfast" โ†’ Default 1 hour duration +- "class", "workshop" โ†’ Default 1.5 hours duration +- "appointment", "call" โ†’ Default 30 minutes duration + +**Entity Type Guidelines:** + +- **isEvent**: True for scheduled activities, appointments, meetings, parties, ceremonies, classes, etc. 
+- **isPerson**: True when the primary focus is on a person (e.g., "Met John", "Sarah is my friend") +- **isPlace**: True when the primary focus is a location (e.g., "Botanical Gardens is beautiful", "Favorite restaurant is...") +- **isPromise**: True for commitments, promises, or agreements (e.g., "I'll call you tomorrow", "We agreed to meet") +- **isRelationship**: True for statements about relationships (e.g., "John is my brother", "We're getting married") + +**Examples:** + +Input: "I'm getting married in one week! It's going to be at 4pm at the botanical gardens." +Output: +{{ + "isEvent": true, + "isPerson": false, + "isPlace": false, + "isPromise": false, + "isRelationship": true, + "entities": ["botanical gardens", "wedding"], + "timeRanges": [ + {{ + "start": "{(current_date.replace(hour=16, minute=0, second=0) + timedelta(days=7)).isoformat()}", + "end": "{(current_date.replace(hour=18, minute=0, second=0) + timedelta(days=7)).isoformat()}", + "name": "wedding ceremony" + }} + ], + "emoji": "๐Ÿ’’" +}} + +Input: "Had a meeting with John at 3pm to discuss the new project" +Output: +{{ + "isEvent": true, + "isPerson": true, + "isPlace": false, + "isPromise": false, + "isRelationship": false, + "entities": ["John", "new project", "meeting"], + "timeRanges": [ + {{ + "start": "{current_date.replace(hour=15, minute=0, second=0).isoformat()}", + "end": "{current_date.replace(hour=16, minute=0, second=0).isoformat()}", + "name": "meeting" + }} + ], + "emoji": "๐Ÿค" +}} + +Input: "My favorite restaurant is Giovanni's Italian Kitchen" +Output: +{{ + "isEvent": false, + "isPerson": false, + "isPlace": true, + "isPromise": false, + "isRelationship": false, + "entities": ["Giovanni's Italian Kitchen", "restaurant"], + "timeRanges": [], + "emoji": "๐Ÿ" +}} + +Input: "I love hiking in the mountains" +Output: +{{ + "isEvent": false, + "isPerson": false, + "isPlace": false, + "isPromise": false, + "isRelationship": false, + "entities": ["mountains", "hiking"], + 
"timeRanges": [], + "emoji": "๐Ÿ”๏ธ" +}} + +Input: "Tomorrow I need to call Sarah about the party at 2pm" +Output: +{{ + "isEvent": true, + "isPerson": true, + "isPlace": false, + "isPromise": true, + "isRelationship": false, + "entities": ["Sarah", "party", "call"], + "timeRanges": [ + {{ + "start": "{(current_date.replace(hour=14, minute=0, second=0) + timedelta(days=1)).isoformat()}", + "end": "{(current_date.replace(hour=14, minute=30, second=0) + timedelta(days=1)).isoformat()}", + "name": "call Sarah" + }} + ], + "emoji": "๐Ÿ“ž" +}} + +**Instructions:** +- Return structured data following the TemporalEntity schema +- Convert all temporal references to ISO 8601 format +- Be conservative: if there's no temporal information, leave timeRanges empty +- Multiple tags can be true (e.g., isEvent and isPerson both true for "meeting with John") +- Extract all meaningful entities (people, places, things) mentioned in the fact +- Choose an emoji that best represents the core meaning of the memory +""" + + +TEMPORAL_ENTITY_EXTRACTION_PROMPT = build_temporal_extraction_prompt(datetime.now()) diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py b/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py index a51f4edc..37922186 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py @@ -63,7 +63,8 @@ def create_memory_service(config: MemoryConfig) -> MemoryServiceBase: if not config.mycelia_config: raise ValueError("Mycelia configuration is required for MYCELIA provider") - return MyceliaMemoryService(**config.mycelia_config) + # Pass the full config so Mycelia can access llm_config + return MyceliaMemoryService(config) else: raise ValueError(f"Unsupported memory provider: {config.memory_provider}") From 11b856e84f63eaeff0e9defbca9b7f0f77d76f6e Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: 
Wed, 3 Dec 2025 19:57:21 +0000 Subject: [PATCH 07/31] added methods to memory service baseclass Tweaks to get all memory providers working --- .../services/memory/base.py | 70 ++- .../memory/providers/compat_service.py | 460 ------------------ .../services/memory/providers/friend_lite.py | 5 +- .../services/memory/providers/mcp_client.py | 110 ++++- .../services/memory/providers/mycelia.py | 452 +++++++++++++++-- .../memory/providers/openmemory_mcp.py | 142 ++++-- .../workers/memory_jobs.py | 4 +- 7 files changed, 690 insertions(+), 553 deletions(-) delete mode 100644 backends/advanced/src/advanced_omi_backend/services/memory/providers/compat_service.py diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/base.py b/backends/advanced/src/advanced_omi_backend/services/memory/base.py index f557c9af..e88e42d4 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/base.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/base.py @@ -150,18 +150,59 @@ async def get_all_memories( async def count_memories(self, user_id: str) -> Optional[int]: """Count total number of memories for a user. - + This is an optional method that providers can implement for efficient counting. Returns None if the provider doesn't support counting. - + Args: user_id: User identifier - + Returns: Total count of memories for the user, or None if not supported """ return None - + + async def get_memory(self, memory_id: str, user_id: Optional[str] = None) -> Optional[MemoryEntry]: + """Get a specific memory by ID. + + This is an optional method that providers can implement for fetching + individual memories. Returns None if the provider doesn't support it + or the memory is not found. 
+ + Args: + memory_id: Unique identifier of the memory to retrieve + user_id: Optional user ID for authentication/filtering + + Returns: + MemoryEntry object if found, None otherwise + """ + return None + + async def update_memory( + self, + memory_id: str, + content: Optional[str] = None, + metadata: Optional[Dict[str, Any]] = None, + user_id: Optional[str] = None, + user_email: Optional[str] = None + ) -> bool: + """Update a specific memory's content and/or metadata. + + This is an optional method that providers can implement for updating + existing memories. Returns False if not supported or update fails. + + Args: + memory_id: Unique identifier of the memory to update + content: New content for the memory (if None, content is not updated) + metadata: New metadata to merge with existing (if None, metadata is not updated) + user_id: Optional user ID for authentication + user_email: Optional user email for authentication + + Returns: + True if update succeeded, False otherwise + """ + return False + @abstractmethod async def delete_memory(self, memory_id: str, user_id: Optional[str] = None, user_email: Optional[str] = None) -> bool: """Delete a specific memory by ID. @@ -205,6 +246,27 @@ def shutdown(self) -> None: """ pass + def __init__(self): + """Initialize base memory service state. + + Subclasses should call super().__init__() in their constructors. + """ + self._initialized = False + + async def _ensure_initialized(self) -> None: + """Ensure the memory service is initialized before use. + + This method provides lazy initialization - it will automatically + call initialize() the first time it's needed. This is critical + for services used in RQ workers where the service instance is + created in one process but used in another. + + This should be called at the start of any method that requires + the service to be initialized (e.g., add_memory, search_memories). 
+ """ + if not self._initialized: + await self.initialize() + class LLMProviderBase(ABC): """Abstract base class for LLM provider implementations. diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/compat_service.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/compat_service.py deleted file mode 100644 index 361f8bcd..00000000 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/compat_service.py +++ /dev/null @@ -1,460 +0,0 @@ -"""Compatibility service for backward compatibility. - -This module provides a drop-in replacement for the original mem0-based -memory service, maintaining the same interface while using the new -architecture internally. -""" - -import json -import logging -import os -from typing import Any, Dict, List, Optional, Tuple - -from ..config import build_memory_config_from_env -from .friend_lite import MemoryService as CoreMemoryService - -memory_logger = logging.getLogger("memory_service") - - -class MemoryService: - """Drop-in replacement for the original mem0-based MemoryService. - - This class provides backward compatibility by wrapping the new - CoreMemoryService with the same interface as the original service. - It handles data format conversion and maintains compatibility with - existing code. - - Attributes: - _service: Internal CoreMemoryService instance - _initialized: Whether the service has been initialized - """ - - def __init__(self): - """Initialize the compatibility memory service.""" - self._service: Optional[CoreMemoryService] = None - self._initialized = False - - async def initialize(self): - """Initialize the memory service. 
- - Raises: - RuntimeError: If initialization fails - """ - if self._initialized: - return - - try: - config = build_memory_config_from_env() - self._service = CoreMemoryService(config) - await self._service.initialize() - self._initialized = True - memory_logger.info("โœ… Memory service initialized successfully") - except Exception as e: - memory_logger.error(f"Failed to initialize memory service: {e}") - raise - - async def add_memory( - self, - transcript: str, - client_id: str, - source_id: str, - user_id: str, - user_email: str, - allow_update: bool = False, - db_helper=None, - ) -> Tuple[bool, List[str]]: - """Add memory from transcript - compatible with original interface. - - Args: - transcript: Raw transcript text to extract memories from - client_id: Client identifier - source_id: Unique identifier for the source (audio session, chat session, etc.) - user_id: User identifier - user_email: User email address - allow_update: Whether to allow updating existing memories - db_helper: Optional database helper for tracking relationships - - Returns: - Tuple of (success: bool, created_memory_ids: List[str]) - """ - if not self._initialized: - await self.initialize() - - # Ensure service is initialized if it's not the internal CoreMemoryService - if hasattr(self._service, 'initialize') and hasattr(self._service, '_initialized'): - if not self._service._initialized: - await self._service.initialize() - - return await self._service.add_memory( - transcript=transcript, - client_id=client_id, - source_id=source_id, - user_id=user_id, - user_email=user_email, - allow_update=allow_update, - db_helper=db_helper - ) - - def _normalize_memory_content(self, content: str, metadata: Dict[str, Any]) -> str: - """Return memory content as-is since individual facts are now stored separately. 
- - Args: - content: Memory content from the provider - metadata: Memory metadata (not used) - - Returns: - Content as-is (no normalization needed) - """ - return content - - async def get_all_memories(self, user_id: str, limit: int = 100) -> List[Dict[str, Any]]: - """Get all memories for a user - returns dict format for compatibility. - - Args: - user_id: User identifier - limit: Maximum number of memories to return - - Returns: - List of memory dictionaries in legacy format - """ - if not self._initialized: - await self.initialize() - - memories = await self._service.get_all_memories(user_id, limit) - - # Convert MemoryEntry objects to dict format for compatibility with normalized content - return [ - { - "id": memory.id, - "memory": self._normalize_memory_content(memory.content, memory.metadata), - "metadata": memory.metadata, - "created_at": memory.created_at, - "score": memory.score - } - for memory in memories - ] - - async def count_memories(self, user_id: str) -> Optional[int]: - """Count total number of memories for a user. - - Args: - user_id: User identifier - - Returns: - Total count of memories for the user, or None if not supported - """ - if not self._initialized: - await self.initialize() - - # Delegate to the core service - return await self._service.count_memories(user_id) - - async def get_all_memories_unfiltered(self, user_id: str, limit: int = 100) -> List[Dict[str, Any]]: - """Get all memories without filtering - same as get_all_memories in new implementation. - - Args: - user_id: User identifier - limit: Maximum number of memories to return - - Returns: - List of memory dictionaries in legacy format - """ - return await self.get_all_memories(user_id, limit) - - async def search_memories(self, query: str, user_id: str, limit: int = 10, score_threshold: float = 0.0) -> List[Dict[str, Any]]: - """Search memories using semantic similarity - returns dict format for compatibility. 
- - Args: - query: Search query text - user_id: User identifier to filter memories - limit: Maximum number of results to return - score_threshold: Minimum similarity score (0.0 = no threshold) - - Returns: - List of memory dictionaries in legacy format ordered by relevance - """ - if not self._initialized: - await self.initialize() - - memories = await self._service.search_memories(query, user_id, limit, score_threshold) - - # Convert MemoryEntry objects to dict format for compatibility with normalized content - return [ - { - "id": memory.id, - "memory": self._normalize_memory_content(memory.content, memory.metadata), - "metadata": memory.metadata, - "created_at": memory.created_at, - "score": memory.score - } - for memory in memories - ] - - async def delete_all_user_memories(self, user_id: str) -> int: - """Delete all memories for a user and return count. - - Args: - user_id: User identifier - - Returns: - Number of memories that were deleted - """ - if not self._initialized: - await self.initialize() - - return await self._service.delete_all_user_memories(user_id) - - async def delete_memory(self, memory_id: str) -> bool: - """Delete a specific memory by ID. - - Args: - memory_id: Unique identifier of the memory to delete - - Returns: - True if successfully deleted, False otherwise - """ - if not self._initialized: - await self.initialize() - - return await self._service.delete_memory(memory_id) - - async def get_all_memories_debug(self, limit: int = 200) -> List[Dict[str, Any]]: - """Get all memories across all users for admin debugging. 
- - Args: - limit: Maximum number of memories to return - - Returns: - List of memory dictionaries with user context for debugging - """ - if not self._initialized: - await self.initialize() - - # Import User model to get all users - try: - from advanced_omi_backend.users import User - except ImportError: - memory_logger.error("Cannot import User model for debug function") - return [] - - all_memories = [] - users = await User.find_all().to_list() - - for user in users: - user_id = str(user.id) - try: - user_memories = await self.get_all_memories(user_id) - - # Add user context for debugging - for memory in user_memories: - memory_entry = { - **memory, - "user_id": user_id, - "owner_email": user.email, - "collection": "omi_memories" - } - all_memories.append(memory_entry) - - # Respect limit - if len(all_memories) >= limit: - break - - except Exception as e: - memory_logger.warning(f"Error getting memories for user {user_id}: {e}") - continue - - return all_memories[:limit] - - async def get_memories_with_transcripts(self, user_id: str, limit: int = 100) -> List[Dict[str, Any]]: - """Get memories with their source transcripts using database relationship. 
- - Args: - user_id: User identifier - limit: Maximum number of memories to return - - Returns: - List of enriched memory dictionaries with transcript information - """ - if not self._initialized: - await self.initialize() - - # Get memories first - memories = await self.get_all_memories(user_id, limit) - - # Import Conversation model - try: - from advanced_omi_backend.models.conversation import Conversation - except ImportError: - memory_logger.error("Cannot import Conversation model") - return memories # Return memories without transcript enrichment - - # Extract source IDs for bulk query - source_ids = [] - for memory in memories: - metadata = memory.get("metadata", {}) - source_id = metadata.get("source_id") or metadata.get("audio_uuid") # Backward compatibility - if source_id: - source_ids.append(source_id) - - # Bulk query for conversations (support both old audio_uuid and new source_id) - conversations_list = await Conversation.find( - Conversation.audio_uuid.in_(source_ids) - ).to_list() - - conversations_by_id = {} - for conv in conversations_list: - conversations_by_id[conv.audio_uuid] = conv - - enriched_memories = [] - - for memory in memories: - enriched_memory = { - "memory_id": memory.get("id", "unknown"), - "memory_text": memory.get("memory", ""), - "created_at": memory.get("created_at", ""), - "metadata": memory.get("metadata", {}), - "source_id": None, - "transcript": None, - "client_id": None, - "user_email": None, - "compression_ratio": 0, - "transcript_length": 0, - "memory_length": 0, - } - - # Extract source_id from memory metadata (with backward compatibility) - metadata = memory.get("metadata", {}) - source_id = metadata.get("source_id") or metadata.get("audio_uuid") - - if source_id: - enriched_memory["source_id"] = source_id - enriched_memory["client_id"] = metadata.get("client_id") - enriched_memory["user_email"] = metadata.get("user_email") - - # Get transcript from bulk-loaded conversations - conversation = 
conversations_by_id.get(source_id) - if conversation: - transcript_segments = conversation.segments - if transcript_segments: - full_transcript = " ".join( - segment.text - for segment in transcript_segments - if segment.text - ) - - if full_transcript.strip(): - enriched_memory["transcript"] = full_transcript - enriched_memory["transcript_length"] = len(full_transcript) - - memory_text = enriched_memory["memory_text"] - enriched_memory["memory_length"] = len(memory_text) - - # Calculate compression ratio - if len(full_transcript) > 0: - enriched_memory["compression_ratio"] = round( - (len(memory_text) / len(full_transcript)) * 100, 1 - ) - - enriched_memories.append(enriched_memory) - - return enriched_memories - - async def test_connection(self) -> bool: - """Test memory service connection. - - Returns: - True if connection successful, False otherwise - """ - try: - if not self._initialized: - await self.initialize() - return await self._service.test_connection() - except Exception as e: - memory_logger.error(f"Connection test failed: {e}") - return False - - def shutdown(self): - """Shutdown the memory service and clean up resources.""" - if self._service: - self._service.shutdown() - self._initialized = False - self._service = None - memory_logger.info("Memory service shut down") - - -# Global service instance - maintains compatibility with original code -_memory_service = None - - -def get_memory_service() -> MemoryService: - """Get the global memory service instance. 
- - Returns: - Global MemoryService instance (singleton pattern), wrapped for compatibility - """ - global _memory_service - if _memory_service is None: - # Use the new service factory to create the appropriate service - from ..service_factory import get_memory_service as get_core_service - - core_service = get_core_service() - - # If it's already a compat service, use it directly - if isinstance(core_service, MemoryService): - _memory_service = core_service - else: - # Wrap core service with compat layer - _memory_service = MemoryService() - _memory_service._service = core_service - _memory_service._initialized = True - - return _memory_service - - -def shutdown_memory_service(): - """Shutdown the global memory service and clean up resources.""" - global _memory_service - if _memory_service: - _memory_service.shutdown() - _memory_service = None - - # Also shutdown the core service - from .service_factory import shutdown_memory_service as shutdown_core_service - shutdown_core_service() - - -# Migration helper functions -async def migrate_from_mem0(): - """Helper function to migrate existing mem0 data to new format. - - This is a placeholder for migration logic. Actual implementation - would depend on the specific mem0 setup and data format. - - Raises: - RuntimeError: If migration fails - """ - memory_logger.info("๐Ÿ”„ Starting migration from mem0 to new memory service") - - try: - # Initialize new memory service - new_service = get_memory_service() - await new_service.initialize() - - # Get all users - try: - from advanced_omi_backend.users import User - users = await User.find_all().to_list() - except ImportError: - memory_logger.error("Cannot import User model for migration") - return - - # Migration steps would go here: - # 1. For each user, get their mem0 memories (if accessible) - # 2. Convert to new format - # 3. 
Store in new system - - memory_logger.info("โœ… Migration completed successfully") - - except Exception as e: - memory_logger.error(f"โŒ Migration failed: {e}") - raise \ No newline at end of file diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py index b3909a65..a0974e21 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py @@ -45,10 +45,10 @@ def __init__(self, config: MemoryConfig): Args: config: MemoryConfig instance with provider settings """ + super().__init__() self.config = config self.llm_provider: Optional[LLMProviderBase] = None self.vector_store: Optional[VectorStoreBase] = None - self._initialized = False async def initialize(self) -> None: """Initialize the memory service and all its components. @@ -129,8 +129,7 @@ async def add_memory( Raises: asyncio.TimeoutError: If processing exceeds timeout """ - if not self._initialized: - await self.initialize() + await self._ensure_initialized() try: # Skip empty transcripts diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py index a1b9876f..15226971 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py @@ -28,18 +28,20 @@ class MCPClient: client: HTTP client instance """ - def __init__(self, server_url: str, client_name: str = "friend_lite", user_id: str = "default", timeout: int = 30): + def __init__(self, server_url: str, client_name: str = "friend_lite", user_id: str = "default", user_email: str = "", timeout: int = 30): """Initialize client for OpenMemory. 
- + Args: server_url: Base URL of the OpenMemory server client_name: Client identifier (used as app name) user_id: User identifier for memory isolation + user_email: User email address for user metadata timeout: HTTP request timeout in seconds """ self.server_url = server_url.rstrip('/') self.client_name = client_name self.user_id = user_id + self.user_email = user_email self.timeout = timeout # Use custom CA certificate if available @@ -107,18 +109,20 @@ async def add_memories(self, text: str) -> List[str]: memory_logger.error("No apps found in OpenMemory - cannot create memory") raise MCPError("No apps found in OpenMemory") - # Use REST API endpoint for creating memories (trailing slash required) + # Use REST API endpoint for creating memories + # The 'app' field can be either app name (string) or app UUID response = await self.client.post( f"{self.server_url}/api/v1/memories/", json={ "user_id": self.user_id, "text": text, + "app": self.client_name, # Use app name (OpenMemory accepts name or UUID) "metadata": { "source": "friend_lite", - "client": self.client_name + "client": self.client_name, + "user_email": self.user_email }, - "infer": True, - "app_id": app_id # Use app_id to avoid duplicate name issues + "infer": True } ) response.raise_for_status() @@ -334,11 +338,101 @@ async def delete_all_memories(self) -> int: return result.get("deleted_count", len(memory_ids)) return len(memory_ids) - + except Exception as e: memory_logger.error(f"Error deleting all memories: {e}") return 0 - + + async def get_memory(self, memory_id: str) -> Optional[Dict[str, Any]]: + """Get a specific memory by ID. 
+ + Args: + memory_id: ID of the memory to retrieve + + Returns: + Memory dictionary if found, None otherwise + """ + try: + # Use the memories endpoint with specific ID + response = await self.client.get( + f"{self.server_url}/api/v1/memories/{memory_id}", + params={"user_id": self.user_id} + ) + + if response.status_code == 404: + memory_logger.warning(f"Memory not found: {memory_id}") + return None + + response.raise_for_status() + result = response.json() + + # Format memory for Friend-Lite + if isinstance(result, dict): + return { + "id": result.get("id", memory_id), + "content": result.get("content", "") or result.get("text", ""), + "metadata": result.get("metadata_", {}) or result.get("metadata", {}), + "created_at": result.get("created_at"), + } + + return None + + except httpx.HTTPStatusError as e: + if e.response.status_code == 404: + return None + memory_logger.error(f"HTTP error getting memory: {e}") + return None + except Exception as e: + memory_logger.error(f"Error getting memory: {e}") + return None + + async def update_memory( + self, + memory_id: str, + content: Optional[str] = None, + metadata: Optional[Dict[str, Any]] = None + ) -> bool: + """Update a specific memory's content and/or metadata. 
+ + Args: + memory_id: ID of the memory to update + content: New content for the memory (if None, content is not updated) + metadata: New metadata to merge with existing (if None, metadata is not updated) + + Returns: + True if update succeeded, False otherwise + """ + try: + # Build update payload + update_data: Dict[str, Any] = {"user_id": self.user_id} + + if content is not None: + update_data["text"] = content + + if metadata is not None: + update_data["metadata"] = metadata + + if len(update_data) == 1: # Only user_id + memory_logger.warning("No update data provided") + return False + + # Use PUT to update memory + response = await self.client.put( + f"{self.server_url}/api/v1/memories/{memory_id}", + json=update_data + ) + + response.raise_for_status() + memory_logger.info(f"โœ… Updated OpenMemory memory: {memory_id}") + return True + + except httpx.HTTPStatusError as e: + memory_logger.error(f"HTTP error updating memory: {e.response.status_code}") + return False + except Exception as e: + memory_logger.error(f"Error updating memory: {e}") + return False + async def delete_memory(self, memory_id: str, user_id: Optional[str] = None, user_email: Optional[str] = None) -> bool: """Delete a specific memory by ID. diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py index 3033c307..40184776 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py @@ -4,16 +4,41 @@ that uses Mycelia as the backend for all memory operations. 
""" +import json import logging from datetime import datetime from typing import Any, Dict, List, Optional, Tuple import httpx from ..base import MemoryEntry, MemoryServiceBase +from ..prompts import FACT_RETRIEVAL_PROMPT, TEMPORAL_ENTITY_EXTRACTION_PROMPT, TemporalEntity +from ..config import MemoryConfig +from .llm_providers import _get_openai_client memory_logger = logging.getLogger("memory_service") +def strip_markdown_json(content: str) -> str: + """Strip markdown code block wrapper from JSON content. + + Handles formats like: + - ```json\n{...}\n``` + - ```\n{...}\n``` + - {... } (plain JSON, returned as-is) + """ + content = content.strip() + if content.startswith("```"): + # Remove opening ```json or ``` + first_newline = content.find("\n") + if first_newline != -1: + content = content[first_newline + 1:] + # Remove closing ``` + if content.endswith("```"): + content = content[:-3] + content = content.strip() + return content + + class MyceliaMemoryService(MemoryServiceBase): """Memory service implementation using Mycelia backend. @@ -26,27 +51,23 @@ class MyceliaMemoryService(MemoryServiceBase): **kwargs: Additional configuration parameters """ - def __init__( - self, - api_url: str = "http://localhost:8080", - timeout: int = 30, - **kwargs - ): + def __init__(self, config: MemoryConfig): """Initialize Mycelia memory service. 
Args: - api_url: Mycelia API endpoint - timeout: Request timeout in seconds - **kwargs: Additional configuration parameters + config: MemoryConfig object containing mycelia_config and llm_config """ - self.api_url = api_url.rstrip("/") - self.timeout = timeout - self.config = kwargs - self._initialized = False + super().__init__() + self.config = config + self.mycelia_config = config.mycelia_config or {} + self.api_url = self.mycelia_config.get("api_url", "http://localhost:8080").rstrip("/") + self.timeout = self.mycelia_config.get("timeout", 30) self._client: Optional[httpx.AsyncClient] = None - memory_logger.info(f"๐Ÿ„ Initializing Mycelia memory service at {api_url}") + # Store LLM config for temporal extraction + self.llm_config = config.llm_config or {} + memory_logger.info(f"๐Ÿ„ Initializing Mycelia memory service at {self.api_url}") async def initialize(self) -> None: """Initialize Mycelia client and verify connection.""" try: @@ -119,21 +140,47 @@ def _mycelia_object_to_memory_entry(self, obj: Dict, user_id: str) -> MemoryEntr user_id: User ID for metadata Returns: - MemoryEntry object + MemoryEntry object with full Mycelia metadata including temporal and semantic fields """ memory_id = self._extract_bson_id(obj.get("_id", "")) memory_content = obj.get("details", "") + # Build metadata with all Mycelia fields + metadata = { + "user_id": user_id, + "name": obj.get("name", ""), + "aliases": obj.get("aliases", []), + "created_at": self._extract_bson_date(obj.get("createdAt")), + "updated_at": self._extract_bson_date(obj.get("updatedAt")), + # Semantic flags + "isPerson": obj.get("isPerson", False), + "isEvent": obj.get("isEvent", False), + "isPromise": obj.get("isPromise", False), + "isRelationship": obj.get("isRelationship", False), + } + + # Add icon if present + if "icon" in obj and obj["icon"]: + metadata["icon"] = obj["icon"] + + # Add temporal information if present + if "timeRanges" in obj and obj["timeRanges"]: + # Convert BSON dates in timeRanges to 
ISO strings for JSON serialization + time_ranges = [] + for tr in obj["timeRanges"]: + time_range = { + "start": self._extract_bson_date(tr.get("start")), + "end": self._extract_bson_date(tr.get("end")), + } + if "name" in tr: + time_range["name"] = tr["name"] + time_ranges.append(time_range) + metadata["timeRanges"] = time_ranges + return MemoryEntry( id=memory_id, content=memory_content, - metadata={ - "user_id": user_id, - "name": obj.get("name", ""), - "aliases": obj.get("aliases", []), - "created_at": self._extract_bson_date(obj.get("createdAt")), - "updated_at": self._extract_bson_date(obj.get("updatedAt")), - }, + metadata=metadata, created_at=self._extract_bson_date(obj.get("createdAt")) ) @@ -175,6 +222,140 @@ async def _call_resource( memory_logger.error(f"Failed to call Mycelia resource: {e}") raise RuntimeError(f"Mycelia API call failed: {e}") + async def _extract_memories_via_llm( + self, + transcript: str, + ) -> List[str]: + """Extract memories from transcript using OpenAI directly. 
+ + Args: + transcript: Raw transcript text + + Returns: + List of extracted memory facts + + Raises: + RuntimeError: If LLM call fails + """ + if not self.llm_config: + memory_logger.warning("No LLM config available for fact extraction") + return [] + + try: + # Get OpenAI client using Friend-Lite's utility + client = _get_openai_client( + api_key=self.llm_config.get("api_key"), + base_url=self.llm_config.get("base_url", "https://api.openai.com/v1"), + is_async=True + ) + + # Call OpenAI for memory extraction + response = await client.chat.completions.create( + model=self.llm_config.get("model", "gpt-4o-mini"), + messages=[ + {"role": "system", "content": FACT_RETRIEVAL_PROMPT}, + {"role": "user", "content": transcript} + ], + response_format={"type": "json_object"}, + temperature=0.1 + ) + + content = response.choices[0].message.content + + if not content: + memory_logger.warning("LLM returned empty content") + return [] + + # Parse JSON response to extract facts + try: + # Strip markdown wrapper if present (just in case) + json_content = strip_markdown_json(content) + facts_data = json.loads(json_content) + facts = facts_data.get("facts", []) + memory_logger.info(f"๐Ÿง  Extracted {len(facts)} facts from transcript via OpenAI") + return facts + except json.JSONDecodeError as e: + memory_logger.error(f"Failed to parse LLM response as JSON: {e}") + memory_logger.error(f"LLM response was: {content[:300]}") + return [] + + except Exception as e: + memory_logger.error(f"Failed to extract memories via OpenAI: {e}") + raise RuntimeError(f"OpenAI memory extraction failed: {e}") + + async def _extract_temporal_entity_via_llm( + self, + fact: str, + ) -> Optional[TemporalEntity]: + """Extract temporal and entity information from a fact using OpenAI directly. 
+ + Args: + fact: Memory fact text + + Returns: + TemporalEntity with extracted information, or None if extraction fails + """ + if not self.llm_config: + memory_logger.warning("No LLM config available for temporal extraction") + return None + + try: + # Get OpenAI client using Friend-Lite's utility + client = _get_openai_client( + api_key=self.llm_config.get("api_key"), + base_url=self.llm_config.get("base_url", "https://api.openai.com/v1"), + is_async=True + ) + + # Call OpenAI with structured output request + response = await client.chat.completions.create( + model=self.llm_config.get("model", "gpt-4o-mini"), + messages=[ + {"role": "system", "content": TEMPORAL_ENTITY_EXTRACTION_PROMPT}, + {"role": "user", "content": f"Extract temporal and entity information from this memory fact:\n\n{fact}"} + ], + response_format={"type": "json_object"}, + temperature=0.1 + ) + + content = response.choices[0].message.content + + if not content: + memory_logger.warning("LLM returned empty content for temporal extraction") + return None + + # Parse JSON response and validate with Pydantic + try: + # Strip markdown wrapper if present (just in case) + json_content = strip_markdown_json(content) + temporal_data = json.loads(json_content) + + # Convert timeRanges to proper format if present + if "timeRanges" in temporal_data: + for time_range in temporal_data["timeRanges"]: + if isinstance(time_range["start"], str): + time_range["start"] = datetime.fromisoformat(time_range["start"].replace("Z", "+00:00")) + if isinstance(time_range["end"], str): + time_range["end"] = datetime.fromisoformat(time_range["end"].replace("Z", "+00:00")) + + temporal_entity = TemporalEntity(**temporal_data) + memory_logger.info(f"โœ… Temporal extraction: isEvent={temporal_entity.isEvent}, timeRanges={len(temporal_entity.timeRanges)}, entities={temporal_entity.entities}") + return temporal_entity + + except json.JSONDecodeError as e: + memory_logger.error(f"โŒ Failed to parse temporal extraction JSON: 
{e}") + memory_logger.error(f"Content (first 300 chars): {content[:300]}") + return None + except Exception as e: + memory_logger.error(f"Failed to validate temporal entity: {e}") + memory_logger.error(f"Data: {content[:300] if content else 'None'}") + return None + + except Exception as e: + memory_logger.error(f"Failed to extract temporal data via OpenAI: {e}") + # Don't fail the entire memory creation if temporal extraction fails + return None + async def add_memory( self, transcript: str, @@ -199,37 +380,96 @@ async def add_memory( Returns: Tuple of (success: bool, created_memory_ids: List[str]) """ + # Ensure service is initialized (lazy initialization for RQ workers) + await self._ensure_initialized() + try: # Generate JWT token for this user jwt_token = await self._get_user_jwt(user_id, user_email) - # Create a Mycelia object for this memory - # Memory content is stored in the 'details' field - memory_preview = transcript[:50] + ("..." if len(transcript) > 50 else "") - - object_data = { - "name": f"Memory: {memory_preview}", - "details": transcript, - "aliases": [source_id, client_id], # Searchable by source or client - "isPerson": False, - "isPromise": False, - "isEvent": False, - "isRelationship": False, - # Note: userId is auto-injected by Mycelia from JWT - } + # Extract memories from transcript using OpenAI + memory_logger.info(f"Extracting memories from transcript via OpenAI...") + extracted_facts = await self._extract_memories_via_llm(transcript) - result = await self._call_resource( - action="create", - jwt_token=jwt_token, - object=object_data - ) + if not extracted_facts: + memory_logger.warning("No memories extracted from transcript") + return (False, []) - memory_id = result.get("insertedId") - if memory_id: - memory_logger.info(f"โœ… Created Mycelia memory object: {memory_id}") - return (True, [memory_id]) + # Create Mycelia objects for each extracted fact + memory_ids = [] + for fact in extracted_facts: + fact_preview = fact[:50] + ("..." 
if len(fact) > 50 else "") + + # Extract temporal and entity information + temporal_entity = await self._extract_temporal_entity_via_llm(fact) + + # Build object data with temporal/entity information if available + if temporal_entity: + # Convert timeRanges from Pydantic models to dict format for Mycelia API + time_ranges = [] + for tr in temporal_entity.timeRanges: + time_range_dict = { + "start": tr.start.isoformat() if isinstance(tr.start, datetime) else tr.start, + "end": tr.end.isoformat() if isinstance(tr.end, datetime) else tr.end, + } + if tr.name: + time_range_dict["name"] = tr.name + time_ranges.append(time_range_dict) + + # Use emoji in name if available, otherwise use default + name_prefix = temporal_entity.emoji if temporal_entity.emoji else "Memory:" + + object_data = { + "name": f"{name_prefix} {fact_preview}", + "details": fact, + "aliases": [source_id, client_id] + temporal_entity.entities, # Include extracted entities + "isPerson": temporal_entity.isPerson, + "isPromise": temporal_entity.isPromise, + "isEvent": temporal_entity.isEvent, + "isRelationship": temporal_entity.isRelationship, + # Note: userId is auto-injected by Mycelia from JWT + } + + # Add timeRanges if temporal information was extracted + if time_ranges: + object_data["timeRanges"] = time_ranges + + # Add emoji icon if available + if temporal_entity.emoji: + object_data["icon"] = {"text": temporal_entity.emoji} + + memory_logger.info(f"๐Ÿ“… Temporal extraction: isEvent={temporal_entity.isEvent}, timeRanges={len(time_ranges)}, entities={len(temporal_entity.entities)}") + else: + # Fallback to basic object without temporal data + object_data = { + "name": f"Memory: {fact_preview}", + "details": fact, + "aliases": [source_id, client_id], + "isPerson": False, + "isPromise": False, + "isEvent": False, + "isRelationship": False, + } + memory_logger.warning(f"โš ๏ธ No temporal data extracted for fact: {fact_preview}") + + result = await self._call_resource( + action="create", + 
jwt_token=jwt_token, + object=object_data + ) + + memory_id = result.get("insertedId") + if memory_id: + memory_logger.info(f"โœ… Created Mycelia memory object: {memory_id} - {fact_preview}") + memory_ids.append(memory_id) + else: + memory_logger.error(f"Failed to create memory fact: {fact}") + + if memory_ids: + memory_logger.info(f"โœ… Created {len(memory_ids)} Mycelia memory objects from {len(extracted_facts)} facts") + return (True, memory_ids) else: - memory_logger.error("Failed to create Mycelia memory: no insertedId returned") + memory_logger.error("No Mycelia memory objects were created") return (False, []) except Exception as e: @@ -362,6 +602,126 @@ async def count_memories(self, user_id: str) -> Optional[int]: memory_logger.error(f"Failed to count memories via Mycelia: {e}") return None + async def get_memory(self, memory_id: str, user_id: Optional[str] = None) -> Optional[MemoryEntry]: + """Get a specific memory by ID from Mycelia. + + Args: + memory_id: Unique identifier of the memory to retrieve + user_id: Optional user identifier for authentication + + Returns: + MemoryEntry object if found, None otherwise + """ + if not self._initialized: + await self.initialize() + + try: + # Need user ID for JWT authentication + if not user_id: + memory_logger.error("User ID required for Mycelia get_memory operation") + return None + + # Generate JWT token for this user + jwt_token = await self._get_user_jwt(user_id) + + # Get the object by ID (auto-scoped by userId in Mycelia) + result = await self._call_resource( + action="get", + jwt_token=jwt_token, + id=memory_id + ) + + if result: + return self._mycelia_object_to_memory_entry(result, user_id) + else: + memory_logger.warning(f"Memory not found with ID: {memory_id}") + return None + + except Exception as e: + memory_logger.error(f"Failed to get memory via Mycelia: {e}") + return None + + async def update_memory( + self, + memory_id: str, + content: Optional[str] = None, + metadata: Optional[Dict[str, Any]] = 
None, + user_id: Optional[str] = None, + user_email: Optional[str] = None + ) -> bool: + """Update a specific memory's content and/or metadata in Mycelia. + + Args: + memory_id: Unique identifier of the memory to update + content: New content for the memory (updates 'details' field) + metadata: New metadata to merge with existing + user_id: Optional user ID for authentication + user_email: Optional user email for authentication + + Returns: + True if update succeeded, False otherwise + """ + if not self._initialized: + await self.initialize() + + try: + # Need user ID for JWT authentication + if not user_id: + memory_logger.error("User ID required for Mycelia update_memory operation") + return False + + # Generate JWT token for this user + jwt_token = await self._get_user_jwt(user_id, user_email) + + # Build update object + update_data: Dict[str, Any] = {} + + if content is not None: + update_data["details"] = content + + if metadata: + # Extract specific metadata fields that Mycelia supports + if "name" in metadata: + update_data["name"] = metadata["name"] + if "aliases" in metadata: + update_data["aliases"] = metadata["aliases"] + if "isPerson" in metadata: + update_data["isPerson"] = metadata["isPerson"] + if "isPromise" in metadata: + update_data["isPromise"] = metadata["isPromise"] + if "isEvent" in metadata: + update_data["isEvent"] = metadata["isEvent"] + if "isRelationship" in metadata: + update_data["isRelationship"] = metadata["isRelationship"] + if "timeRanges" in metadata: + update_data["timeRanges"] = metadata["timeRanges"] + if "icon" in metadata: + update_data["icon"] = metadata["icon"] + + if not update_data: + memory_logger.warning("No update data provided") + return False + + # Update the object (auto-scoped by userId in Mycelia) + result = await self._call_resource( + action="update", + jwt_token=jwt_token, + id=memory_id, + object=update_data + ) + + updated_count = result.get("modifiedCount", 0) + if updated_count > 0: + 
memory_logger.info(f"โœ… Updated Mycelia memory object: {memory_id}") + return True + else: + memory_logger.warning(f"No memory updated with ID: {memory_id}") + return False + + except Exception as e: + memory_logger.error(f"Failed to update memory via Mycelia: {e}") + return False + async def delete_memory(self, memory_id: str, user_id: Optional[str] = None, user_email: Optional[str] = None) -> bool: """Delete a specific memory from Mycelia. diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py index 04b8fd67..d18be16a 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py @@ -47,30 +47,26 @@ def __init__( user_id: Optional[str] = None, timeout: Optional[int] = None, ): - self.server_url = server_url or os.getenv("OPENMEMORY_MCP_URL", "http://localhost:8765") - self.client_name = client_name or os.getenv("OPENMEMORY_CLIENT_NAME", "friend_lite") - self.user_id = user_id or os.getenv("OPENMEMORY_USER_ID", "default") - self.timeout = int(timeout or os.getenv("OPENMEMORY_TIMEOUT", "30")) """Initialize OpenMemory MCP service as a thin client. 
- + This service delegates all memory processing to the OpenMemory MCP server: - Memory extraction (OpenMemory handles internally) - - Deduplication (OpenMemory handles internally) + - Deduplication (OpenMemory handles internally) - Vector storage (OpenMemory handles internally) - User isolation via ACL (OpenMemory handles internally) - + Args: server_url: URL of the OpenMemory MCP server (default: http://localhost:8765) client_name: Client identifier for OpenMemory MCP user_id: User identifier for memory isolation via OpenMemory ACL timeout: HTTP request timeout in seconds """ - self.server_url = server_url - self.client_name = client_name - self.user_id = user_id - self.timeout = timeout + super().__init__() + self.server_url = server_url or os.getenv("OPENMEMORY_MCP_URL", "http://localhost:8765") + self.client_name = client_name or os.getenv("OPENMEMORY_CLIENT_NAME", "friend_lite") + self.user_id = user_id or os.getenv("OPENMEMORY_USER_ID", "default") + self.timeout = int(timeout or os.getenv("OPENMEMORY_TIMEOUT", "30")) self.mcp_client: Optional[MCPClient] = None - self._initialized = False async def initialize(self) -> None: """Initialize the OpenMemory MCP service. 
@@ -138,8 +134,7 @@ async def add_memory( Raises: MCPError: If MCP server communication fails """ - if not self._initialized: - await self.initialize() + await self._ensure_initialized() try: # Skip empty transcripts @@ -149,19 +144,22 @@ async def add_memory( # Update MCP client user context for this operation original_user_id = self.mcp_client.user_id - self.mcp_client.user_id = self.user_id # Use configured user ID - + original_user_email = self.mcp_client.user_email + self.mcp_client.user_id = user_id # Use the actual Friend-Lite user's ID + self.mcp_client.user_email = user_email # Use the actual user's email + try: # Thin client approach: Send raw transcript to OpenMemory MCP server # OpenMemory handles: extraction, deduplication, vector storage, ACL enriched_transcript = f"[Source: {source_id}, Client: {client_id}] {transcript}" - - memory_logger.info(f"Delegating memory processing to OpenMemory MCP for {source_id}") + + memory_logger.info(f"Delegating memory processing to OpenMemory MCP for user {user_id}, source {source_id}") memory_ids = await self.mcp_client.add_memories(text=enriched_transcript) - + finally: - # Restore original user_id + # Restore original user context self.mcp_client.user_id = original_user_id + self.mcp_client.user_email = original_user_email # Update database relationships if helper provided if memory_ids and db_helper: @@ -208,24 +206,24 @@ async def search_memories( # Update MCP client user context for this operation original_user_id = self.mcp_client.user_id - self.mcp_client.user_id = self.user_id # Use configured user ID - + self.mcp_client.user_id = user_id # Use the actual Friend-Lite user's ID + try: results = await self.mcp_client.search_memory( query=query, limit=limit ) - + # Convert MCP results to MemoryEntry objects memory_entries = [] for result in results: memory_entry = self._mcp_result_to_memory_entry(result, user_id) if memory_entry: memory_entries.append(memory_entry) - + memory_logger.info(f"๐Ÿ” Found 
{len(memory_entries)} memories for query '{query}' (user: {user_id})") return memory_entries - + except MCPError as e: memory_logger.error(f"Search memories failed: {e}") return [] @@ -258,21 +256,21 @@ async def get_all_memories( # Update MCP client user context for this operation original_user_id = self.mcp_client.user_id - self.mcp_client.user_id = self.user_id # Use configured user ID - + self.mcp_client.user_id = user_id # Use the actual Friend-Lite user's ID + try: results = await self.mcp_client.list_memories(limit=limit) - + # Convert MCP results to MemoryEntry objects memory_entries = [] for result in results: memory_entry = self._mcp_result_to_memory_entry(result, user_id) if memory_entry: memory_entries.append(memory_entry) - + memory_logger.info(f"๐Ÿ“š Retrieved {len(memory_entries)} memories for user {user_id}") return memory_entries - + except MCPError as e: memory_logger.error(f"Get all memories failed: {e}") return [] @@ -282,7 +280,89 @@ async def get_all_memories( finally: # Restore original user_id self.mcp_client.user_id = original_user_id - + + async def get_memory(self, memory_id: str, user_id: Optional[str] = None) -> Optional[MemoryEntry]: + """Get a specific memory by ID. 
+ + Args: + memory_id: Unique identifier of the memory to retrieve + user_id: Optional user identifier for filtering + + Returns: + MemoryEntry object if found, None otherwise + """ + if not self._initialized: + await self.initialize() + + # Update MCP client user context for this operation + original_user_id = self.mcp_client.user_id + self.mcp_client.user_id = user_id or self.user_id # Use the actual Friend-Lite user's ID + + try: + result = await self.mcp_client.get_memory(memory_id) + + if not result: + memory_logger.warning(f"Memory not found: {memory_id}") + return None + + # Convert MCP result to MemoryEntry + memory_entry = self._mcp_result_to_memory_entry(result, user_id or self.user_id) + if memory_entry: + memory_logger.info(f"๐Ÿ“– Retrieved memory {memory_id}") + return memory_entry + + except Exception as e: + memory_logger.error(f"Failed to get memory: {e}") + return None + finally: + # Restore original user_id + self.mcp_client.user_id = original_user_id + + async def update_memory( + self, + memory_id: str, + content: Optional[str] = None, + metadata: Optional[Dict[str, Any]] = None, + user_id: Optional[str] = None, + user_email: Optional[str] = None + ) -> bool: + """Update a specific memory's content and/or metadata. 
+ + Args: + memory_id: Unique identifier of the memory to update + content: New content for the memory (if None, content is not updated) + metadata: New metadata to merge with existing (if None, metadata is not updated) + user_id: Optional user ID (not used by OpenMemory MCP) + user_email: Optional user email (not used by OpenMemory MCP) + + Returns: + True if update succeeded, False otherwise + """ + if not self._initialized: + await self.initialize() + + # Update MCP client user context for this operation + original_user_id = self.mcp_client.user_id + self.mcp_client.user_id = user_id or self.user_id # Use the actual Friend-Lite user's ID + + try: + success = await self.mcp_client.update_memory( + memory_id=memory_id, + content=content, + metadata=metadata + ) + + if success: + memory_logger.info(f"โœ๏ธ Updated memory {memory_id} via MCP") + return success + + except Exception as e: + memory_logger.error(f"Failed to update memory: {e}") + return False + finally: + # Restore original user_id + self.mcp_client.user_id = original_user_id + async def delete_memory(self, memory_id: str, user_id: Optional[str] = None, user_email: Optional[str] = None) -> bool: """Delete a specific memory by ID. 
diff --git a/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py b/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py index 6b8da757..fdb16b7d 100644 --- a/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py +++ b/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py @@ -180,9 +180,11 @@ async def process_memory_job( for memory_id in created_memory_ids[:5]: # Limit to first 5 for display memory_entry = await memory_service.get_memory(memory_id, user_id) if memory_entry: + # memory_entry is a MemoryEntry object, not a dict + memory_text = memory_entry.content if hasattr(memory_entry, 'content') else str(memory_entry) memory_details.append({ "memory_id": memory_id, - "text": memory_entry.get("text", "")[:200] # First 200 chars + "text": memory_text[:200] # First 200 chars }) except Exception as e: logger.warning(f"Failed to fetch memory details for UI: {e}") From 0f51fcefeae7dd16e31ba221b5005104c7c18258 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Wed, 3 Dec 2025 22:57:48 +0000 Subject: [PATCH 08/31] added memory provider switch --- .../controllers/system_controller.py | 102 ++++++++++++++++- .../routers/modules/system_routes.py | 17 +++ backends/advanced/webui/src/pages/System.tsx | 103 +++++++++++++++++- 3 files changed, 217 insertions(+), 5 deletions(-) diff --git a/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py b/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py index a2afadbc..9341cc59 100644 --- a/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py +++ b/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py @@ -445,19 +445,19 @@ async def delete_all_user_memories(user: User): from advanced_omi_backend.services.memory import get_memory_service memory_service = get_memory_service() - + # Delete all memories for the user deleted_count = await memory_service.delete_all_user_memories(user.user_id) - + 
logger.info(f"Deleted {deleted_count} memories for user {user.user_id}") - + return { "message": f"Successfully deleted {deleted_count} memories", "deleted_count": deleted_count, "user_id": user.user_id, "status": "success" } - + except Exception as e: logger.error(f"Error deleting all memories for user {user.user_id}: {e}") return JSONResponse( @@ -465,3 +465,97 @@ async def delete_all_user_memories(user: User): ) +# Memory Provider Configuration Functions + +async def get_memory_provider(): + """Get current memory provider configuration.""" + try: + current_provider = os.getenv("MEMORY_PROVIDER", "friend_lite").lower() + + # Get available providers + available_providers = ["friend_lite", "openmemory_mcp", "mycelia"] + + return { + "current_provider": current_provider, + "available_providers": available_providers, + "status": "success" + } + + except Exception as e: + logger.error(f"Error getting memory provider: {e}") + return JSONResponse( + status_code=500, content={"error": f"Failed to get memory provider: {str(e)}"} + ) + + +async def set_memory_provider(provider: str): + """Set memory provider and update .env file.""" + try: + # Validate provider + provider = provider.lower().strip() + valid_providers = ["friend_lite", "openmemory_mcp", "mycelia"] + + if provider not in valid_providers: + return JSONResponse( + status_code=400, + content={"error": f"Invalid provider '{provider}'. 
Valid providers: {', '.join(valid_providers)}"} + ) + + # Path to .env file (assuming we're running from backends/advanced/) + env_path = os.path.join(os.getcwd(), ".env") + + if not os.path.exists(env_path): + return JSONResponse( + status_code=404, + content={"error": f".env file not found at {env_path}"} + ) + + # Read current .env file + with open(env_path, 'r') as file: + lines = file.readlines() + + # Update or add MEMORY_PROVIDER line + provider_found = False + updated_lines = [] + + for line in lines: + if line.strip().startswith("MEMORY_PROVIDER="): + updated_lines.append(f"MEMORY_PROVIDER={provider}\n") + provider_found = True + else: + updated_lines.append(line) + + # If MEMORY_PROVIDER wasn't found, add it + if not provider_found: + updated_lines.append(f"\n# Memory Provider Configuration\nMEMORY_PROVIDER={provider}\n") + + # Create backup + backup_path = f"{env_path}.bak" + shutil.copy2(env_path, backup_path) + logger.info(f"Created .env backup at {backup_path}") + + # Write updated .env file + with open(env_path, 'w') as file: + file.writelines(updated_lines) + + # Update environment variable for current process + os.environ["MEMORY_PROVIDER"] = provider + + logger.info(f"Updated MEMORY_PROVIDER to '{provider}' in .env file") + + return { + "message": f"Memory provider updated to '{provider}'. 
Please restart the backend service for changes to take effect.", + "provider": provider, + "env_path": env_path, + "backup_created": True, + "requires_restart": True, + "status": "success" + } + + except Exception as e: + logger.error(f"Error setting memory provider: {e}") + return JSONResponse( + status_code=500, content={"error": f"Failed to set memory provider: {str(e)}"} + ) + + diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/system_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/system_routes.py index 3c97bd55..10587b5c 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/system_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/system_routes.py @@ -126,3 +126,20 @@ async def cleanup_stuck_stream_workers(request: Request, current_user: User = De async def cleanup_old_sessions(request: Request, max_age_seconds: int = 3600, current_user: User = Depends(current_superuser)): """Clean up old session tracking metadata. Admin only.""" return await session_controller.cleanup_old_sessions(request, max_age_seconds) + + +# Memory Provider Configuration Endpoints + +@router.get("/admin/memory/provider") +async def get_memory_provider(current_user: User = Depends(current_superuser)): + """Get current memory provider configuration. Admin only.""" + return await system_controller.get_memory_provider() + + +@router.post("/admin/memory/provider") +async def set_memory_provider( + provider: str = Body(..., embed=True), + current_user: User = Depends(current_superuser) +): + """Set memory provider and restart backend services. 
Admin only.""" + return await system_controller.set_memory_provider(provider) diff --git a/backends/advanced/webui/src/pages/System.tsx b/backends/advanced/webui/src/pages/System.tsx index 3ca54a59..c722ada9 100644 --- a/backends/advanced/webui/src/pages/System.tsx +++ b/backends/advanced/webui/src/pages/System.tsx @@ -1,5 +1,5 @@ import { useState, useEffect } from 'react' -import { Settings, RefreshCw, CheckCircle, XCircle, AlertCircle, Activity, Users, Database, Server, Volume2, Mic } from 'lucide-react' +import { Settings, RefreshCw, CheckCircle, XCircle, AlertCircle, Activity, Users, Database, Server, Volume2, Mic, Brain } from 'lucide-react' import { systemApi, speakerApi } from '../services/api' import { useAuth } from '../contexts/AuthContext' import MemorySettings from '../components/MemorySettings' @@ -64,6 +64,11 @@ export default function System() { max_speakers: 6 }) const [diarizationLoading, setDiarizationLoading] = useState(false) + const [currentProvider, setCurrentProvider] = useState('') + const [availableProviders, setAvailableProviders] = useState([]) + const [selectedProvider, setSelectedProvider] = useState('') + const [providerLoading, setProviderLoading] = useState(false) + const [providerMessage, setProviderMessage] = useState('') const { isAdmin } = useAuth() @@ -120,6 +125,46 @@ export default function System() { } } + const loadMemoryProvider = async () => { + try { + setProviderLoading(true) + const response = await systemApi.getMemoryProvider() + if (response.data.status === 'success') { + setCurrentProvider(response.data.current_provider) + setAvailableProviders(response.data.available_providers) + setSelectedProvider(response.data.current_provider) + } + } catch (err: any) { + console.error('Failed to load memory provider:', err) + } finally { + setProviderLoading(false) + } + } + + const saveMemoryProvider = async () => { + if (selectedProvider === currentProvider) { + setProviderMessage('Provider is already set to ' + 
selectedProvider) + setTimeout(() => setProviderMessage(''), 3000) + return + } + + try { + setProviderLoading(true) + setProviderMessage('') + const response = await systemApi.setMemoryProvider(selectedProvider) + if (response.data.status === 'success') { + setCurrentProvider(selectedProvider) + setProviderMessage('โœ… ' + response.data.message) + } else { + setProviderMessage('โŒ Failed to update provider') + } + } catch (err: any) { + setProviderMessage('โŒ Error: ' + (err.response?.data?.error || err.message)) + } finally { + setProviderLoading(false) + } + } + const saveDiarizationSettings = async () => { try { setDiarizationLoading(true) @@ -139,6 +184,7 @@ export default function System() { useEffect(() => { loadSystemData() loadDiarizationSettings() + loadMemoryProvider() }, [isAdmin]) const getStatusIcon = (healthy: boolean) => { @@ -285,6 +331,61 @@ export default function System() { ))} + + {/* Memory Provider Selector */} +
+
+ + + Memory Provider + +
+
+ {/* Current Provider Display */} +
+ Current: + + {currentProvider || 'Loading...'} + +
+ + {/* Provider Selector */} +
+ + +
+ + {/* Status Message */} + {providerMessage && ( +
+ {providerMessage} +
+ )} +
+
)} From d92dc213b800e61b44a616792d7a463d47d9d413 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Wed, 3 Dec 2025 23:01:06 +0000 Subject: [PATCH 09/31] added clickable memories --- .../advanced/webui/src/pages/Memories.tsx | 184 +++++----- .../advanced/webui/src/pages/MemoryDetail.tsx | 331 ++++++++++++++++++ 2 files changed, 435 insertions(+), 80 deletions(-) create mode 100644 backends/advanced/webui/src/pages/MemoryDetail.tsx diff --git a/backends/advanced/webui/src/pages/Memories.tsx b/backends/advanced/webui/src/pages/Memories.tsx index 7ad3bf59..0c4973b6 100644 --- a/backends/advanced/webui/src/pages/Memories.tsx +++ b/backends/advanced/webui/src/pages/Memories.tsx @@ -1,4 +1,5 @@ import { useState, useEffect } from 'react' +import { useNavigate } from 'react-router-dom' import { Brain, Search, RefreshCw, Trash2, Calendar, Tag, X, Target } from 'lucide-react' import { memoriesApi, systemApi } from '../services/api' import { useAuth } from '../contexts/AuthContext' @@ -18,24 +19,25 @@ interface Memory { } export default function Memories() { + const navigate = useNavigate() const [memories, setMemories] = useState([]) const [loading, setLoading] = useState(false) const [error, setError] = useState(null) const [searchQuery, setSearchQuery] = useState('') const [showUnfiltered, setShowUnfiltered] = useState(false) const [totalCount, setTotalCount] = useState(null) - + // Semantic search state const [semanticResults, setSemanticResults] = useState([]) const [isSemanticFilterActive, setIsSemanticFilterActive] = useState(false) const [semanticQuery, setSemanticQuery] = useState('') const [semanticLoading, setSemanticLoading] = useState(false) const [relevanceThreshold, setRelevanceThreshold] = useState(0) // 0-100 percentage - + // System configuration state const [memoryProviderSupportsThreshold, setMemoryProviderSupportsThreshold] = useState(false) const [memoryProvider, setMemoryProvider] = useState('') - + const { user } = useAuth() const loadSystemConfig = 
async () => { @@ -59,24 +61,24 @@ export default function Memories() { try { setLoading(true) - const response = showUnfiltered + const response = showUnfiltered ? await memoriesApi.getUnfiltered(user.id) : await memoriesApi.getAll(user.id) - + console.log('๐Ÿง  Memories API response:', response.data) - + // Handle the API response structure const memoriesData = response.data.memories || response.data || [] const totalCount = response.data.total_count console.log('๐Ÿง  Processed memories data:', memoriesData) console.log('๐Ÿง  Total count:', totalCount) - + // Log first few memories to inspect structure if (memoriesData.length > 0) { console.log('๐Ÿง  First memory object:', memoriesData[0]) console.log('๐Ÿง  Memory fields:', Object.keys(memoriesData[0])) } - + setMemories(Array.isArray(memoriesData) ? memoriesData : []) // Store total count in state for display setTotalCount(totalCount) @@ -100,25 +102,25 @@ export default function Memories() { // Semantic search handlers const handleSemanticSearch = async () => { if (!searchQuery.trim() || !user?.id) return - + try { setSemanticLoading(true) - + // Use current threshold for server-side filtering if memory provider supports it - const thresholdToUse = memoryProviderSupportsThreshold - ? relevanceThreshold + const thresholdToUse = memoryProviderSupportsThreshold + ? relevanceThreshold : undefined - + const response = await memoriesApi.search( - searchQuery.trim(), - user.id, - 50, + searchQuery.trim(), + user.id, + 50, thresholdToUse ) - + console.log('๐Ÿ” Search response:', response.data) console.log('๐ŸŽฏ Used threshold:', thresholdToUse) - + setSemanticResults(response.data.results || []) setSemanticQuery(searchQuery.trim()) setIsSemanticFilterActive(true) @@ -156,7 +158,7 @@ export default function Memories() { // Update filtering logic with client-side threshold filtering after search const currentMemories = isSemanticFilterActive ? 
semanticResults : memories - + // Apply relevance threshold filter (client-side for all providers after search) const thresholdFilteredMemories = isSemanticFilterActive && relevanceThreshold > 0 ? currentMemories.filter(memory => { @@ -165,7 +167,7 @@ export default function Memories() { return relevancePercentage >= relevanceThreshold }) : currentMemories - + // Apply text search filter const filteredMemories = thresholdFilteredMemories.filter(memory => memory.memory.toLowerCase().includes(searchQuery.toLowerCase()) || @@ -175,7 +177,7 @@ export default function Memories() { const formatDate = (dateInput: string | number) => { // Handle both timestamp numbers and date strings let date: Date - + if (typeof dateInput === 'number') { // Unix timestamp - multiply by 1000 if needed date = dateInput > 1e10 ? new Date(dateInput) : new Date(dateInput * 1000) @@ -192,20 +194,20 @@ export default function Memories() { } else { date = new Date(dateInput) } - + // Check if date is valid if (isNaN(date.getTime())) { console.warn('Invalid date:', dateInput) return 'Invalid Date' } - + return date.toLocaleString() } const getCategoryColor = (category: string) => { const colors = { 'personal': 'bg-blue-100 text-blue-800', - 'work': 'bg-green-100 text-green-800', + 'work': 'bg-green-100 text-green-800', 'health': 'bg-red-100 text-red-800', 'entertainment': 'bg-purple-100 text-purple-800', 'education': 'bg-yellow-100 text-yellow-800', @@ -218,7 +220,7 @@ export default function Memories() { const renderMemoryText = (content: string) => { // Handle multi-line content (bullet points from backend normalization) const lines = content.split('\n').filter(line => line.trim()) - + if (lines.length > 1) { return (
@@ -230,7 +232,7 @@ export default function Memories() {
) } - + // Single line content return (

@@ -298,7 +300,7 @@ export default function Memories() { onChange={(e) => setSearchQuery(e.target.value)} placeholder="Search memories..." className="w-full pl-10 pr-32 py-2 border border-gray-300 dark:border-gray-600 rounded-md bg-white dark:bg-gray-800 text-gray-900 dark:text-gray-100 focus:outline-none focus:ring-2 focus:ring-blue-500" - onKeyPress={(e) => e.key === 'Enter' && handleSemanticSearch()} + onKeyDown={(e) => e.key === 'Enter' && handleSemanticSearch()} /> - - {/* Memory Content */} -

- {renderMemoryContent(memory)} -
- - {/* Metadata */} - {memory.metadata && ( -
-
- - View metadata - -
-                        {JSON.stringify(memory.metadata, null, 2)}
-                      
-
-
- )} ))} @@ -550,7 +574,7 @@ export default function Memories() { `No semantic matches found for "${semanticQuery}"` ) ) : ( - searchQuery + searchQuery ? `No memories found matching "${searchQuery}"` : `No memories found` )} @@ -567,4 +591,4 @@ export default function Memories() { )} ) -} \ No newline at end of file +} diff --git a/backends/advanced/webui/src/pages/MemoryDetail.tsx b/backends/advanced/webui/src/pages/MemoryDetail.tsx new file mode 100644 index 00000000..73750958 --- /dev/null +++ b/backends/advanced/webui/src/pages/MemoryDetail.tsx @@ -0,0 +1,331 @@ +import { useState, useEffect } from 'react' +import { useParams, useNavigate } from 'react-router-dom' +import { ArrowLeft, Calendar, Tag, Trash2, RefreshCw } from 'lucide-react' +import { memoriesApi } from '../services/api' +import { useAuth } from '../contexts/AuthContext' + +interface Memory { + id: string + memory: string + category?: string + created_at: string + updated_at: string + user_id: string + score?: number + metadata?: { + name?: string + timeRanges?: Array<{ + start: string + end: string + name?: string + }> + isPerson?: boolean + isEvent?: boolean + isPlace?: boolean + extractedWith?: { + model: string + timestamp: string + } + [key: string]: any + } + hash?: string + role?: string +} + +export default function MemoryDetail() { + const { id } = useParams<{ id: string }>() + const navigate = useNavigate() + const { user } = useAuth() + const [memory, setMemory] = useState(null) + const [loading, setLoading] = useState(true) + const [error, setError] = useState(null) + + const loadMemory = async () => { + if (!user?.id || !id) { + console.log('โญ๏ธ MemoryDetail: Missing user or id', { userId: user?.id, memoryId: id }) + return + } + + try { + console.log('๐Ÿ” MemoryDetail: Loading memory', id) + setLoading(true) + setError(null) + const response = await memoriesApi.getAll(user.id) + const memoriesData = response.data.memories || response.data || [] + console.log('๐Ÿ“ฆ MemoryDetail: 
Loaded memories', memoriesData.length) + + // Find the specific memory by ID + const foundMemory = memoriesData.find((m: Memory) => m.id === id) + console.log('๐ŸŽฏ MemoryDetail: Found memory?', !!foundMemory, foundMemory?.id) + + if (foundMemory) { + setMemory(foundMemory) + } else { + setError('Memory not found') + } + } catch (err: any) { + console.error('โŒ Failed to load memory:', err) + setError(err.message || 'Failed to load memory') + } finally { + setLoading(false) + } + } + + const handleDelete = async () => { + if (!memory || !id) return + + const confirmed = window.confirm('Are you sure you want to delete this memory?') + if (!confirmed) return + + try { + await memoriesApi.delete(id) + navigate('/memories') + } catch (err: any) { + console.error('โŒ Failed to delete memory:', err) + alert('Failed to delete memory: ' + (err.message || 'Unknown error')) + } + } + + useEffect(() => { + loadMemory() + }, [id, user?.id]) + + const formatDate = (dateString: string) => { + try { + return new Date(dateString).toLocaleString('en-US', { + year: 'numeric', + month: 'short', + day: 'numeric', + hour: '2-digit', + minute: '2-digit' + }) + } catch { + return dateString + } + } + + const getMemoryTypeIcon = () => { + if (memory?.metadata?.isEvent) return '๐Ÿ“…' + if (memory?.metadata?.isPerson) return '๐Ÿ‘ค' + if (memory?.metadata?.isPlace) return '๐Ÿ“' + return '๐Ÿง ' + } + + const getMemoryTypeLabel = () => { + if (memory?.metadata?.isEvent) return 'Event' + if (memory?.metadata?.isPerson) return 'Person' + if (memory?.metadata?.isPlace) return 'Place' + return 'Memory' + } + + if (loading) { + return ( +
+
+ +
+
+ + Loading memory... +
+
+ ) + } + + if (error || !memory) { + return ( +
+
+ +
+
+

+ {error || 'Memory not found'} +

+
+
+ ) + } + + return ( +
+ {/* Header */} +
+ + +
+ + {/* Main Content */} +
+ {/* Left Column - Memory Content */} +
+ {/* Memory Card */} +
+
+
{getMemoryTypeIcon()}
+
+
+ + {getMemoryTypeLabel()} + + {memory.category && ( + + + {memory.category} + + )} +
+ {memory.metadata?.name && ( +

+ {memory.metadata.name} +

+ )} +

+ {memory.memory} +

+
+
+
+ + {/* Time Ranges */} + {memory.metadata?.timeRanges && memory.metadata.timeRanges.length > 0 && ( +
+

+ + Time Ranges +

+
+ {memory.metadata.timeRanges.map((range, index) => ( +
+ +
+ {range.name && ( +
+ {range.name} +
+ )} +
+
Start: {formatDate(range.start)}
+
End: {formatDate(range.end)}
+
+
+
+ ))} +
+
+ )} +
+ + {/* Right Column - Metadata */} +
+ {/* Metadata Card */} +
+

+ Metadata +

+
+
+
Created:
+
+ {formatDate(memory.created_at)} +
+
+
+
Updated:
+
+ {formatDate(memory.updated_at)} +
+
+ {memory.score !== undefined && memory.score !== null && ( +
+
Score:
+
+ {memory.score.toFixed(3)} +
+
+ )} + {memory.hash && ( +
+
Hash:
+
+ {memory.hash.substring(0, 12)}... +
+
+ )} +
+
+ + {/* Extraction Metadata */} + {memory.metadata?.extractedWith && ( +
+

+ Extraction +

+
+
+
Model:
+
+ {memory.metadata.extractedWith.model} +
+
+
+
Time:
+
+ {formatDate(memory.metadata.extractedWith.timestamp)} +
+
+
+
+ )} + + {/* Additional Metadata */} + {memory.metadata && Object.keys(memory.metadata).filter(key => + !['name', 'timeRanges', 'isPerson', 'isEvent', 'isPlace', 'extractedWith'].includes(key) + ).length > 0 && ( +
+

+ Additional Data +

+
+ {Object.entries(memory.metadata) + .filter(([key]) => !['name', 'timeRanges', 'isPerson', 'isEvent', 'isPlace', 'extractedWith'].includes(key)) + .map(([key, value]) => ( +
+
{key}:
+
+ {typeof value === 'object' ? JSON.stringify(value) : String(value)} +
+
+ ))} +
+
+ )} +
+
+
+ ) +} From 45add2edfb98a9a724c99ce03c1e6dec98a5e10b Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Wed, 3 Dec 2025 23:02:47 +0000 Subject: [PATCH 10/31] added timeline views --- .../advanced/webui/public/frappe-gantt.css | 1 + backends/advanced/webui/src/App.tsx | 18 +- .../webui/src/components/layout/Layout.tsx | 3 +- .../webui/src/hooks/useAudioRecording.ts | 4 +- .../advanced/webui/src/hooks/useD3Zoom.ts | 82 ++ .../src/hooks/useSimpleAudioRecording.ts | 4 +- .../webui/src/pages/ConversationsRouter.tsx | 48 ++ .../webui/src/pages/ConversationsTimeline.tsx | 321 ++++++++ .../webui/src/pages/FrappeGanttTimeline.tsx | 707 ++++++++++++++++++ .../webui/src/pages/MyceliaTimeline.tsx | 441 +++++++++++ .../webui/src/pages/ReactGanttTimeline.tsx | 359 +++++++++ .../webui/src/pages/TimelineRouter.tsx | 86 +++ backends/advanced/webui/src/services/api.ts | 8 +- .../webui/src/types/react-gantt-timeline.d.ts | 45 ++ 14 files changed, 2117 insertions(+), 10 deletions(-) create mode 100644 backends/advanced/webui/public/frappe-gantt.css create mode 100644 backends/advanced/webui/src/hooks/useD3Zoom.ts create mode 100644 backends/advanced/webui/src/pages/ConversationsRouter.tsx create mode 100644 backends/advanced/webui/src/pages/ConversationsTimeline.tsx create mode 100644 backends/advanced/webui/src/pages/FrappeGanttTimeline.tsx create mode 100644 backends/advanced/webui/src/pages/MyceliaTimeline.tsx create mode 100644 backends/advanced/webui/src/pages/ReactGanttTimeline.tsx create mode 100644 backends/advanced/webui/src/pages/TimelineRouter.tsx create mode 100644 backends/advanced/webui/src/types/react-gantt-timeline.d.ts diff --git a/backends/advanced/webui/public/frappe-gantt.css b/backends/advanced/webui/public/frappe-gantt.css new file mode 100644 index 00000000..73d5781b --- /dev/null +++ b/backends/advanced/webui/public/frappe-gantt.css @@ -0,0 +1 @@ +:root{--g-arrow-color: #1f2937;--g-bar-color: #fff;--g-bar-border: #fff;--g-tick-color-thick: 
#ededed;--g-tick-color: #f3f3f3;--g-actions-background: #f3f3f3;--g-border-color: #ebeff2;--g-text-muted: #7c7c7c;--g-text-light: #fff;--g-text-dark: #171717;--g-progress-color: #dbdbdb;--g-handle-color: #37352f;--g-weekend-label-color: #dcdce4;--g-expected-progress: #c4c4e9;--g-header-background: #fff;--g-row-color: #fdfdfd;--g-row-border-color: #c7c7c7;--g-today-highlight: #37352f;--g-popup-actions: #ebeff2;--g-weekend-highlight-color: #f7f7f7}.gantt-container{line-height:14.5px;position:relative;overflow:auto;font-size:12px;height:var(--gv-grid-height);width:100%;border-radius:8px}.gantt-container .popup-wrapper{position:absolute;top:0;left:0;background:#fff;box-shadow:0 10px 24px -3px #0003;padding:10px;border-radius:5px;width:max-content;z-index:1000}.gantt-container .popup-wrapper .title{margin-bottom:2px;color:var(--g-text-dark);font-size:.85rem;font-weight:650;line-height:15px}.gantt-container .popup-wrapper .subtitle{color:var(--g-text-dark);font-size:.8rem;margin-bottom:5px}.gantt-container .popup-wrapper .details{color:var(--g-text-muted);font-size:.7rem}.gantt-container .popup-wrapper .actions{margin-top:10px;margin-left:3px}.gantt-container .popup-wrapper .action-btn{border:none;padding:5px 8px;background-color:var(--g-popup-actions);border-right:1px solid var(--g-text-light)}.gantt-container .popup-wrapper .action-btn:hover{background-color:brightness(97%)}.gantt-container .popup-wrapper .action-btn:first-child{border-top-left-radius:4px;border-bottom-left-radius:4px}.gantt-container .popup-wrapper .action-btn:last-child{border-right:none;border-top-right-radius:4px;border-bottom-right-radius:4px}.gantt-container .grid-header{height:calc(var(--gv-lower-header-height) + var(--gv-upper-header-height) + 10px);background-color:var(--g-header-background);position:sticky;top:0;left:0;border-bottom:1px solid var(--g-row-border-color);z-index:1000}.gantt-container .lower-text,.gantt-container .upper-text{text-anchor:middle}.gantt-container 
.upper-header{height:var(--gv-upper-header-height)}.gantt-container .lower-header{height:var(--gv-lower-header-height)}.gantt-container .lower-text{font-size:12px;position:absolute;width:calc(var(--gv-column-width) * .8);height:calc(var(--gv-lower-header-height) * .8);margin:0 calc(var(--gv-column-width) * .1);align-content:center;text-align:center;color:var(--g-text-muted)}.gantt-container .upper-text{position:absolute;width:fit-content;font-weight:500;font-size:14px;color:var(--g-text-dark);height:calc(var(--gv-lower-header-height) * .66)}.gantt-container .current-upper{position:sticky;left:0!important;padding-left:17px;background:#fff}.gantt-container .side-header{position:sticky;top:0;right:0;float:right;z-index:1000;line-height:20px;font-weight:400;width:max-content;margin-left:auto;padding-right:10px;padding-top:10px;background:var(--g-header-background);display:flex}.gantt-container .side-header *{transition-property:background-color;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s;background-color:var(--g-actions-background);border-radius:.5rem;border:none;padding:5px 8px;color:var(--g-text-dark);font-size:14px;letter-spacing:.02em;font-weight:420;box-sizing:content-box;margin-right:5px}.gantt-container .side-header *:last-child{margin-right:0}.gantt-container .side-header *:hover{filter:brightness(97.5%)}.gantt-container .side-header select{width:60px;padding-top:2px;padding-bottom:2px}.gantt-container .side-header select:focus{outline:none}.gantt-container .date-range-highlight{background-color:var(--g-progress-color);border-radius:12px;height:calc(var(--gv-lower-header-height) - 6px);top:calc(var(--gv-upper-header-height) + 5px);position:absolute}.gantt-container .current-highlight{position:absolute;background:var(--g-today-highlight);width:1px;z-index:999}.gantt-container .current-ball-highlight{position:absolute;background:var(--g-today-highlight);z-index:1001;border-radius:50%}.gantt-container 
.current-date-highlight{background:var(--g-today-highlight);color:var(--g-text-light);border-radius:5px}.gantt-container .holiday-label{position:absolute;top:0;left:0;opacity:0;z-index:1000;background:--g-weekend-label-color;border-radius:5px;padding:2px 5px}.gantt-container .holiday-label.show{opacity:100}.gantt-container .extras{position:sticky;left:0}.gantt-container .extras .adjust{position:absolute;left:8px;top:calc(var(--gv-grid-height) - 60px);background-color:#000000b3;color:#fff;border:none;padding:8px;border-radius:3px}.gantt-container .hide{display:none}.gantt{user-select:none;-webkit-user-select:none;position:absolute}.gantt .grid-background{fill:none}.gantt .grid-row{fill:var(--g-row-color)}.gantt .row-line{stroke:var(--g-border-color)}.gantt .tick{stroke:var(--g-tick-color);stroke-width:.4}.gantt .tick.thick{stroke:var(--g-tick-color-thick);stroke-width:.7}.gantt .arrow{fill:none;stroke:var(--g-arrow-color);stroke-width:1.5}.gantt .bar-wrapper .bar{fill:var(--g-bar-color);stroke:var(--g-bar-border);stroke-width:0;transition:stroke-width .3s ease}.gantt .bar-progress{fill:var(--g-progress-color);border-radius:4px}.gantt .bar-expected-progress{fill:var(--g-expected-progress)}.gantt .bar-invalid{fill:transparent;stroke:var(--g-bar-border);stroke-width:1;stroke-dasharray:5}:is(.gantt .bar-invalid)~.bar-label{fill:var(--g-text-light)}.gantt .bar-label{fill:var(--g-text-dark);dominant-baseline:central;font-family:Helvetica;font-size:13px;font-weight:400}.gantt .bar-label.big{fill:var(--g-text-dark);text-anchor:start}.gantt .handle{fill:var(--g-handle-color);opacity:0;transition:opacity .3s ease}.gantt .handle.active,.gantt .handle.visible{cursor:ew-resize;opacity:1}.gantt .handle.progress{fill:var(--g-text-muted)}.gantt .bar-wrapper{cursor:pointer}.gantt .bar-wrapper .bar{outline:1px solid var(--g-row-border-color);border-radius:3px}.gantt .bar-wrapper:hover .bar{transition:transform .3s ease}.gantt .bar-wrapper:hover .date-range-highlight{display:block} 
diff --git a/backends/advanced/webui/src/App.tsx b/backends/advanced/webui/src/App.tsx index 6e497dff..6f7f3e72 100644 --- a/backends/advanced/webui/src/App.tsx +++ b/backends/advanced/webui/src/App.tsx @@ -4,8 +4,10 @@ import { ThemeProvider } from './contexts/ThemeContext' import Layout from './components/layout/Layout' import LoginPage from './pages/LoginPage' import Chat from './pages/Chat' -import Conversations from './pages/Conversations' +import ConversationsRouter from './pages/ConversationsRouter' import MemoriesRouter from './pages/MemoriesRouter' +import MemoryDetail from './pages/MemoryDetail' +import TimelineRouter from './pages/TimelineRouter' import Users from './pages/Users' import System from './pages/System' import Upload from './pages/Upload' @@ -31,7 +33,7 @@ function App() { }> - + } /> - + + + } /> + + } /> } /> + + + + } /> diff --git a/backends/advanced/webui/src/components/layout/Layout.tsx b/backends/advanced/webui/src/components/layout/Layout.tsx index 0243d00f..f4caf629 100644 --- a/backends/advanced/webui/src/components/layout/Layout.tsx +++ b/backends/advanced/webui/src/components/layout/Layout.tsx @@ -1,5 +1,5 @@ import { Link, useLocation, Outlet } from 'react-router-dom' -import { Music, MessageSquare, MessageCircle, Brain, Users, Upload, Settings, LogOut, Sun, Moon, Shield, Radio, Layers } from 'lucide-react' +import { Music, MessageSquare, MessageCircle, Brain, Users, Upload, Settings, LogOut, Sun, Moon, Shield, Radio, Layers, Calendar } from 'lucide-react' import { useAuth } from '../../contexts/AuthContext' import { useTheme } from '../../contexts/ThemeContext' @@ -13,6 +13,7 @@ export default function Layout() { { path: '/chat', label: 'Chat', icon: MessageCircle }, { path: '/conversations', label: 'Conversations', icon: MessageSquare }, { path: '/memories', label: 'Memories', icon: Brain }, + { path: '/timeline', label: 'Timeline', icon: Calendar }, { path: '/users', label: 'User Management', icon: Users }, ...(isAdmin ? 
[ { path: '/upload', label: 'Upload Audio', icon: Upload }, diff --git a/backends/advanced/webui/src/hooks/useAudioRecording.ts b/backends/advanced/webui/src/hooks/useAudioRecording.ts index 5fc2091b..3e303cbc 100644 --- a/backends/advanced/webui/src/hooks/useAudioRecording.ts +++ b/backends/advanced/webui/src/hooks/useAudioRecording.ts @@ -100,8 +100,8 @@ export const useAudioRecording = (): UseAudioRecordingReturn => { const audioContextRef = useRef(null) const analyserRef = useRef(null) const processorRef = useRef(null) - const durationIntervalRef = useRef() - const keepAliveIntervalRef = useRef() + const durationIntervalRef = useRef>() + const keepAliveIntervalRef = useRef>() const audioProcessingStartedRef = useRef(false) const chunkCountRef = useRef(0) // Note: Legacy message queue code removed as it was unused diff --git a/backends/advanced/webui/src/hooks/useD3Zoom.ts b/backends/advanced/webui/src/hooks/useD3Zoom.ts new file mode 100644 index 00000000..8f60b204 --- /dev/null +++ b/backends/advanced/webui/src/hooks/useD3Zoom.ts @@ -0,0 +1,82 @@ +import { useCallback, useEffect, useMemo, useRef, useState } from 'react' +import * as d3 from 'd3' + +interface UseD3ZoomOptions { + onZoom?: (transform: d3.ZoomTransform) => void + scaleExtent?: [number, number] + wheelDelta?: (event: WheelEvent) => number +} + +export function useD3Zoom(options: UseD3ZoomOptions = {}) { + const { + onZoom, + scaleExtent = [0.5, 5], + wheelDelta = (event) => -event.deltaY * 0.002 + } = options + + const svgRef = useRef(null) + const [transform, setTransform] = useState(d3.zoomIdentity) + + const handleZoom = useCallback( + (event: d3.D3ZoomEvent) => { + const t = event.transform + setTransform(t) + onZoom?.(t) + + // Synchronize zoom across all zoomable SVG elements + d3.selectAll('.zoomable').each(function () { + const svg = d3.select(this) + const node = svg.node() + + // Skip the source element + if (!node || node.contains(event.sourceEvent?.target as Element)) { + return + } + 
+ svg.property('__zoom', t) + }) + }, + [onZoom] + ) + + const zoomBehavior = useMemo( + () => + d3.zoom() + .scaleExtent(scaleExtent) + .on('zoom', handleZoom) + .wheelDelta(wheelDelta) + .touchable(() => true) + .filter((event) => { + if (event.type === 'dblclick') return false + if (event.button && event.button !== 0) return false + return true + }), + [handleZoom, scaleExtent, wheelDelta] + ) + + useEffect(() => { + if (!svgRef.current) return + + const svg = d3.select(svgRef.current) + const node = svg.node() + + if (node) { + node.style.touchAction = 'none' + node.style.webkitUserSelect = 'none' + node.style.userSelect = 'none' + } + + svg.call(zoomBehavior as any) + svg.property('__zoom', transform) + + return () => { + svg.on('.zoom', null) + } + }, [zoomBehavior, transform]) + + return { + svgRef, + transform, + zoomBehavior + } +} diff --git a/backends/advanced/webui/src/hooks/useSimpleAudioRecording.ts b/backends/advanced/webui/src/hooks/useSimpleAudioRecording.ts index 268544c7..e0a1badc 100644 --- a/backends/advanced/webui/src/hooks/useSimpleAudioRecording.ts +++ b/backends/advanced/webui/src/hooks/useSimpleAudioRecording.ts @@ -58,8 +58,8 @@ export const useSimpleAudioRecording = (): SimpleAudioRecordingReturn => { const audioContextRef = useRef(null) const analyserRef = useRef(null) const processorRef = useRef(null) - const durationIntervalRef = useRef() - const keepAliveIntervalRef = useRef() + const durationIntervalRef = useRef>() + const keepAliveIntervalRef = useRef>() const chunkCountRef = useRef(0) const audioProcessingStartedRef = useRef(false) diff --git a/backends/advanced/webui/src/pages/ConversationsRouter.tsx b/backends/advanced/webui/src/pages/ConversationsRouter.tsx new file mode 100644 index 00000000..c7e6e95c --- /dev/null +++ b/backends/advanced/webui/src/pages/ConversationsRouter.tsx @@ -0,0 +1,48 @@ +import { useState } from 'react' +import Conversations from './Conversations' +import ConversationsTimeline from 
'./ConversationsTimeline' + +export default function ConversationsRouter() { + const [activeTab, setActiveTab] = useState<'classic' | 'timeline'>('classic') + + return ( +
+ {/* Tab Navigation */} +
+ +
+ + {/* Content */} + {activeTab === 'classic' ? ( + + ) : ( + + )} +
+ ) +} diff --git a/backends/advanced/webui/src/pages/ConversationsTimeline.tsx b/backends/advanced/webui/src/pages/ConversationsTimeline.tsx new file mode 100644 index 00000000..5c3f748f --- /dev/null +++ b/backends/advanced/webui/src/pages/ConversationsTimeline.tsx @@ -0,0 +1,321 @@ +import { useState, useEffect } from 'react' +import { MessageSquare, RefreshCw, User, Clock, ChevronDown, ChevronUp } from 'lucide-react' +import { VerticalTimeline, VerticalTimelineElement } from 'react-vertical-timeline-component' +import 'react-vertical-timeline-component/style.min.css' +import { conversationsApi } from '../services/api' + +interface Conversation { + conversation_id?: string + audio_uuid: string + title?: string + summary?: string + detailed_summary?: string + created_at?: string + client_id: string + segment_count?: number + memory_count?: number + audio_path?: string + cropped_audio_path?: string + duration_seconds?: number + has_memory?: boolean + transcript?: string + segments?: Array<{ + text: string + speaker: string + start: number + end: number + confidence?: number + }> + active_transcript_version?: string + active_memory_version?: string + transcript_version_count?: number + memory_version_count?: number + deleted?: boolean + deletion_reason?: string + deleted_at?: string +} + +interface ConversationCardProps { + conversation: Conversation + formatDuration: (seconds: number) => string +} + +function ConversationCard({ conversation, formatDuration }: ConversationCardProps) { + const [isExpanded, setIsExpanded] = useState(false) + + return ( +
+ {/* Card Header - Always visible */} +
setIsExpanded(!isExpanded)} + > +
+

+ {conversation.title || 'Conversation'} +

+ {isExpanded ? ( + + ) : ( + + )} +
+ + {conversation.summary && ( +

+ {conversation.summary} +

+ )} + +
+ + + {conversation.client_id} + + {conversation.segment_count !== undefined && ( + + {conversation.segment_count} segments + + )} + {conversation.memory_count !== undefined && conversation.memory_count > 0 && ( + + {conversation.memory_count} memories + + )} + {conversation.duration_seconds && ( + + + {formatDuration(conversation.duration_seconds)} + + )} + {conversation.deleted && ( + + Failed: {conversation.deletion_reason || 'Unknown'} + + )} +
+
+ + {/* Expanded Details */} + {isExpanded && ( +
+ {/* Detailed Summary */} + {conversation.detailed_summary && ( +
+

Detailed Summary

+

{conversation.detailed_summary}

+
+ )} + + {/* Transcript */} + {conversation.transcript && ( +
+

Transcript

+
+ {conversation.transcript} +
+
+ )} + + {/* Segments */} + {conversation.segments && conversation.segments.length > 0 && ( +
+

Segments ({conversation.segments.length})

+
+ {conversation.segments.map((segment, idx) => ( +
+
+ {segment.speaker} + + {Math.floor(segment.start)}s - {Math.floor(segment.end)}s + +
+

{segment.text}

+ {segment.confidence && ( + + Confidence: {(segment.confidence * 100).toFixed(1)}% + + )} +
+ ))} +
+
+ )} + + {/* Metadata */} +
+ {conversation.conversation_id && ( +
+ ID:{' '} + {conversation.conversation_id.slice(0, 8)}... +
+ )} + {conversation.audio_uuid && ( +
+ Audio UUID:{' '} + {conversation.audio_uuid.slice(0, 8)}... +
+ )} + {conversation.active_transcript_version && ( +
+ Transcript Version:{' '} + {conversation.active_transcript_version} +
+ )} + {conversation.transcript_version_count && ( +
+ Total Versions:{' '} + {conversation.transcript_version_count} +
+ )} +
+ + {/* Audio Paths */} + {(conversation.audio_path || conversation.cropped_audio_path) && ( +
+ {conversation.audio_path && ( +
+ Audio:{' '} + {conversation.audio_path} +
+ )} + {conversation.cropped_audio_path && ( +
+ Cropped:{' '} + {conversation.cropped_audio_path} +
+ )} +
+ )} +
+ )} +
+ ) +} + +export default function ConversationsTimeline() { + const [conversations, setConversations] = useState([]) + const [loading, setLoading] = useState(true) + const [error, setError] = useState(null) + + const loadConversations = async () => { + try { + setLoading(true) + const response = await conversationsApi.getAll() + const conversationsList = response.data.conversations || [] + setConversations(conversationsList) + setError(null) + } catch (err: any) { + setError(err.message || 'Failed to load conversations') + } finally { + setLoading(false) + } + } + + useEffect(() => { + loadConversations() + }, []) + + const formatDate = (timestamp: number | string): Date => { + if (typeof timestamp === 'string') { + const isoString = timestamp.endsWith('Z') || timestamp.includes('+') || timestamp.includes('T') && timestamp.split('T')[1].includes('-') + ? timestamp + : timestamp + 'Z' + return new Date(isoString) + } + if (timestamp === 0) { + return new Date() + } + return new Date(timestamp * 1000) + } + + const formatDuration = (seconds: number) => { + const minutes = Math.floor(seconds / 60) + const secs = Math.floor(seconds % 60) + return `${minutes}:${secs.toString().padStart(2, '0')}` + } + + if (loading) { + return ( +
+
+ Loading conversations... +
+ ) + } + + if (error) { + return ( +
+
{error}
+ +
+ ) + } + + return ( +
+ {/* Header */} +
+
+ +

+ Conversations Timeline +

+
+ +
+ + {/* Timeline */} + {conversations.length === 0 ? ( +
+ +

No conversations found

+
+ ) : ( + + {conversations.map((conv) => { + const date = formatDate(conv.created_at || '') + + return ( + } + contentStyle={{ + background: conv.deleted ? '#fee2e2' : '#fff', + color: '#1f2937', + boxShadow: '0 3px 0 #ddd' + }} + contentArrowStyle={{ borderRight: '7px solid #fff' }} + > + + + ) + })} + + )} +
+ ) +} diff --git a/backends/advanced/webui/src/pages/FrappeGanttTimeline.tsx b/backends/advanced/webui/src/pages/FrappeGanttTimeline.tsx new file mode 100644 index 00000000..d8da0aed --- /dev/null +++ b/backends/advanced/webui/src/pages/FrappeGanttTimeline.tsx @@ -0,0 +1,707 @@ +import { useState, useEffect, useRef } from 'react' +import { Calendar, RefreshCw, AlertCircle, ZoomIn, ZoomOut } from 'lucide-react' +import Gantt from 'frappe-gantt' +import { memoriesApi } from '../services/api' +import { useAuth } from '../contexts/AuthContext' + +interface TimeRange { + start: string + end: string + name?: string +} + +interface MemoryWithTimeRange { + id: string + content: string + created_at: string + metadata?: { + name?: string + timeRanges?: TimeRange[] + isPerson?: boolean + isEvent?: boolean + isPlace?: boolean + } +} + +interface GanttTask { + id: string + name: string + start: string + end: string + progress: number + custom_class?: string +} + +export default function FrappeGanttTimeline() { + const [memories, setMemories] = useState([]) + const [loading, setLoading] = useState(false) + const [error, setError] = useState(null) + const [useDemoData, setUseDemoData] = useState(false) + const [currentViewMode, setCurrentViewMode] = useState('Week') + const [zoomScale, setZoomScale] = useState(1) // CSS transform scale: 0.5 = 50%, 1 = 100%, 2 = 200% + const ganttContainerRef = useRef(null) + const ganttInstance = useRef(null) + const scrollContainerRef = useRef(null) + const isDragging = useRef(false) + const startX = useRef(0) + const scrollLeft = useRef(0) + const { user } = useAuth() + + // Demo data for testing the Timeline visualization - spans multiple years + const getDemoMemories = (): MemoryWithTimeRange[] => { + return [ + { + id: 'demo-graduation', + content: 'College graduation ceremony and celebration dinner with family.', + created_at: '2024-05-20T14:00:00', + metadata: { + name: 'College Graduation', + isEvent: true, + timeRanges: [ + { + name: 
'Graduation Ceremony', + start: '2024-05-20T14:00:00', + end: '2024-05-20T17:00:00' + }, + { + name: 'Celebration Dinner', + start: '2024-05-20T19:00:00', + end: '2024-05-20T22:00:00' + } + ] + } + }, + { + id: 'demo-wedding', + content: "Sarah and Tom's wedding was a beautiful celebration. The ceremony started at 3 PM, followed by a reception that lasted until midnight.", + created_at: '2025-06-15T15:00:00', + metadata: { + name: "Sarah & Tom's Wedding", + isEvent: true, + timeRanges: [ + { + name: 'Wedding Ceremony', + start: '2025-06-15T15:00:00', + end: '2025-06-15T16:30:00' + }, + { + name: 'Reception', + start: '2025-06-15T18:00:00', + end: '2025-06-16T00:00:00' + } + ] + } + }, + { + id: 'demo-conference', + content: 'Tech conference with keynote presentations and networking sessions throughout the day.', + created_at: '2025-09-20T09:00:00', + metadata: { + name: 'Tech Conference 2025', + isEvent: true, + timeRanges: [ + { + name: 'Morning Keynote', + start: '2025-09-20T09:00:00', + end: '2025-09-20T11:00:00' + }, + { + name: 'Workshops', + start: '2025-09-20T13:00:00', + end: '2025-09-20T17:00:00' + } + ] + } + }, + { + id: 'demo-vacation', + content: 'Week-long vacation at the beach house with family.', + created_at: '2026-07-01T14:00:00', + metadata: { + name: 'Summer Vacation 2026', + isPlace: true, + timeRanges: [ + { + name: 'Beach House Stay', + start: '2026-07-01T14:00:00', + end: '2026-07-07T12:00:00' + } + ] + } + }, + { + id: 'demo-reunion', + content: 'Family reunion at the old homestead with extended family gathering.', + created_at: '2026-12-25T12:00:00', + metadata: { + name: 'Family Reunion', + isEvent: true, + timeRanges: [ + { + name: 'Christmas Gathering', + start: '2026-12-25T12:00:00', + end: '2026-12-25T20:00:00' + } + ] + } + } + ] + } + + const loadMemories = async () => { + if (!user?.id) return + + try { + setLoading(true) + setError(null) + const response = await memoriesApi.getAll(user.id) + + // Extract memories from response + 
const memoriesData = response.data.memories || response.data || [] + + // Filter memories that have timeRanges + const memoriesWithTime = memoriesData.filter((m: MemoryWithTimeRange) => + m.metadata?.timeRanges && m.metadata.timeRanges.length > 0 + ) + + console.log('๐Ÿ“… Timeline: Total memories:', memoriesData.length) + console.log('๐Ÿ“… Timeline: Memories with timeRanges:', memoriesWithTime.length) + if (memoriesWithTime.length > 0) { + console.log('๐Ÿ“… Timeline: First memory with timeRange:', memoriesWithTime[0]) + } + + setMemories(memoriesWithTime) + } catch (err: any) { + console.error('โŒ Timeline loading error:', err) + setError(err.message || 'Failed to load timeline data') + } finally { + setLoading(false) + } + } + + const convertMemoriesToGanttTasks = (memories: MemoryWithTimeRange[]): GanttTask[] => { + const tasks: GanttTask[] = [] + + memories.forEach((memory) => { + const timeRanges = memory.metadata?.timeRanges || [] + + timeRanges.forEach((range, index) => { + // Get the task name from the range name, memory metadata name, or content preview + const taskName = range.name || + memory.metadata?.name || + memory.content.substring(0, 50) + (memory.content.length > 50 ? '...' : '') + + // Determine custom class based on memory type + let customClass = 'default' + if (memory.metadata?.isEvent) customClass = 'event' + else if (memory.metadata?.isPerson) customClass = 'person' + else if (memory.metadata?.isPlace) customClass = 'place' + + tasks.push({ + id: `${memory.id}-${index}`, + name: taskName, + start: range.start, + end: range.end, + progress: 100, // All memories are completed events + custom_class: customClass + }) + }) + }) + + return tasks + } + + useEffect(() => { + if (!useDemoData) { + loadMemories() + } else { + setMemories(getDemoMemories()) + } + }, [user?.id, useDemoData]) + + useEffect(() => { + const displayMemories = useDemoData ? 
getDemoMemories() : memories + + if (!ganttContainerRef.current || displayMemories.length === 0) { + return + } + + // Convert memories to Gantt tasks + const tasks = convertMemoriesToGanttTasks(displayMemories) + + if (tasks.length === 0) { + return + } + + console.log('๐Ÿ“Š Creating Gantt chart with tasks:', tasks) + + try { + // Clear existing Gantt instance + if (ganttInstance.current) { + ganttContainerRef.current.innerHTML = '' + } + + // Create new Gantt instance with type assertion for custom_popup_html + ganttInstance.current = new Gantt(ganttContainerRef.current, tasks, { + view_mode: currentViewMode, + bar_height: 30, + bar_corner_radius: 3, + arrow_curve: 5, + padding: 18, + date_format: 'YYYY-MM-DD', + language: 'en', + custom_popup_html: (task: any) => { + const memory = displayMemories.find(m => task.id.startsWith(m.id)) + const startDate = new Date(task._start) + const endDate = new Date(task._end) + const formatOptions: Intl.DateTimeFormatOptions = { + year: 'numeric', + month: 'short', + day: 'numeric', + hour: '2-digit', + minute: '2-digit' + } + return ` + + ` + } + } as any) + + console.log('โœ… Gantt chart created successfully') + + // Add year labels to the timeline header + setTimeout(() => { + try { + const container = ganttContainerRef.current?.querySelector('.gantt-container') + if (!container) return + + // Find all unique years from tasks + const years = new Set() + tasks.forEach(task => { + const startYear = new Date(task.start).getFullYear() + const endYear = new Date(task.end).getFullYear() + years.add(startYear) + if (startYear !== endYear) years.add(endYear) + }) + + const sortedYears = Array.from(years).sort() + if (sortedYears.length <= 1) return // No need for year labels if single year + + // Get the upper header div element (HTML, not SVG) + const upperHeader = container.querySelector('.upper-header') + if (!upperHeader) return + + // Add year labels as HTML divs in a simple row at the top + sortedYears.forEach((year, index) 
=> { + const yearLabel = document.createElement('div') + yearLabel.className = 'year-label' + yearLabel.textContent = String(year) + yearLabel.style.position = 'absolute' + yearLabel.style.left = `${20 + (index * 70)}px` // Simple horizontal spacing + yearLabel.style.top = '2px' + yearLabel.style.fontSize = '18px' + yearLabel.style.fontWeight = '700' + yearLabel.style.color = '#2563eb' // Blue color + yearLabel.style.padding = '2px 8px' + yearLabel.style.backgroundColor = '#eff6ff' + yearLabel.style.borderRadius = '4px' + yearLabel.style.zIndex = '10' + + upperHeader.appendChild(yearLabel) + }) + + } catch (error) { + console.warn('Failed to add year labels:', error) + } + }, 150) // Small delay to ensure DOM is fully rendered + } catch (err) { + console.error('โŒ Error creating Gantt chart:', err) + setError('Failed to create timeline visualization') + } + + return () => { + if (ganttInstance.current && ganttContainerRef.current) { + ganttContainerRef.current.innerHTML = '' + ganttInstance.current = null + } + } + }, [memories, useDemoData, currentViewMode]) + + // Drag-to-scroll functionality + useEffect(() => { + const container = scrollContainerRef.current + if (!container) return + + const handleMouseDown = (e: MouseEvent) => { + // Only start drag if not clicking on interactive elements + const target = e.target as HTMLElement + if (target.closest('.bar-wrapper') || target.closest('button')) { + return + } + + isDragging.current = true + startX.current = e.pageX + scrollLeft.current = container.scrollLeft + container.style.cursor = 'grabbing' + e.preventDefault() + } + + const handleMouseLeave = () => { + isDragging.current = false + container.style.cursor = 'grab' + } + + const handleMouseUp = () => { + isDragging.current = false + container.style.cursor = 'grab' + } + + const handleMouseMove = (e: MouseEvent) => { + if (!isDragging.current) return + e.preventDefault() + const x = e.pageX + const walk = (x - startX.current) * 1.5 // Scroll speed multiplier 
+ container.scrollLeft = scrollLeft.current - walk + } + + // Add event listeners with capture phase for better control + container.addEventListener('mousedown', handleMouseDown, true) + container.addEventListener('mouseleave', handleMouseLeave) + container.addEventListener('mouseup', handleMouseUp) + container.addEventListener('mousemove', handleMouseMove) + + return () => { + container.removeEventListener('mousedown', handleMouseDown, true) + container.removeEventListener('mouseleave', handleMouseLeave) + container.removeEventListener('mouseup', handleMouseUp) + container.removeEventListener('mousemove', handleMouseMove) + } + }, []) + + // Mousewheel zoom functionality + useEffect(() => { + const container = scrollContainerRef.current + if (!container) return + + const viewModeOrder = ['Quarter Day', 'Half Day', 'Day', 'Week', 'Month'] + + const handleWheel = (e: WheelEvent) => { + // Only zoom when Ctrl or Cmd is pressed + if (e.ctrlKey || e.metaKey) { + e.preventDefault() + e.stopPropagation() + + const currentIndex = viewModeOrder.indexOf(currentViewMode) + + if (e.deltaY < 0) { + // Zoom in (scroll up = more detailed view) + if (currentIndex > 0) { + setCurrentViewMode(viewModeOrder[currentIndex - 1]) + } + } else if (e.deltaY > 0) { + // Zoom out (scroll down = less detailed view) + if (currentIndex < viewModeOrder.length - 1) { + setCurrentViewMode(viewModeOrder[currentIndex + 1]) + } + } + } + // If no modifier keys, let the browser handle normal horizontal scrolling + } + + container.addEventListener('wheel', handleWheel, { passive: false }) + + return () => { + container.removeEventListener('wheel', handleWheel) + } + }, [currentViewMode]) + + const viewModes = [ + { value: 'Quarter Day', label: 'Quarter Day' }, + { value: 'Half Day', label: 'Half Day' }, + { value: 'Day', label: 'Day' }, + { value: 'Week', label: 'Week' }, + { value: 'Month', label: 'Month' } + ] + + const changeViewMode = (mode: string) => { + setCurrentViewMode(mode) + } + + const 
zoomIn = () => { + setZoomScale(prev => { + const newScale = Math.min(prev + 0.25, 3) // Max 300% + // Store scroll position ratio before zoom + if (scrollContainerRef.current) { + const container = scrollContainerRef.current + const scrollRatio = (container.scrollLeft + container.clientWidth / 2) / container.scrollWidth + + // After state update, restore relative scroll position + setTimeout(() => { + if (scrollContainerRef.current) { + const newScrollLeft = scrollRatio * scrollContainerRef.current.scrollWidth - container.clientWidth / 2 + scrollContainerRef.current.scrollLeft = newScrollLeft + } + }, 0) + } + return newScale + }) + } + + const zoomOut = () => { + setZoomScale(prev => { + const newScale = Math.max(prev - 0.25, 0.5) // Min 50% + // Store scroll position ratio before zoom + if (scrollContainerRef.current) { + const container = scrollContainerRef.current + const scrollRatio = (container.scrollLeft + container.clientWidth / 2) / container.scrollWidth + + // After state update, restore relative scroll position + setTimeout(() => { + if (scrollContainerRef.current) { + const newScrollLeft = scrollRatio * scrollContainerRef.current.scrollWidth - container.clientWidth / 2 + scrollContainerRef.current.scrollLeft = newScrollLeft + } + }, 0) + } + return newScale + }) + } + + if (loading) { + return ( +
+
+

Timeline

+
+
+
+ + Loading timeline data... +
+
+
+ ) + } + + if (error) { + return ( +
+
+

Timeline

+
+
+
+ + {error} +
+
+
+ ) + } + + if (memories.length === 0 && !useDemoData) { + return ( +
+
+

Timeline

+
+ + +
+
+
+ +
+

No Timeline Events

+

+ No memories with time information found. Create memories with dates and times to see them on the timeline. +

+

+ Click "Show Demo" to see how the timeline works with sample data. +

+
+
+
+ ) + } + + return ( +
+ {/* Header */} +
+
+

Timeline (Frappe Gantt) {useDemoData && (Demo Mode)}

+

+ {useDemoData ? getDemoMemories().length : memories.length} {(useDemoData ? getDemoMemories().length : memories.length) === 1 ? 'event' : 'events'} with time information +

+
+
+ {/* Demo mode toggle */} + {useDemoData ? ( + + ) : ( + + )} + {/* Zoom controls */} +
+ +
+ {Math.round(zoomScale * 100)}% +
+ +
+ {/* View mode selector */} +
+ + +
+ +
+
+ + {/* Gantt Chart Container */} +
+ {/* Scrollable Gantt Chart */} +
+
+
+ + {/* Instructions - Fixed, not scrolling */} +
+ ๐Ÿ’ก Drag to scroll horizontally + ๐Ÿ” Hold Ctrl/Cmd + Scroll to zoom in/out +
+ + {/* Legend - Fixed, not scrolling */} +
+
+
+ Event +
+
+
+ Person +
+
+
+ Place +
+
+
+ + {/* Add custom styles for Gantt chart colors */} + +
+ ) +} diff --git a/backends/advanced/webui/src/pages/MyceliaTimeline.tsx b/backends/advanced/webui/src/pages/MyceliaTimeline.tsx new file mode 100644 index 00000000..48a4a24a --- /dev/null +++ b/backends/advanced/webui/src/pages/MyceliaTimeline.tsx @@ -0,0 +1,441 @@ +import { useState, useEffect, useRef } from 'react' +import { Calendar, RefreshCw, AlertCircle } from 'lucide-react' +import { useNavigate } from 'react-router-dom' +import * as d3 from 'd3' +import { memoriesApi } from '../services/api' +import { useAuth } from '../contexts/AuthContext' + +interface TimeRange { + start: string + end: string + name?: string +} + +interface MemoryWithTimeRange { + id: string + content: string + created_at: string + metadata?: { + name?: string + timeRanges?: TimeRange[] + isPerson?: boolean + isEvent?: boolean + isPlace?: boolean + } +} + +interface TimelineTask { + id: string + name: string + start: Date + end: Date + color: string + type: 'event' | 'person' | 'place' +} + +export default function MyceliaTimeline() { + const [memories, setMemories] = useState([]) + const [loading, setLoading] = useState(false) + const [error, setError] = useState(null) + const [useDemoData, setUseDemoData] = useState(false) + const svgRef = useRef(null) + const containerRef = useRef(null) + const tooltipRef = useRef(null) + const [dimensions, setDimensions] = useState({ width: 1000, height: 400 }) + const { user } = useAuth() + const navigate = useNavigate() + + // Demo data + const getDemoMemories = (): MemoryWithTimeRange[] => { + return [ + { + id: 'demo-wedding', + content: "Sarah and Tom's wedding ceremony and reception", + created_at: '2025-12-07T15:00:00', + metadata: { + name: "Wedding", + isEvent: true, + timeRanges: [ + { + name: 'Ceremony', + start: '2025-12-07T15:00:00', + end: '2025-12-07T16:30:00' + }, + { + name: 'Reception', + start: '2025-12-07T18:00:00', + end: '2025-12-07T23:00:00' + } + ] + } + }, + { + id: 'demo-conference', + content: 'Tech conference with 
keynote and workshops', + created_at: '2026-01-15T09:00:00', + metadata: { + name: 'Tech Conference', + isEvent: true, + timeRanges: [ + { + name: 'Keynote', + start: '2026-01-15T09:00:00', + end: '2026-01-15T11:00:00' + } + ] + } + } + ] + } + + const loadMemories = async () => { + if (!user?.id) return + + try { + setLoading(true) + setError(null) + const response = await memoriesApi.getAll(user.id) + const memoriesData = response.data.memories || response.data || [] + const memoriesWithTime = memoriesData.filter((m: MemoryWithTimeRange) => + m.metadata?.timeRanges && m.metadata.timeRanges.length > 0 + ) + setMemories(memoriesWithTime) + } catch (err: any) { + setError(err.message || 'Failed to load timeline data') + } finally { + setLoading(false) + } + } + + const convertToTasks = (memories: MemoryWithTimeRange[]): TimelineTask[] => { + const tasks: TimelineTask[] = [] + memories.forEach((memory) => { + const timeRanges = memory.metadata?.timeRanges || [] + timeRanges.forEach((range, index) => { + let type: 'event' | 'person' | 'place' = 'event' + let color = '#3b82f6' + + if (memory.metadata?.isEvent) { + type = 'event' + color = '#3b82f6' + } else if (memory.metadata?.isPerson) { + type = 'person' + color = '#10b981' + } else if (memory.metadata?.isPlace) { + type = 'place' + color = '#f59e0b' + } + + tasks.push({ + id: `${memory.id}-${index}`, + name: range.name || memory.metadata?.name || memory.content.substring(0, 30), + start: new Date(range.start), + end: new Date(range.end), + color, + type + }) + }) + }) + return tasks + } + + useEffect(() => { + if (!useDemoData) { + loadMemories() + } else { + setMemories(getDemoMemories()) + } + }, [user?.id, useDemoData]) + + // Handle container resize + useEffect(() => { + if (!containerRef.current) return + const resizeObserver = new ResizeObserver(([entry]) => { + setDimensions({ + width: entry.contentRect.width, + height: 400 + }) + }) + resizeObserver.observe(containerRef.current) + return () => 
resizeObserver.disconnect() + }, []) + + // D3 visualization + useEffect(() => { + if (!svgRef.current || memories.length === 0) return + + const tasks = convertToTasks(useDemoData ? getDemoMemories() : memories) + if (tasks.length === 0) return + + const svg = d3.select(svgRef.current) + svg.selectAll('*').remove() + + const margin = { top: 60, right: 40, bottom: 60, left: 150 } + const width = dimensions.width - margin.left - margin.right + const height = dimensions.height - margin.top - margin.bottom + + // Find time range + const allDates = tasks.flatMap(t => [t.start, t.end]) + const minDate = d3.min(allDates)! + const maxDate = d3.max(allDates)! + + // Create scales + const xScale = d3.scaleTime() + .domain([minDate, maxDate]) + .range([0, width]) + + const yScale = d3.scaleBand() + .domain(tasks.map(t => t.id)) + .range([0, height]) + .padding(0.3) + + // Create main group + const g = svg.append('g') + .attr('transform', `translate(${margin.left},${margin.top})`) + .attr('class', 'zoomable') + + // Add axes + const xAxis = d3.axisBottom(xScale) + .ticks(6) + .tickFormat(d3.timeFormat('%b %d, %Y') as any) + + g.append('g') + .attr('class', 'x-axis') + .attr('transform', `translate(0,${height})`) + .call(xAxis) + .selectAll('text') + .style('fill', 'currentColor') + + // Add task bars + const bars = g.append('g') + .attr('class', 'bars') + .selectAll('rect') + .data(tasks) + .enter() + + // Bar background with click and hover + bars.append('rect') + .attr('x', d => xScale(d.start)) + .attr('y', d => yScale(d.id)!) 
+ .attr('width', d => Math.max(2, xScale(d.end) - xScale(d.start))) + .attr('height', yScale.bandwidth()) + .attr('fill', d => d.color) + .attr('rx', 4) + .style('opacity', 0.8) + .style('cursor', 'pointer') + .on('mouseover', function(event, d) { + d3.select(this).style('opacity', 1) + + // Show tooltip + if (tooltipRef.current) { + const tooltip = d3.select(tooltipRef.current) + const startDate = d.start.toLocaleDateString('en-US', { + year: 'numeric', + month: 'short', + day: 'numeric', + hour: '2-digit', + minute: '2-digit' + }) + const endDate = d.end.toLocaleDateString('en-US', { + year: 'numeric', + month: 'short', + day: 'numeric', + hour: '2-digit', + minute: '2-digit' + }) + + tooltip + .style('opacity', 1) + .style('left', `${event.pageX + 10}px`) + .style('top', `${event.pageY - 10}px`) + .html(` +
${d.name}
+
+
Start: ${startDate}
+
End: ${endDate}
+
Click to view memory
+
+ `) + } + }) + .on('mouseout', function() { + d3.select(this).style('opacity', 0.8) + + // Hide tooltip + if (tooltipRef.current) { + d3.select(tooltipRef.current).style('opacity', 0) + } + }) + .on('click', function(event, d) { + event.stopPropagation() + // Extract memory ID from task ID (format: "memory-id-rangeIndex") + const memoryId = d.id.split('-').slice(0, -1).join('-') + navigate(`/memories/${memoryId}`) + }) + + // Add labels + g.append('g') + .attr('class', 'labels') + .selectAll('text') + .data(tasks) + .enter() + .append('text') + .attr('x', -10) + .attr('y', d => yScale(d.id)! + yScale.bandwidth() / 2) + .attr('dy', '0.35em') + .attr('text-anchor', 'end') + .text(d => d.name) + .style('fill', 'currentColor') + .style('font-size', '12px') + + // Zoom behavior + const zoom = d3.zoom() + .scaleExtent([0.5, 5]) + .on('zoom', (event) => { + const transform = event.transform + + // Update x scale + const newXScale = transform.rescaleX(xScale) + + // Update axis + g.select('.x-axis').call( + d3.axisBottom(newXScale) + .ticks(6) + .tickFormat(d3.timeFormat('%b %d, %Y') as any) as any + ) + + // Update bars + g.selectAll('.bars rect') + .attr('x', d => newXScale(d.start)) + .attr('width', d => Math.max(2, newXScale(d.end) - newXScale(d.start))) + }) + + svg.call(zoom as any) + + }, [memories, dimensions, useDemoData]) + + if (loading) { + return ( +
+
+
+ + Loading timeline data... +
+
+
+ ) + } + + if (error) { + return ( +
+
+
+ + {error} +
+
+
+ ) + } + + return ( +
+ {/* Tooltip */} +
+ + {/* Header */} +
+
+

+ + Timeline (Mycelia D3) +

+

+ Interactive D3-based timeline with smooth pan and zoom โ€ข Click events to view details +

+
+
+ {useDemoData ? ( + + ) : ( + + )} + +
+
+ + {/* Timeline */} + {memories.length === 0 && !useDemoData ? ( +
+ +
+

No Timeline Events

+

+ No memories with time information found. Try the demo to see how it works. +

+
+
+ ) : ( +
+
+ +
+ +
+ ๐Ÿ’ก Scroll to zoom, drag to pan + ๐Ÿ–ฑ๏ธ Click bars to view memory details + ๐Ÿ‘† Hover for info +
+ +
+
+
+ Event +
+
+
+ Person +
+
+
+ Place +
+
+
+ )} +
+ ) +} diff --git a/backends/advanced/webui/src/pages/ReactGanttTimeline.tsx b/backends/advanced/webui/src/pages/ReactGanttTimeline.tsx new file mode 100644 index 00000000..e1bc127f --- /dev/null +++ b/backends/advanced/webui/src/pages/ReactGanttTimeline.tsx @@ -0,0 +1,359 @@ +import { useState, useEffect } from 'react' +import { Calendar, RefreshCw, AlertCircle, ZoomIn, ZoomOut } from 'lucide-react' +import Timeline from 'react-gantt-timeline' +import { memoriesApi } from '../services/api' +import { useAuth } from '../contexts/AuthContext' + +interface TimeRange { + start: string + end: string + name?: string +} + +interface MemoryWithTimeRange { + id: string + content: string + created_at: string + metadata?: { + name?: string + timeRanges?: TimeRange[] + isPerson?: boolean + isEvent?: boolean + isPlace?: boolean + } +} + +interface ReactGanttTask { + id: string + name: string + start: Date + end: Date + color?: string +} + +export default function ReactGanttTimeline() { + const [memories, setMemories] = useState([]) + const [loading, setLoading] = useState(false) + const [error, setError] = useState(null) + const [useDemoData, setUseDemoData] = useState(false) + const [zoomLevel, setZoomLevel] = useState(1) // 0.5 = 50%, 1 = 100%, 2 = 200% + const { user } = useAuth() + + const handleZoomIn = () => { + setZoomLevel(prev => Math.min(prev + 0.25, 3)) // Max 300% + } + + const handleZoomOut = () => { + setZoomLevel(prev => Math.max(prev - 0.25, 0.5)) // Min 50% + } + + // Demo data for testing the Timeline visualization - spans multiple years + const getDemoMemories = (): MemoryWithTimeRange[] => { + return [ + { + id: 'demo-graduation', + content: 'College graduation ceremony and celebration dinner with family.', + created_at: '2024-05-20T14:00:00', + metadata: { + name: 'College Graduation', + isEvent: true, + timeRanges: [ + { + name: 'Graduation Ceremony', + start: '2024-05-20T14:00:00', + end: '2024-05-20T17:00:00' + }, + { + name: 'Celebration Dinner', + 
start: '2024-05-20T18:00:00', + end: '2024-05-20T21:00:00' + } + ] + } + }, + { + id: 'demo-vacation', + content: 'Summer vacation in Hawaii with family. Visited beaches, hiked Diamond Head, attended a luau.', + created_at: '2024-07-10T08:00:00', + metadata: { + name: 'Hawaii Vacation', + isEvent: true, + timeRanges: [ + { + name: 'Hawaii Trip', + start: '2024-07-10T08:00:00', + end: '2024-07-17T20:00:00' + } + ] + } + }, + { + id: 'demo-marathon', + content: 'Completed first marathon in Boston. Training started 6 months ago.', + created_at: '2025-04-15T06:00:00', + metadata: { + name: 'Boston Marathon', + isEvent: true, + timeRanges: [ + { + name: 'Marathon Race', + start: '2025-04-15T06:00:00', + end: '2025-04-15T11:30:00' + } + ] + } + }, + { + id: 'demo-wedding', + content: "Sarah and Tom's wedding was a beautiful celebration. The ceremony started at 3 PM, followed by a reception.", + created_at: '2025-06-15T15:00:00', + metadata: { + name: "Sarah & Tom's Wedding", + isEvent: true, + timeRanges: [ + { + name: 'Wedding Ceremony', + start: '2025-06-15T15:00:00', + end: '2025-06-15T16:30:00' + }, + { + name: 'Reception', + start: '2025-06-15T18:00:00', + end: '2025-06-16T00:00:00' + } + ] + } + }, + { + id: 'demo-conference', + content: 'Tech conference in San Francisco. 
Attended keynotes, workshops, and networking events.', + created_at: '2026-03-10T09:00:00', + metadata: { + name: 'Tech Conference 2026', + isEvent: true, + timeRanges: [ + { + name: 'Conference', + start: '2026-03-10T09:00:00', + end: '2026-03-13T18:00:00' + } + ] + } + } + ] + } + + const fetchMemoriesWithTimeRanges = async () => { + setLoading(true) + setError(null) + try { + const response = await memoriesApi.getAll() + + // Extract memories from response + const memoriesData = response.data.memories || response.data || [] + + const memoriesWithTimeRanges = memoriesData.filter( + (memory: MemoryWithTimeRange) => + memory.metadata?.timeRanges && + memory.metadata.timeRanges.length > 0 + ) + + if (memoriesWithTimeRanges.length === 0) { + setUseDemoData(true) + setMemories(getDemoMemories()) + setError('No memories with time ranges found. Showing demo data.') + } else { + setMemories(memoriesWithTimeRanges) + setUseDemoData(false) + } + } catch (err) { + console.error('Failed to fetch memories:', err) + setError('Failed to load memories. 
Showing demo data.') + setUseDemoData(true) + setMemories(getDemoMemories()) + } finally { + setLoading(false) + } + } + + useEffect(() => { + if (user) { + fetchMemoriesWithTimeRanges() + } + }, [user]) + + const handleRefresh = () => { + fetchMemoriesWithTimeRanges() + } + + const handleToggleDemoData = () => { + if (useDemoData) { + fetchMemoriesWithTimeRanges() + } else { + setMemories(getDemoMemories()) + setUseDemoData(true) + } + } + + // Convert memories to react-gantt-timeline format + const convertToReactGanttFormat = (memories: MemoryWithTimeRange[]): ReactGanttTask[] => { + const tasks: ReactGanttTask[] = [] + + memories.forEach((memory) => { + const timeRanges = memory.metadata?.timeRanges || [] + const isEvent = memory.metadata?.isEvent + const isPerson = memory.metadata?.isPerson + const isPlace = memory.metadata?.isPlace + + let color = '#3b82f6' // default blue + if (isEvent) color = '#3b82f6' // blue + else if (isPerson) color = '#10b981' // green + else if (isPlace) color = '#f59e0b' // amber + + timeRanges.forEach((range, index) => { + tasks.push({ + id: `${memory.id}-${index}`, + name: range.name || memory.metadata?.name || memory.content.substring(0, 30), + start: new Date(range.start), + end: new Date(range.end), + color: color + }) + }) + }) + + return tasks + } + + const tasks = convertToReactGanttFormat(memories) + + const data = tasks.map((task) => ({ + id: task.id, + name: task.name, + start: task.start, + end: task.end, + color: task.color + })) + + return ( +
+
+
+

+ + Timeline (React Gantt) +

+

+ Visualize your memories on an interactive timeline using react-gantt-timeline +

+
+
+ {/* Zoom controls */} +
+ +
+ {Math.round(zoomLevel * 100)}% +
+ +
+ + +
+
+ + {error && ( +
+ + {error} +
+ )} + + {loading ? ( +
+ +
+ ) : memories.length === 0 ? ( +
+ +

+ No Timeline Data +

+

+ No memories with time ranges found. Try the demo data to see the timeline in action. +

+ +
+ ) : ( +
+ {/* Timeline Container - Expands with zoom */} +
+
+ +
+
+ + {/* Legend */} +
+
+
+ Event +
+
+
+ Person +
+
+
+ Place +
+
+ + {useDemoData && ( +
+ Showing demo data with events spanning 2024-2026 +
+ )} +
+ )} +
+ ) +} diff --git a/backends/advanced/webui/src/pages/TimelineRouter.tsx b/backends/advanced/webui/src/pages/TimelineRouter.tsx new file mode 100644 index 00000000..0e983ca6 --- /dev/null +++ b/backends/advanced/webui/src/pages/TimelineRouter.tsx @@ -0,0 +1,86 @@ +import { useState } from 'react' +import { Calendar } from 'lucide-react' +import FrappeGanttTimeline from './FrappeGanttTimeline' +import ReactGanttTimeline from './ReactGanttTimeline' +import MyceliaTimeline from './MyceliaTimeline' + +type TimelineImplementation = 'frappe' | 'react-gantt' | 'mycelia' + +export default function TimelineRouter() { + const [activeImplementation, setActiveImplementation] = useState('frappe') + + return ( +
+ {/* Header */} +
+
+

+ + Timeline +

+

+ Visualize your memories on an interactive timeline +

+
+
+ + {/* Tab Navigation */} +
+ +
+ + {/* Timeline Implementation */} +
+ {activeImplementation === 'frappe' && } + {activeImplementation === 'react-gantt' && } + {activeImplementation === 'mycelia' && } +
+
+ ) +} diff --git a/backends/advanced/webui/src/services/api.ts b/backends/advanced/webui/src/services/api.ts index d40508e8..2617cdaa 100644 --- a/backends/advanced/webui/src/services/api.ts +++ b/backends/advanced/webui/src/services/api.ts @@ -138,15 +138,19 @@ export const systemApi = { // Memory Configuration Management getMemoryConfigRaw: () => api.get('/api/admin/memory/config/raw'), - updateMemoryConfigRaw: (configYaml: string) => + updateMemoryConfigRaw: (configYaml: string) => api.post('/api/admin/memory/config/raw', configYaml, { headers: { 'Content-Type': 'text/plain' } }), - validateMemoryConfig: (configYaml: string) => + validateMemoryConfig: (configYaml: string) => api.post('/api/admin/memory/config/validate', configYaml, { headers: { 'Content-Type': 'text/plain' } }), reloadMemoryConfig: () => api.post('/api/admin/memory/config/reload'), + + // Memory Provider Management + getMemoryProvider: () => api.get('/api/admin/memory/provider'), + setMemoryProvider: (provider: string) => api.post('/api/admin/memory/provider', { provider }), } export const queueApi = { diff --git a/backends/advanced/webui/src/types/react-gantt-timeline.d.ts b/backends/advanced/webui/src/types/react-gantt-timeline.d.ts new file mode 100644 index 00000000..513337aa --- /dev/null +++ b/backends/advanced/webui/src/types/react-gantt-timeline.d.ts @@ -0,0 +1,45 @@ +declare module 'react-gantt-timeline' { + import { ComponentType } from 'react' + + export interface TimelineTask { + id: string + name: string + start: Date + end: Date + color?: string + } + + export interface TimelineConfig { + header?: { + top?: { + style?: React.CSSProperties + } + middle?: { + style?: React.CSSProperties + } + bottom?: { + style?: React.CSSProperties + } + } + taskList?: { + title?: string + label?: { + width?: string + } + columns?: Array<{ + id: number + title: string + fieldName: string + width: number + }> + } + } + + export interface TimelineProps { + data: TimelineTask[] + config?: 
TimelineConfig + } + + const Timeline: ComponentType + export default Timeline +} From 33efa1a49019ac48a97382a470058699d2c9c3ca Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Fri, 5 Dec 2025 00:48:43 +0000 Subject: [PATCH 11/31] Changed name to chronicle --- .env.template | 16 +- CLAUDE.md | 26 +- Docs/features.md | 10 +- Docs/getting-started.md | 32 +- Docs/init-system.md | 8 +- Docs/ports-and-access.md | 4 +- Makefile | 36 +- README-K8S.md | 46 +- README.md | 2 +- app/README.md | 18 +- app/app.json | 10 +- app/app/components/DeviceDetails.tsx | 2 +- app/app/components/DeviceListItem.tsx | 2 +- app/app/hooks/useAudioListener.ts | 2 +- app/app/hooks/useDeviceConnection.ts | 2 +- app/app/hooks/useDeviceScanning.ts | 2 +- app/app/index.tsx | 4 +- app/package.json | 4 +- backends/README.md | 4 +- backends/advanced/Docs/HTTPS_SETUP.md | 10 +- backends/advanced/Docs/README.md | 4 +- backends/advanced/Docs/UI.md | 2 +- backends/advanced/Docs/architecture.md | 6 +- backends/advanced/Docs/auth.md | 4 +- backends/advanced/Docs/memories.md | 8 +- backends/advanced/Docs/quickstart.md | 32 +- backends/advanced/docker-compose-test.yml | 114 +- backends/advanced/docker-compose.yml | 12 +- backends/advanced/init-https.sh | 6 +- backends/advanced/init.py | 24 +- .../scripts/create_mycelia_api_key.py | 4 +- .../scripts/sync_friendlite_mycelia.py | 74 +- backends/advanced/setup-https.sh | 6 +- .../src/advanced_omi_backend/app_config.py | 8 +- .../src/advanced_omi_backend/app_factory.py | 2 +- .../advanced/src/advanced_omi_backend/auth.py | 6 +- .../src/advanced_omi_backend/chat_service.py | 2 +- .../advanced_omi_backend/clients/__init__.py | 2 +- .../src/advanced_omi_backend/config.py | 2 +- .../controllers/system_controller.py | 10 +- .../controllers/websocket_controller.py | 2 +- .../src/advanced_omi_backend/database.py | 4 +- .../middleware/app_middleware.py | 2 +- .../advanced_omi_backend/models/__init__.py | 4 +- .../advanced_omi_backend/models/audio_file.py | 2 +- 
.../models/conversation.py | 4 +- .../src/advanced_omi_backend/models/job.py | 4 +- .../routers/api_router.py | 2 +- .../routers/modules/__init__.py | 2 +- .../routers/modules/chat_routes.py | 2 +- .../routers/modules/client_routes.py | 2 +- .../routers/modules/conversation_routes.py | 2 +- .../routers/modules/health_routes.py | 24 +- .../routers/modules/memory_routes.py | 2 +- .../routers/modules/system_routes.py | 2 +- .../routers/modules/user_routes.py | 2 +- .../routers/modules/websocket_routes.py | 2 +- .../advanced_omi_backend/services/__init__.py | 2 +- .../services/memory/__init__.py | 4 +- .../services/memory/config.py | 14 +- .../services/memory/providers/__init__.py | 6 +- .../{friend_lite.py => chronicle.py} | 0 .../services/memory/providers/mcp_client.py | 8 +- .../services/memory/providers/mycelia.py | 8 +- .../memory/providers/openmemory_mcp.py | 18 +- .../services/memory/service_factory.py | 14 +- .../services/mycelia_sync.py | 40 +- .../workers/conversation_jobs.py | 7 +- .../workers/memory_jobs.py | 4 +- backends/advanced/ssl/generate-ssl.sh | 2 +- backends/advanced/start-k8s.sh | 4 +- backends/advanced/start-workers.sh | 2 +- backends/advanced/start.sh | 4 +- .../tests/test_conversation_models.py | 12 +- backends/advanced/tests/test_integration.py | 4 +- backends/advanced/upload_files.py | 6 +- backends/advanced/webui/README.md | 4 +- backends/advanced/webui/package-lock.json | 3046 +++++++++++++---- backends/advanced/webui/package.json | 10 +- .../webui/src/components/layout/Layout.tsx | 4 +- .../advanced/webui/src/pages/LoginPage.tsx | 2 +- .../advanced/webui/src/pages/Memories.tsx | 4 +- .../webui/src/pages/MemoriesRouter.tsx | 4 +- backends/advanced/webui/src/pages/System.tsx | 2 +- .../webui/src/pages/TimelineRouter.tsx | 19 +- backends/advanced/webui/tsconfig.json | 1 + backends/charts/advanced-backend/Chart.yaml | 4 +- .../templates/deployment.yaml | 8 +- .../templates/workers-deployment.yaml | 4 +- 
backends/charts/advanced-backend/values.yaml | 4 +- backends/charts/webui/Chart.yaml | 4 +- .../charts/webui/templates/deployment.yaml | 4 +- backends/charts/webui/values.yaml | 2 +- extras/asr-services/README.md | 4 +- extras/asr-services/quickstart.md | 6 +- .../tests/test_parakeet_service.py | 2 +- extras/havpe-relay/README.md | 2 +- extras/havpe-relay/main.py | 2 +- extras/local-omi-bt/connect-omi.py | 6 +- extras/openmemory-mcp/README.md | 38 +- extras/openmemory-mcp/run.sh | 8 +- extras/openmemory-mcp/test_standalone.py | 2 +- extras/speaker-omni-experimental/README.md | 6 +- .../charts/templates/speaker-deployment.yaml | 2 +- .../charts/templates/webui-deployment.yaml | 4 +- extras/speaker-recognition/charts/values.yaml | 2 +- extras/speaker-recognition/init.py | 2 +- extras/speaker-recognition/quickstart.md | 2 +- .../simple_speaker_recognition/__init__.py | 2 +- .../speaker-recognition/ssl/generate-ssl.sh | 2 +- .../tests/test_speaker_service_integration.py | 2 +- k8s-manifests/cross-namespace-rbac.yaml | 8 +- quickstart.md | 30 +- run-test.sh | 6 +- scripts/generate-k8s-configs.py | 12 +- scripts/k8s/cluster-status.sh | 2 +- scripts/k8s/load-env.sh | 2 +- scripts/manage-audio-files.sh | 2 +- services.py | 4 +- skaffold.yaml | 4 +- status.py | 8 +- tests/.env.test | 12 +- tests/Makefile | 4 +- tests/README.md | 6 +- tests/TESTING_USER_GUIDE.md | 2 +- tests/browser/browser_auth.robot | 2 +- tests/infrastructure/infra_tests.robot | 13 +- .../websocket_streaming_tests.robot | 3 +- tests/libs/audio_stream_library.py | 28 + tests/resources/transcript_verification.robot | 53 +- tests/resources/websocket_keywords.robot | 9 + tests/setup/setup_keywords.robot | 2 +- tests/setup/test_env.py | 10 +- tests/setup/test_manager_keywords.robot | 33 +- tests/tags.md | 4 +- wizard.py | 8 +- 136 files changed, 2958 insertions(+), 1320 deletions(-) rename backends/advanced/src/advanced_omi_backend/services/memory/providers/{friend_lite.py => chronicle.py} (100%) diff --git 
a/.env.template b/.env.template index 97495493..328d3301 100644 --- a/.env.template +++ b/.env.template @@ -1,7 +1,7 @@ # ======================================== -# FRIEND-LITE MASTER CONFIGURATION +# CHRONICLE MASTER CONFIGURATION # ======================================== -# This is the master configuration template for the entire Friend-Lite project. +# This is the master configuration template for the entire Chronicle project. # Copy this file to .env and customize values, then run 'make config' to generate # all service-specific configuration files. @@ -11,7 +11,7 @@ # Infrastructure namespaces INFRASTRUCTURE_NAMESPACE=infrastructure -APPLICATION_NAMESPACE=friend-lite +APPLICATION_NAMESPACE=chronicle # Deployment mode: docker-compose, kubernetes, or distributed DEPLOYMENT_MODE=docker-compose @@ -24,7 +24,7 @@ CONTAINER_REGISTRY=localhost:32000 # ======================================== # Primary domain/IP for all services -# Examples: localhost, 192.168.1.100, friend-lite.example.com, 100.x.x.x (Tailscale) +# Examples: localhost, 192.168.1.100, chronicle.example.com, 100.x.x.x (Tailscale) DOMAIN=localhost # Service ports (Docker Compose mode) @@ -105,7 +105,7 @@ PARAKEET_ASR_URL=http://host.docker.internal:8767 # MongoDB configuration MONGODB_URI=mongodb://mongo:${MONGODB_PORT} -MONGODB_K8S_URI=mongodb://mongodb.${INFRASTRUCTURE_NAMESPACE}.svc.cluster.local:27017/friend-lite +MONGODB_K8S_URI=mongodb://mongodb.${INFRASTRUCTURE_NAMESPACE}.svc.cluster.local:27017/chronicle # Qdrant configuration QDRANT_BASE_URL=qdrant @@ -120,12 +120,12 @@ NEO4J_PASSWORD=neo4j-password # MEMORY PROVIDER CONFIGURATION # ======================================== -# Memory Provider: friend_lite or openmemory_mcp -MEMORY_PROVIDER=friend_lite +# Memory Provider: chronicle or openmemory_mcp +MEMORY_PROVIDER=chronicle # OpenMemory MCP configuration (when MEMORY_PROVIDER=openmemory_mcp) OPENMEMORY_MCP_URL=http://host.docker.internal:8765 -OPENMEMORY_CLIENT_NAME=friend_lite 
+OPENMEMORY_CLIENT_NAME=chronicle OPENMEMORY_USER_ID=openmemory OPENMEMORY_TIMEOUT=30 diff --git a/CLAUDE.md b/CLAUDE.md index 0f579d33..ec326b6d 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -4,7 +4,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co ## Project Overview -Friend-Lite is at the core an AI-powered personal system - various devices, including but not limited to wearables from OMI can be used for at the very least audio capture, speaker specific transcription, memory extraction and retrieval. +Chronicle is at the core an AI-powered personal system - various devices, including but not limited to wearables from OMI can be used for at the very least audio capture, speaker specific transcription, memory extraction and retrieval. On top of that - it is being designed to support other services, that can help a user with these inputs such as reminders, action items, personal diagnosis etc. This supports a comprehensive web dashboard for management. @@ -147,7 +147,7 @@ docker compose up --build - **Job Tracker**: Tracks pipeline jobs with stage events (audio โ†’ transcription โ†’ memory) and completion status - **Task Management**: BackgroundTaskManager tracks all async tasks to prevent orphaned processes - **Unified Transcription**: Deepgram/Mistral transcription with fallback to offline ASR services -- **Memory System**: Pluggable providers (Friend-Lite native or OpenMemory MCP) +- **Memory System**: Pluggable providers (Chronicle native or OpenMemory MCP) - **Authentication**: Email-based login with MongoDB ObjectId user system - **Client Management**: Auto-generated client IDs as `{user_id_suffix}-{device_name}`, centralized ClientManager - **Data Storage**: MongoDB (`audio_chunks` collection for conversations), vector storage (Qdrant or OpenMemory) @@ -161,7 +161,7 @@ Required: - LLM Service: Memory extraction and action items (OpenAI or Ollama) Recommended: - - Vector Storage: Qdrant (Friend-Lite provider) or OpenMemory MCP 
server + - Vector Storage: Qdrant (Chronicle provider) or OpenMemory MCP server - Transcription: Deepgram, Mistral, or offline ASR services Optional: @@ -179,8 +179,8 @@ Optional: 4. **Speech-Driven Conversation Creation**: User-facing conversations only created when speech is detected 5. **Dual Storage System**: Audio sessions always stored in `audio_chunks`, conversations created in `conversations` collection only with speech 6. **Versioned Processing**: Transcript and memory versions tracked with active version pointers -7. **Memory Processing**: Pluggable providers (Friend-Lite native with individual facts or OpenMemory MCP delegation) -8. **Memory Storage**: Direct Qdrant (Friend-Lite) or OpenMemory server (MCP provider) +7. **Memory Processing**: Pluggable providers (Chronicle native with individual facts or OpenMemory MCP delegation) +8. **Memory Storage**: Direct Qdrant (Chronicle) or OpenMemory server (MCP provider) 9. **Action Items**: Automatic task detection with "Simon says" trigger phrases 10. **Audio Optimization**: Speech segment extraction removes silence automatically 11. 
**Task Tracking**: BackgroundTaskManager ensures proper cleanup of all async operations @@ -230,11 +230,11 @@ DEEPGRAM_API_KEY=your-deepgram-key-here # Optional: TRANSCRIPTION_PROVIDER=deepgram # Memory Provider -MEMORY_PROVIDER=friend_lite # or openmemory_mcp +MEMORY_PROVIDER=chronicle # or openmemory_mcp # Database MONGODB_URI=mongodb://mongo:27017 -# Database name: friend-lite +# Database name: chronicle QDRANT_BASE_URL=qdrant # Network Configuration @@ -246,12 +246,12 @@ CORS_ORIGINS=http://localhost:3000,http://localhost:5173 ### Memory Provider Configuration -Friend-Lite supports two pluggable memory backends: +Chronicle supports two pluggable memory backends: -#### Friend-Lite Memory Provider (Default) +#### Chronicle Memory Provider (Default) ```bash -# Use Friend-Lite memory provider (default) -MEMORY_PROVIDER=friend_lite +# Use Chronicle memory provider (default) +MEMORY_PROVIDER=chronicle # LLM Configuration for memory extraction LLM_PROVIDER=openai @@ -269,7 +269,7 @@ MEMORY_PROVIDER=openmemory_mcp # OpenMemory MCP Server Configuration OPENMEMORY_MCP_URL=http://host.docker.internal:8765 -OPENMEMORY_CLIENT_NAME=friend_lite +OPENMEMORY_CLIENT_NAME=chronicle OPENMEMORY_USER_ID=openmemory OPENMEMORY_TIMEOUT=30 @@ -279,7 +279,7 @@ OPENAI_API_KEY=your-openai-key-here ### Transcription Provider Configuration -Friend-Lite supports multiple transcription services: +Chronicle supports multiple transcription services: ```bash # Option 1: Deepgram (High quality, recommended) diff --git a/Docs/features.md b/Docs/features.md index 25c5671c..57e3413f 100644 --- a/Docs/features.md +++ b/Docs/features.md @@ -1,11 +1,11 @@ -# Friend-Lite Features & Architecture +# Chronicle Features & Architecture ## Core Features -Friend-Lite supports AI-powered personal systems through multiple OMI-compatible audio devices: +Chronicle supports AI-powered personal systems through multiple OMI-compatible audio devices: **Memory System:** -- **Advanced memory system** with pluggable 
providers (Friend-Lite native or OpenMemory MCP) +- **Advanced memory system** with pluggable providers (Chronicle native or OpenMemory MCP) - **Memory extraction** from conversations with individual fact storage - **Semantic memory search** with relevance threshold filtering and live results - **Memory count display** with total count tracking from native providers @@ -38,7 +38,7 @@ DevKit2 streams audio via Bluetooth using OPUS codec. The processing pipeline in **AI Processing:** - LLM-based conversation analysis (OpenAI or local Ollama) -- **Dual memory system**: Friend-Lite native or OpenMemory MCP integration +- **Dual memory system**: Chronicle native or OpenMemory MCP integration - Enhanced memory extraction with individual fact storage - **Semantic search** with relevance scoring and threshold filtering - Smart deduplication and memory updates (ADD/UPDATE/DELETE) @@ -87,7 +87,7 @@ Choose one based on your needs: **Features:** - Audio processing pipeline with real-time WebSocket support -- **Pluggable memory system**: Choose between Friend-Lite native or OpenMemory MCP +- **Pluggable memory system**: Choose between Chronicle native or OpenMemory MCP - Enhanced memory extraction with individual fact storage (no generic fallbacks) - **Semantic memory search** with relevance threshold filtering and total count display - **Speaker-based memory filtering**: Optional control over processing based on participant presence diff --git a/Docs/getting-started.md b/Docs/getting-started.md index 2f647b7b..6483f00f 100644 --- a/Docs/getting-started.md +++ b/Docs/getting-started.md @@ -1,16 +1,16 @@ # Getting Started -# Friend-Lite Backend Quickstart Guide +# Chronicle Backend Quickstart Guide -> ๐Ÿ“– **New to friend-lite?** This is your starting point! After reading this, continue with [architecture.md](./architecture.md) for technical details. +> ๐Ÿ“– **New to chronicle?** This is your starting point! 
After reading this, continue with [architecture.md](./architecture.md) for technical details. ## Overview -Friend-Lite is an eco-system of services to support "AI wearable" agents/functionality. +Chronicle is an eco-system of services to support "AI wearable" agents/functionality. At the moment, the basic functionalities are: - Audio capture (via WebSocket, from OMI device, files, or a laptop) - Audio transcription -- **Advanced memory system** with pluggable providers (Friend-Lite native or OpenMemory MCP) +- **Advanced memory system** with pluggable providers (Chronicle native or OpenMemory MCP) - **Enhanced memory extraction** with individual fact storage and smart updates - **Semantic memory search** with relevance threshold filtering and live results - Action item extraction @@ -38,13 +38,13 @@ cd backends/advanced - **Authentication**: Admin email/password setup - **Transcription Provider**: Choose Deepgram, Mistral, or Offline (Parakeet) - **LLM Provider**: Choose OpenAI or Ollama for memory extraction -- **Memory Provider**: Choose Friend-Lite Native or OpenMemory MCP +- **Memory Provider**: Choose Chronicle Native or OpenMemory MCP - **Optional Services**: Speaker Recognition and other extras - **Network Configuration**: Ports and host settings **Example flow:** ``` -๐Ÿš€ Friend-Lite Interactive Setup +๐Ÿš€ Chronicle Interactive Setup =============================================== โ–บ Authentication Setup @@ -126,13 +126,13 @@ ADMIN_EMAIL=admin@example.com **Memory Provider Configuration:** ```bash # Memory Provider (Choose One) -# Option 1: Friend-Lite Native (Default - Recommended) -MEMORY_PROVIDER=friend_lite +# Option 1: Chronicle Native (Default - Recommended) +MEMORY_PROVIDER=chronicle # Option 2: OpenMemory MCP (Cross-client compatibility) # MEMORY_PROVIDER=openmemory_mcp # OPENMEMORY_MCP_URL=http://host.docker.internal:8765 -# OPENMEMORY_CLIENT_NAME=friend_lite +# OPENMEMORY_CLIENT_NAME=chronicle # OPENMEMORY_USER_ID=openmemory ``` @@ -325,8 
+325,8 @@ curl -X POST "http://localhost:8000/api/process-audio-files" \ ### Memory & Intelligence #### Pluggable Memory System -- **Two memory providers**: Choose between Friend-Lite native or OpenMemory MCP -- **Friend-Lite Provider**: Full control with custom extraction, individual fact storage, smart deduplication +- **Two memory providers**: Choose between Chronicle native or OpenMemory MCP +- **Chronicle Provider**: Full control with custom extraction, individual fact storage, smart deduplication - **OpenMemory MCP Provider**: Cross-client compatibility (Claude Desktop, Cursor, Windsurf), professional processing #### Enhanced Memory Processing @@ -482,7 +482,7 @@ tailscale ip -4 ## Data Architecture -The friend-lite backend uses a **user-centric data architecture**: +The chronicle backend uses a **user-centric data architecture**: - **All memories are keyed by database user_id** (not client_id) - **Client information is stored in metadata** for reference and debugging @@ -495,12 +495,12 @@ For detailed information, see [User Data Architecture](user-data-architecture.md ### Choosing a Memory Provider -Friend-Lite offers two memory backends: +Chronicle offers two memory backends: -#### 1. Friend-Lite Native +#### 1. 
Chronicle Native ```bash # In your .env file -MEMORY_PROVIDER=friend_lite +MEMORY_PROVIDER=chronicle LLM_PROVIDER=openai OPENAI_API_KEY=your-openai-key-here ``` @@ -519,7 +519,7 @@ OPENAI_API_KEY=your-openai-key-here cd extras/openmemory-mcp docker compose up -d -# Then configure Friend-Lite +# Then configure Chronicle MEMORY_PROVIDER=openmemory_mcp OPENMEMORY_MCP_URL=http://host.docker.internal:8765 ``` diff --git a/Docs/init-system.md b/Docs/init-system.md index fb9c1763..98d7c49a 100644 --- a/Docs/init-system.md +++ b/Docs/init-system.md @@ -1,4 +1,4 @@ -# Friend-Lite Initialization System +# Chronicle Initialization System ## Quick Links @@ -10,14 +10,14 @@ ## Overview -Friend-Lite uses a unified initialization system with clean separation of concerns: +Chronicle uses a unified initialization system with clean separation of concerns: - **Configuration** (`wizard.py`) - Set up service configurations, API keys, and .env files - **Service Management** (`services.py`) - Start, stop, and manage running services The root orchestrator handles service selection and delegates configuration to individual service scripts. In general, setup scripts only configure and do not start services automatically. Exceptions: `extras/asr-services` and `extras/openmemory-mcp` are startup scripts. This prevents unnecessary resource usage and gives you control over when services actually run. -> **New to Friend-Lite?** Most users should start with the [Quick Start Guide](../quickstart.md) instead of this detailed reference. +> **New to Chronicle?** Most users should start with the [Quick Start Guide](../quickstart.md) instead of this detailed reference. 
## Architecture @@ -133,7 +133,7 @@ Services use `host.docker.internal` for inter-container communication: ## Service Management -Friend-Lite now separates **configuration** from **service lifecycle management**: +Chronicle now separates **configuration** from **service lifecycle management**: ### Unified Service Management Use the `services.py` script for all service operations: diff --git a/Docs/ports-and-access.md b/Docs/ports-and-access.md index f93137b7..67c0fd28 100644 --- a/Docs/ports-and-access.md +++ b/Docs/ports-and-access.md @@ -1,11 +1,11 @@ -# Friend-Lite Port Configuration & User Journey +# Chronicle Port Configuration & User Journey ## User Journey: Git Clone to Running Services ### 1. Clone & Setup ```bash git clone -cd friend-lite +cd chronicle # Configure all services uv run --with-requirements setup-requirements.txt python init.py diff --git a/Makefile b/Makefile index 3d03a180..9c4dca6a 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ # ======================================== -# Friend-Lite Management System +# Chronicle Management System # ======================================== -# Central management interface for Friend-Lite project +# Central management interface for Chronicle project # Handles configuration, deployment, and maintenance tasks # Load environment variables from .env file @@ -25,7 +25,7 @@ K8S_SCRIPTS_DIR := $(SCRIPTS_DIR)/k8s .DEFAULT_GOAL := menu menu: ## Show interactive menu (default) - @echo "๐ŸŽฏ Friend-Lite Management System" + @echo "๐ŸŽฏ Chronicle Management System" @echo "================================" @echo @echo "๐Ÿ“‹ Quick Actions:" @@ -59,7 +59,7 @@ menu: ## Show interactive menu (default) @echo @echo "๐Ÿ”„ Mycelia Sync:" @echo " mycelia-sync-status ๐Ÿ“Š Show Mycelia OAuth sync status" - @echo " mycelia-sync-all ๐Ÿ”„ Sync all Friend-Lite users to Mycelia" + @echo " mycelia-sync-all ๐Ÿ”„ Sync all Chronicle users to Mycelia" @echo " mycelia-sync-user ๐Ÿ‘ค Sync specific user (EMAIL=user@example.com)" @echo " 
mycelia-check-orphans ๐Ÿ” Find orphaned Mycelia objects" @echo " mycelia-reassign-orphans โ™ป๏ธ Reassign orphans (EMAIL=admin@example.com)" @@ -75,7 +75,7 @@ menu: ## Show interactive menu (default) @echo "๐Ÿ’ก Tip: Run 'make help' for detailed help on any target" help: ## Show detailed help for all targets - @echo "๐ŸŽฏ Friend-Lite Management System - Detailed Help" + @echo "๐ŸŽฏ Chronicle Management System - Detailed Help" @echo "================================================" @echo @echo "๐Ÿ—๏ธ KUBERNETES SETUP:" @@ -110,9 +110,9 @@ help: ## Show detailed help for all targets @echo @echo "๐Ÿ”„ MYCELIA SYNC:" @echo " mycelia-sync-status Show Mycelia OAuth sync status for all users" - @echo " mycelia-sync-all Sync all Friend-Lite users to Mycelia OAuth" + @echo " mycelia-sync-all Sync all Chronicle users to Mycelia OAuth" @echo " mycelia-sync-user Sync specific user (EMAIL=user@example.com)" - @echo " mycelia-check-orphans Find Mycelia objects without Friend-Lite owner" + @echo " mycelia-check-orphans Find Mycelia objects without Chronicle owner" @echo " mycelia-reassign-orphans Reassign orphaned objects (EMAIL=admin@example.com)" @echo @echo "๐Ÿงช ROBOT FRAMEWORK TESTING:" @@ -158,7 +158,7 @@ setup-dev: ## Setup development environment (git hooks, pre-commit) setup-k8s: ## Initial Kubernetes setup (registry + infrastructure) @echo "๐Ÿ—๏ธ Starting Kubernetes initial setup..." 
- @echo "This will set up the complete infrastructure for Friend-Lite" + @echo "This will set up the complete infrastructure for Chronicle" @echo @echo "๐Ÿ“‹ Setup includes:" @echo " โ€ข Insecure registry configuration" @@ -230,10 +230,10 @@ config-k8s: ## Generate Kubernetes configuration files (ConfigMap/Secret only - @kubectl apply -f k8s-manifests/configmap.yaml -n $(APPLICATION_NAMESPACE) 2>/dev/null || echo "โš ๏ธ ConfigMap not applied (cluster not available?)" @kubectl apply -f k8s-manifests/secrets.yaml -n $(APPLICATION_NAMESPACE) 2>/dev/null || echo "โš ๏ธ Secret not applied (cluster not available?)" @echo "๐Ÿ“ฆ Copying ConfigMap and Secret to speech namespace..." - @kubectl get configmap friend-lite-config -n $(APPLICATION_NAMESPACE) -o yaml | \ + @kubectl get configmap chronicle-config -n $(APPLICATION_NAMESPACE) -o yaml | \ sed -e '/namespace:/d' -e '/resourceVersion:/d' -e '/uid:/d' -e '/creationTimestamp:/d' | \ kubectl apply -n speech -f - 2>/dev/null || echo "โš ๏ธ ConfigMap not copied to speech namespace" - @kubectl get secret friend-lite-secrets -n $(APPLICATION_NAMESPACE) -o yaml | \ + @kubectl get secret chronicle-secrets -n $(APPLICATION_NAMESPACE) -o yaml | \ sed -e '/namespace:/d' -e '/resourceVersion:/d' -e '/uid:/d' -e '/creationTimestamp:/d' | \ kubectl apply -n speech -f - 2>/dev/null || echo "โš ๏ธ Secret not copied to speech namespace" @echo "โœ… Kubernetes configuration files generated" @@ -353,13 +353,13 @@ audio-manage: ## Interactive audio file management mycelia-sync-status: ## Show Mycelia OAuth sync status for all users @echo "๐Ÿ“Š Checking Mycelia OAuth sync status..." - @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --status + @cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --status -mycelia-sync-all: ## Sync all Friend-Lite users to Mycelia OAuth - @echo "๐Ÿ”„ Syncing all Friend-Lite users to Mycelia OAuth..." 
+mycelia-sync-all: ## Sync all Chronicle users to Mycelia OAuth + @echo "๐Ÿ”„ Syncing all Chronicle users to Mycelia OAuth..." @echo "โš ๏ธ This will create OAuth credentials for users without them" @read -p "Continue? (y/N): " confirm && [ "$$confirm" = "y" ] || exit 1 - @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --sync-all + @cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --sync-all mycelia-sync-user: ## Sync specific user to Mycelia OAuth (usage: make mycelia-sync-user EMAIL=user@example.com) @echo "๐Ÿ‘ค Syncing specific user to Mycelia OAuth..." @@ -367,11 +367,11 @@ mycelia-sync-user: ## Sync specific user to Mycelia OAuth (usage: make mycelia-s echo "โŒ EMAIL parameter is required. Usage: make mycelia-sync-user EMAIL=user@example.com"; \ exit 1; \ fi - @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --email $(EMAIL) + @cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --email $(EMAIL) -mycelia-check-orphans: ## Find Mycelia objects without Friend-Lite owner +mycelia-check-orphans: ## Find Mycelia objects without Chronicle owner @echo "๐Ÿ” Checking for orphaned Mycelia objects..." - @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --check-orphans + @cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --check-orphans mycelia-reassign-orphans: ## Reassign orphaned objects to user (usage: make mycelia-reassign-orphans EMAIL=admin@example.com) @echo "โ™ป๏ธ Reassigning orphaned Mycelia objects..." @@ -381,7 +381,7 @@ mycelia-reassign-orphans: ## Reassign orphaned objects to user (usage: make myce fi @echo "โš ๏ธ This will reassign all orphaned objects to: $(EMAIL)" @read -p "Continue? 
(y/N): " confirm && [ "$$confirm" = "y" ] || exit 1 - @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --reassign-orphans --target-email $(EMAIL) + @cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --reassign-orphans --target-email $(EMAIL) # ======================================== # TESTING TARGETS diff --git a/README-K8S.md b/README-K8S.md index 161836af..9d83350f 100644 --- a/README-K8S.md +++ b/README-K8S.md @@ -1,6 +1,6 @@ -# Friend-Lite Kubernetes Setup Guide +# Chronicle Kubernetes Setup Guide -This guide walks you through setting up Friend-Lite from scratch on a fresh Ubuntu system, including MicroK8s installation, Docker registry configuration, and deployment via Skaffold. +This guide walks you through setting up Chronicle from scratch on a fresh Ubuntu system, including MicroK8s installation, Docker registry configuration, and deployment via Skaffold. ## System Architecture @@ -245,7 +245,7 @@ This guide walks you through setting up Friend-Lite from scratch on a fresh Ubun ### **Directory Structure** ``` -friend-lite/ +chronicle/ โ”œโ”€โ”€ scripts/ # Kubernetes deployment and management scripts โ”‚ โ”œโ”€โ”€ deploy-all-services.sh # Deploy all services โ”‚ โ”œโ”€โ”€ cluster-status.sh # Check cluster health @@ -266,9 +266,9 @@ friend-lite/ 1. 
**Clone Repository** ```bash - # Clone Friend-Lite repository - git clone https://github.com/yourusername/friend-lite.git - cd friend-lite + # Clone Chronicle repository + git clone https://github.com/yourusername/chronicle.git + cd chronicle # Verify template files are present ls -la skaffold.env.template @@ -387,7 +387,7 @@ The following scripts are available in the `scripts/` folder to simplify common ./scripts/cluster-status.sh # Check status of specific namespace -./scripts/cluster-status.sh friend-lite +./scripts/cluster-status.sh chronicle ``` ### **Setup Scripts** @@ -482,14 +482,14 @@ This directory contains standalone Kubernetes manifests that are not managed by 3. **Verify Deployment** ```bash # Check all resources - kubectl get all -n friend-lite + kubectl get all -n chronicle kubectl get all -n root # Check Ingress - kubectl get ingress -n friend-lite + kubectl get ingress -n chronicle # Check services - kubectl get svc -n friend-lite + kubectl get svc -n chronicle ``` ## Multi-Node Cluster Management @@ -630,14 +630,14 @@ spec: 1. **Check Application Health** ```bash # Check backend health - curl -k https://friend-lite.192-168-1-42.nip.io:32623/health + curl -k https://chronicle.192-168-1-42.nip.io:32623/health # Check WebUI - curl -k https://friend-lite.192-168-1-42.nip.io:32623/ + curl -k https://chronicle.192-168-1-42.nip.io:32623/ ``` 2. **Access WebUI** - - Open browser to: `https://friend-lite.192-168-1-42.nip.io:32623/` + - Open browser to: `https://chronicle.192-168-1-42.nip.io:32623/` - Accept self-signed certificate warning - Create admin user account - Test audio recording functionality @@ -676,7 +676,7 @@ spec: kubectl get pods -n ingress-nginx # Check Ingress configuration (run on build machine) - kubectl describe ingress -n friend-lite + kubectl describe ingress -n chronicle ``` 4. 
**Build Issues** @@ -729,20 +729,20 @@ spec: ```bash # View logs (run on build machine) -kubectl logs -n friend-lite deployment/advanced-backend -kubectl logs -n friend-lite deployment/webui +kubectl logs -n chronicle deployment/advanced-backend +kubectl logs -n chronicle deployment/webui # Port forward for debugging (run on build machine) -kubectl port-forward -n friend-lite svc/advanced-backend 8000:8000 -kubectl port-forward -n friend-lite svc/webui 8080:80 +kubectl port-forward -n chronicle svc/advanced-backend 8000:8000 +kubectl port-forward -n chronicle svc/webui 8080:80 # Check resource usage (run on build machine) -kubectl top pods -n friend-lite +kubectl top pods -n chronicle kubectl top nodes # Restart deployments (run on build machine) -kubectl rollout restart deployment/advanced-backend -n friend-lite -kubectl rollout restart deployment/webui -n friend-lite +kubectl rollout restart deployment/advanced-backend -n chronicle +kubectl rollout restart deployment/webui -n chronicle ``` ## Maintenance @@ -773,7 +773,7 @@ kubectl rollout restart deployment/webui -n friend-lite cp skaffold.env skaffold.env.backup # Backup Kubernetes manifests (run on build machine) - kubectl get all -n friend-lite -o yaml > friend-lite-backup.yaml + kubectl get all -n chronicle -o yaml > chronicle-backup.yaml kubectl get all -n root -o yaml > infrastructure-backup.yaml ``` @@ -791,7 +791,7 @@ chmod +x init.sh ./init.sh ``` -This will guide you through setting up Friend-Lite using Docker Compose instead of Kubernetes. +This will guide you through setting up Chronicle using Docker Compose instead of Kubernetes. ## Speaker Recognition Deployment diff --git a/README.md b/README.md index 0a43076b..34027891 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Friend-Lite +# Chronicle Self-hostable AI system that captures audio/video data from OMI devices and other sources to generate memories, action items, and contextual insights about your conversations and daily interactions. 
diff --git a/app/README.md b/app/README.md index 6d3272f9..d73dd748 100644 --- a/app/README.md +++ b/app/README.md @@ -1,6 +1,6 @@ -# Friend-Lite Mobile App +# Chronicle Mobile App -React Native mobile application for connecting OMI devices and streaming audio to Friend-Lite backends. Supports cross-platform deployment on iOS and Android with Bluetooth integration. +React Native mobile application for connecting OMI devices and streaming audio to Chronicle backends. Supports cross-platform deployment on iOS and Android with Bluetooth integration. ## Features @@ -64,7 +64,7 @@ npx expo prebuild --clean cd ios && pod install && cd .. # Open in Xcode -open ios/friendlite.xcworkspace +open ios/chronicle.xcworkspace ``` Build and run from Xcode interface. @@ -154,7 +154,7 @@ Backend URL: wss://[ngrok-subdomain].ngrok.io/ws_pcm ## Phone Audio Streaming (NEW) ### Overview -Stream audio directly from your phone's microphone to Friend-Lite backend, bypassing Bluetooth devices. This feature provides a direct audio input method for users who want to use their phone as the audio source. +Stream audio directly from your phone's microphone to Chronicle backend, bypassing Bluetooth devices. This feature provides a direct audio input method for users who want to use their phone as the audio source. ### Features - **Direct Microphone Access**: Use phone's built-in microphone @@ -166,7 +166,7 @@ Stream audio directly from your phone's microphone to Friend-Lite backend, bypas ### Setup & Usage #### Enable Phone Audio Streaming -1. **Open Friend-Lite app** +1. **Open Chronicle app** 2. **Configure Backend Connection** (see Backend Configuration section) 3. **Grant Microphone Permissions** when prompted 4. 
**Tap "Stream Phone Audio" button** in main interface @@ -175,7 +175,7 @@ Stream audio directly from your phone's microphone to Friend-Lite backend, bypas #### Requirements - **iOS**: iOS 13+ with microphone permissions - **Android**: Android API 21+ with microphone permissions -- **Network**: Stable connection to Friend-Lite backend +- **Network**: Stable connection to Chronicle backend - **Backend**: Advanced backend running with `/ws_pcm` endpoint #### Switching Audio Sources @@ -197,8 +197,8 @@ Stream audio directly from your phone's microphone to Friend-Lite backend, bypas - **Restart Recording**: Stop and restart phone audio streaming #### Permission Issues -- **iOS**: Settings > Privacy & Security > Microphone > Friend-Lite -- **Android**: Settings > Apps > Friend-Lite > Permissions > Microphone +- **iOS**: Settings > Privacy & Security > Microphone > Chronicle +- **Android**: Settings > Apps > Chronicle > Permissions > Microphone #### No Audio Level Visualization - **Restart App**: Close and reopen the application @@ -210,7 +210,7 @@ Stream audio directly from your phone's microphone to Friend-Lite backend, bypas ### Device Connection 1. **Enable Bluetooth** on your mobile device -2. **Open Friend-Lite app** +2. **Open Chronicle app** 3. **Pair OMI device**: - Go to Device Settings - Scan for nearby OMI devices diff --git a/app/app.json b/app/app.json index 9acdac77..c2446e12 100644 --- a/app/app.json +++ b/app/app.json @@ -1,7 +1,7 @@ { "expo": { - "name": "friend-lite-app", - "slug": "friend-lite-app", + "name": "chronicle-app", + "slug": "chronicle-app", "version": "1.0.0", "orientation": "portrait", "icon": "./assets/icon.png", @@ -17,9 +17,9 @@ ], "ios": { "supportsTablet": true, - "bundleIdentifier": "com.cupbearer5517.friendlite", + "bundleIdentifier": "com.cupbearer5517.chronicle", "infoPlist": { - "NSMicrophoneUsageDescription": "Friend-Lite needs access to your microphone to stream audio to the backend for processing." 
+ "NSMicrophoneUsageDescription": "Chronicle needs access to your microphone to stream audio to the backend for processing." } }, "android": { @@ -27,7 +27,7 @@ "foregroundImage": "./assets/adaptive-icon.png", "backgroundColor": "#ffffff" }, - "package": "com.cupbearer5517.friendlite", + "package": "com.cupbearer5517.chronicle", "permissions": [ "android.permission.BLUETOOTH", "android.permission.BLUETOOTH_ADMIN", diff --git a/app/app/components/DeviceDetails.tsx b/app/app/components/DeviceDetails.tsx index ebf204c3..3bd22b4a 100644 --- a/app/app/components/DeviceDetails.tsx +++ b/app/app/components/DeviceDetails.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { View, Text, TouchableOpacity, StyleSheet, TextInput } from 'react-native'; -import { BleAudioCodec } from 'friend-lite-react-native'; +import { BleAudioCodec } from 'chronicle-react-native'; interface DeviceDetailsProps { // Device Info diff --git a/app/app/components/DeviceListItem.tsx b/app/app/components/DeviceListItem.tsx index a8083035..3da559de 100644 --- a/app/app/components/DeviceListItem.tsx +++ b/app/app/components/DeviceListItem.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { View, Text, TouchableOpacity, StyleSheet } from 'react-native'; -import { OmiDevice } from 'friend-lite-react-native'; +import { OmiDevice } from 'chronicle-react-native'; interface DeviceListItemProps { device: OmiDevice; diff --git a/app/app/hooks/useAudioListener.ts b/app/app/hooks/useAudioListener.ts index 391ed125..1dcf225e 100644 --- a/app/app/hooks/useAudioListener.ts +++ b/app/app/hooks/useAudioListener.ts @@ -1,6 +1,6 @@ import { useState, useRef, useCallback, useEffect } from 'react'; import { Alert } from 'react-native'; -import { OmiConnection } from 'friend-lite-react-native'; +import { OmiConnection } from 'chronicle-react-native'; import { Subscription, ConnectionPriority } from 'react-native-ble-plx'; // OmiConnection might use this type for subscriptions interface UseAudioListener { diff --git 
a/app/app/hooks/useDeviceConnection.ts b/app/app/hooks/useDeviceConnection.ts index e729169e..964e4d4e 100644 --- a/app/app/hooks/useDeviceConnection.ts +++ b/app/app/hooks/useDeviceConnection.ts @@ -1,6 +1,6 @@ import { useState, useCallback } from 'react'; import { Alert } from 'react-native'; -import { OmiConnection, BleAudioCodec, OmiDevice } from 'friend-lite-react-native'; +import { OmiConnection, BleAudioCodec, OmiDevice } from 'chronicle-react-native'; interface UseDeviceConnection { connectedDevice: OmiDevice | null; diff --git a/app/app/hooks/useDeviceScanning.ts b/app/app/hooks/useDeviceScanning.ts index d7780266..f4c16ff3 100644 --- a/app/app/hooks/useDeviceScanning.ts +++ b/app/app/hooks/useDeviceScanning.ts @@ -1,6 +1,6 @@ import { useState, useEffect, useCallback, useRef } from 'react'; import { BleManager, State as BluetoothState } from 'react-native-ble-plx'; -import { OmiConnection, OmiDevice } from 'friend-lite-react-native'; // Assuming this is the correct import for Omi types +import { OmiConnection, OmiDevice } from 'chronicle-react-native'; // Assuming this is the correct import for Omi types interface UseDeviceScanning { devices: OmiDevice[]; diff --git a/app/app/index.tsx b/app/app/index.tsx index 8bb1234a..2b20cb7b 100644 --- a/app/app/index.tsx +++ b/app/app/index.tsx @@ -1,6 +1,6 @@ import React, { useRef, useCallback, useEffect, useState } from 'react'; import { StyleSheet, Text, View, SafeAreaView, ScrollView, Platform, FlatList, ActivityIndicator, Alert, Switch, Button, TouchableOpacity, KeyboardAvoidingView } from 'react-native'; -import { OmiConnection } from 'friend-lite-react-native'; // OmiDevice also comes from here +import { OmiConnection } from 'chronicle-react-native'; // OmiDevice also comes from here import { State as BluetoothState } from 'react-native-ble-plx'; // Import State from ble-plx // Hooks @@ -521,7 +521,7 @@ export default function App() { contentContainerStyle={styles.content} 
keyboardShouldPersistTaps="handled" > - Friend Lite + Chronicle {/* Backend Connection - moved to top */} **Note**: This documentation covers the modern React interface located in `./webui/`. The legacy Streamlit interface has been moved to `src/_webui_original/` for reference. diff --git a/backends/advanced/Docs/architecture.md b/backends/advanced/Docs/architecture.md index 8211cb32..d5edb6a3 100644 --- a/backends/advanced/Docs/architecture.md +++ b/backends/advanced/Docs/architecture.md @@ -1,11 +1,11 @@ -# Friend-Lite Backend Architecture +# Chronicle Backend Architecture > ๐Ÿ“– **Prerequisite**: Read [quickstart.md](./quickstart.md) first for basic system understanding. ## System Overview -Friend-Lite is a comprehensive real-time conversation processing system that captures audio streams, performs speech-to-text transcription, and extracts memories. The system features a FastAPI backend with WebSocket audio streaming, versioned transcript and memory processing, a React web dashboard with search capabilities, and user authentication with role-based access control. +Chronicle is a comprehensive real-time conversation processing system that captures audio streams, performs speech-to-text transcription, and extracts memories. The system features a FastAPI backend with WebSocket audio streaming, versioned transcript and memory processing, a React web dashboard with search capabilities, and user authentication with role-based access control. **Core Implementation**: The complete system is implemented in `src/advanced_omi_backend/main.py` with supporting services in dedicated modules, using a modular router/controller architecture pattern. 
@@ -1049,7 +1049,7 @@ src/advanced_omi_backend/ "memory_versions": [ { "version_id": "version_def", - "provider": "friend_lite", + "provider": "chronicle", "created_at": "2025-01-15T10:32:00Z", "memory_count": 5 } diff --git a/backends/advanced/Docs/auth.md b/backends/advanced/Docs/auth.md index 4a3f7267..2aa7d254 100644 --- a/backends/advanced/Docs/auth.md +++ b/backends/advanced/Docs/auth.md @@ -2,7 +2,7 @@ ## Overview -Friend-Lite uses a comprehensive authentication system built on `fastapi-users` with support for multiple authentication methods including JWT tokens and cookies. The system provides secure user management with proper data isolation and role-based access control using MongoDB ObjectIds for user identification. +Chronicle uses a comprehensive authentication system built on `fastapi-users` with support for multiple authentication methods including JWT tokens and cookies. The system provides secure user management with proper data isolation and role-based access control using MongoDB ObjectIds for user identification. ## Architecture Components @@ -269,7 +269,7 @@ echo $ADMIN_PASSWORD ### Debug Commands ```bash # Check user database -docker exec -it mongo-container mongosh friend-lite +docker exec -it mongo-container mongosh chronicle # View authentication logs docker compose logs friend-backend | grep -i auth diff --git a/backends/advanced/Docs/memories.md b/backends/advanced/Docs/memories.md index 06aa3f60..b2887dc9 100644 --- a/backends/advanced/Docs/memories.md +++ b/backends/advanced/Docs/memories.md @@ -2,7 +2,7 @@ > ๐Ÿ“– **Prerequisite**: Read [quickstart.md](./quickstart.md) first for system overview. -This document explains how to configure and customize the memory service in the friend-lite backend. +This document explains how to configure and customize the memory service in the chronicle backend. 
**Code References**: - **Main Implementation**: `src/memory/memory_service.py` @@ -65,7 +65,7 @@ OLLAMA_BASE_URL=http://192.168.0.110:11434 QDRANT_BASE_URL=localhost # Mem0 Organization Settings (optional) -MEM0_ORGANIZATION_ID=friend-lite-org +MEM0_ORGANIZATION_ID=chronicle-org MEM0_PROJECT_ID=audio-conversations MEM0_APP_ID=omi-backend @@ -391,7 +391,7 @@ process_memory.add( "timestamp": 1720616655, "conversation_context": "audio_transcription", "device_type": "audio_recording", - "organization_id": "friend-lite-org", + "organization_id": "chronicle-org", "project_id": "audio-conversations", "app_id": "omi-backend" } @@ -583,7 +583,7 @@ The memory service exposes these endpoints with enhanced search capabilities: - **Vector-based**: Uses embeddings for contextual understanding beyond keyword matching **Memory Count API**: -- **Friend-Lite Provider**: Native Qdrant count API provides accurate total counts +- **Chronicle Provider**: Native Qdrant count API provides accurate total counts - **OpenMemory MCP Provider**: Count support varies by OpenMemory implementation - **Response Format**: `{"memories": [...], "total_count": 42}` when supported diff --git a/backends/advanced/Docs/quickstart.md b/backends/advanced/Docs/quickstart.md index 523218bc..fc5a77b7 100644 --- a/backends/advanced/Docs/quickstart.md +++ b/backends/advanced/Docs/quickstart.md @@ -1,14 +1,14 @@ -# Friend-Lite Backend Quickstart Guide +# Chronicle Backend Quickstart Guide -> ๐Ÿ“– **New to friend-lite?** This is your starting point! After reading this, continue with [architecture.md](./architecture.md) for technical details. +> ๐Ÿ“– **New to chronicle?** This is your starting point! After reading this, continue with [architecture.md](./architecture.md) for technical details. ## Overview -Friend-Lite is an eco-system of services to support "AI wearable" agents/functionality. +Chronicle is an eco-system of services to support "AI wearable" agents/functionality. 
At the moment, the basic functionalities are: - Audio capture (via WebSocket, from OMI device, files, or a laptop) - Audio transcription -- **Advanced memory system** with pluggable providers (Friend-Lite native or OpenMemory MCP) +- **Advanced memory system** with pluggable providers (Chronicle native or OpenMemory MCP) - **Enhanced memory extraction** with individual fact storage and smart updates - **Semantic memory search** with relevance threshold filtering and live results - Action item extraction @@ -36,13 +36,13 @@ cd backends/advanced - **Authentication**: Admin email/password setup - **Transcription Provider**: Choose Deepgram, Mistral, or Offline (Parakeet) - **LLM Provider**: Choose OpenAI or Ollama for memory extraction -- **Memory Provider**: Choose Friend-Lite Native or OpenMemory MCP +- **Memory Provider**: Choose Chronicle Native or OpenMemory MCP - **Optional Services**: Speaker Recognition and other extras - **Network Configuration**: Ports and host settings **Example flow:** ``` -๐Ÿš€ Friend-Lite Interactive Setup +๐Ÿš€ Chronicle Interactive Setup =============================================== โ–บ Authentication Setup @@ -124,13 +124,13 @@ ADMIN_EMAIL=admin@example.com **Memory Provider Configuration:** ```bash # Memory Provider (Choose One) -# Option 1: Friend-Lite Native (Default - Recommended) -MEMORY_PROVIDER=friend_lite +# Option 1: Chronicle Native (Default - Recommended) +MEMORY_PROVIDER=chronicle # Option 2: OpenMemory MCP (Cross-client compatibility) # MEMORY_PROVIDER=openmemory_mcp # OPENMEMORY_MCP_URL=http://host.docker.internal:8765 -# OPENMEMORY_CLIENT_NAME=friend_lite +# OPENMEMORY_CLIENT_NAME=chronicle # OPENMEMORY_USER_ID=openmemory ``` @@ -323,8 +323,8 @@ curl -X POST "http://localhost:8000/api/audio/upload" \ ### Memory & Intelligence #### Pluggable Memory System -- **Two memory providers**: Choose between Friend-Lite native or OpenMemory MCP -- **Friend-Lite Provider**: Full control with custom extraction, individual fact 
storage, smart deduplication +- **Two memory providers**: Choose between Chronicle native or OpenMemory MCP +- **Chronicle Provider**: Full control with custom extraction, individual fact storage, smart deduplication - **OpenMemory MCP Provider**: Cross-client compatibility (Claude Desktop, Cursor, Windsurf), professional processing #### Enhanced Memory Processing @@ -480,7 +480,7 @@ tailscale ip -4 ## Data Architecture -The friend-lite backend uses a **user-centric data architecture**: +The chronicle backend uses a **user-centric data architecture**: - **All memories are keyed by database user_id** (not client_id) - **Client information is stored in metadata** for reference and debugging @@ -493,12 +493,12 @@ For detailed information, see [User Data Architecture](user-data-architecture.md ### Choosing a Memory Provider -Friend-Lite offers two memory backends: +Chronicle offers two memory backends: -#### 1. Friend-Lite Native +#### 1. Chronicle Native ```bash # In your .env file -MEMORY_PROVIDER=friend_lite +MEMORY_PROVIDER=chronicle LLM_PROVIDER=openai OPENAI_API_KEY=your-openai-key-here ``` @@ -517,7 +517,7 @@ OPENAI_API_KEY=your-openai-key-here cd extras/openmemory-mcp docker compose up -d -# Then configure Friend-Lite +# Then configure Chronicle MEMORY_PROVIDER=openmemory_mcp OPENMEMORY_MCP_URL=http://host.docker.internal:8765 ``` diff --git a/backends/advanced/docker-compose-test.yml b/backends/advanced/docker-compose-test.yml index 1dde7c55..a507a455 100644 --- a/backends/advanced/docker-compose-test.yml +++ b/backends/advanced/docker-compose-test.yml @@ -3,7 +3,7 @@ # Uses different ports to avoid conflicts with development environment services: - friend-backend-test: + chronicle-backend-test: build: context: . 
dockerfile: Dockerfile @@ -35,7 +35,7 @@ services: - TRANSCRIPTION_PROVIDER=${TRANSCRIPTION_PROVIDER:-deepgram} # - PARAKEET_ASR_URL=${PARAKEET_ASR_URL} # Memory provider configuration - - MEMORY_PROVIDER=${MEMORY_PROVIDER:-friend_lite} + - MEMORY_PROVIDER=${MEMORY_PROVIDER:-chronicle} - OPENMEMORY_MCP_URL=${OPENMEMORY_MCP_URL:-http://host.docker.internal:8765} - OPENMEMORY_USER_ID=${OPENMEMORY_USER_ID:-openmemory} - MYCELIA_URL=http://mycelia-backend-test:5173 @@ -75,7 +75,7 @@ services: ports: - "3001:80" # Avoid conflict with dev on 3000 depends_on: - friend-backend-test: + chronicle-backend-test: condition: service_healthy mongo-test: condition: service_healthy @@ -145,7 +145,7 @@ services: - ADMIN_PASSWORD=test-admin-password-123 - ADMIN_EMAIL=test-admin@example.com - TRANSCRIPTION_PROVIDER=${TRANSCRIPTION_PROVIDER:-deepgram} - - MEMORY_PROVIDER=${MEMORY_PROVIDER:-friend_lite} + - MEMORY_PROVIDER=${MEMORY_PROVIDER:-chronicle} - OPENMEMORY_MCP_URL=${OPENMEMORY_MCP_URL:-http://host.docker.internal:8765} - OPENMEMORY_USER_ID=${OPENMEMORY_USER_ID:-openmemory} - MYCELIA_URL=http://mycelia-backend-test:5173 @@ -157,7 +157,7 @@ services: # Wait for audio queue to drain before timing out (test mode) - WAIT_FOR_AUDIO_QUEUE_DRAIN=true depends_on: - friend-backend-test: + chronicle-backend-test: condition: service_healthy mongo-test: condition: service_healthy @@ -168,58 +168,58 @@ services: restart: unless-stopped # Mycelia - AI memory and timeline service (test environment) - mycelia-backend-test: - build: - context: ../../extras/mycelia/backend - dockerfile: Dockerfile.simple - ports: - - "5100:5173" # Test backend port - environment: - # Shared JWT secret for Friend-Lite authentication (test key) - - JWT_SECRET=test-jwt-signing-key-for-integration-tests - - SECRET_KEY=test-jwt-signing-key-for-integration-tests - # MongoDB connection (test database) - - MONGO_URL=mongodb://mongo-test:27017 - - MONGO_DB=mycelia_test - - DATABASE_NAME=mycelia_test - # Redis connection 
(ioredis uses individual host/port, not URL) - - REDIS_HOST=redis-test - - REDIS_PORT=6379 - volumes: - - ../../extras/mycelia/backend/app:/app/app # Mount source for development - depends_on: - mongo-test: - condition: service_healthy - redis-test: - condition: service_started - healthcheck: - test: ["CMD", "deno", "eval", "fetch('http://localhost:5173/health').then(r => r.ok ? Deno.exit(0) : Deno.exit(1))"] - interval: 30s - timeout: 10s - retries: 3 - start_period: 5s - restart: unless-stopped - profiles: - - mycelia + # mycelia-backend-test: + # build: + # context: ../../extras/mycelia/backend + # dockerfile: Dockerfile.simple + # ports: + # - "5100:5173" # Test backend port + # environment: + # # Shared JWT secret for Chronicle authentication (test key) + # - JWT_SECRET=test-jwt-signing-key-for-integration-tests + # - SECRET_KEY=test-jwt-signing-key-for-integration-tests + # # MongoDB connection (test database) + # - MONGO_URL=mongodb://mongo-test:27017 + # - MONGO_DB=mycelia_test + # - DATABASE_NAME=mycelia_test + # # Redis connection (ioredis uses individual host/port, not URL) + # - REDIS_HOST=redis-test + # - REDIS_PORT=6379 + # volumes: + # - ../../extras/mycelia/backend/app:/app/app # Mount source for development + # depends_on: + # mongo-test: + # condition: service_healthy + # redis-test: + # condition: service_started + # healthcheck: + # test: ["CMD", "deno", "eval", "fetch('http://localhost:5173/health').then(r => r.ok ? 
Deno.exit(0) : Deno.exit(1))"] + # interval: 30s + # timeout: 10s + # retries: 3 + # start_period: 5s + # restart: unless-stopped + # profiles: + # - mycelia - mycelia-frontend-test: - build: - context: ../../extras/mycelia - dockerfile: frontend/Dockerfile.simple - args: - - VITE_API_URL=http://localhost:5100 - ports: - - "3002:8080" # Nginx serves on 8080 internally - environment: - - VITE_API_URL=http://localhost:5100 - volumes: - - ../../extras/mycelia/frontend/src:/app/src # Mount source for development - depends_on: - mycelia-backend-test: - condition: service_healthy - restart: unless-stopped - profiles: - - mycelia + # mycelia-frontend-test: + # build: + # context: ../../extras/mycelia + # dockerfile: frontend/Dockerfile.simple + # args: + # - VITE_API_URL=http://localhost:5100 + # ports: + # - "3002:8080" # Nginx serves on 8080 internally + # environment: + # - VITE_API_URL=http://localhost:5100 + # volumes: + # - ../../extras/mycelia/frontend/src:/app/src # Mount source for development + # depends_on: + # mycelia-backend-test: + # condition: service_healthy + # restart: unless-stopped + # profiles: + # - mycelia # caddy: # image: caddy:2-alpine @@ -233,7 +233,7 @@ services: # depends_on: # webui-test: # condition: service_started - # friend-backend-test: + # chronicle-backend-test: # condition: service_healthy # restart: unless-stopped diff --git a/backends/advanced/docker-compose.yml b/backends/advanced/docker-compose.yml index d9d58dca..ea2f936b 100644 --- a/backends/advanced/docker-compose.yml +++ b/backends/advanced/docker-compose.yml @@ -1,5 +1,5 @@ services: - friend-backend: + chronicle-backend: build: context: . 
dockerfile: Dockerfile @@ -52,7 +52,7 @@ services: restart: unless-stopped # Unified Worker Container - # No CUDA needed for friend-backend and workers, workers only orchestrate jobs and call external services + # No CUDA needed for chronicle-backend and workers, workers only orchestrate jobs and call external services # Runs all workers in a single container for efficiency: # - 3 RQ workers (transcription, memory, default queues) # - 1 Audio stream worker (Redis Streams consumer - must be single to maintain sequential chunks) @@ -102,7 +102,7 @@ services: # - "${WEBUI_PORT:-3010}:80" - 3010:80 depends_on: - friend-backend: + chronicle-backend: condition: service_healthy restart: unless-stopped @@ -119,7 +119,7 @@ services: - caddy_data:/data - caddy_config:/config depends_on: - friend-backend: + chronicle-backend: condition: service_healthy restart: unless-stopped profiles: @@ -138,7 +138,7 @@ services: - ./webui/src:/app/src - ./webui/public:/app/public depends_on: - friend-backend: + chronicle-backend: condition: service_healthy profiles: - dev @@ -216,7 +216,7 @@ services: # UNCOMMENT OUT FOR LOCAL DEMO - EXPOSES to internet # ngrok: # image: ngrok/ngrok:latest - # depends_on: [friend-backend, proxy] + # depends_on: [chronicle-backend, proxy] # ports: # - "4040:4040" # Ngrok web interface # environment: diff --git a/backends/advanced/init-https.sh b/backends/advanced/init-https.sh index 8cca1ba1..cfeebf61 100755 --- a/backends/advanced/init-https.sh +++ b/backends/advanced/init-https.sh @@ -1,7 +1,7 @@ #!/bin/bash set -e -# Initialize Friend-Lite Advanced Backend with HTTPS proxy +# Initialize Chronicle Advanced Backend with HTTPS proxy # Usage: ./init.sh if [ $# -ne 1 ]; then @@ -23,7 +23,7 @@ if ! 
echo "$TAILSCALE_IP" | grep -E '^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{ exit 1 fi -echo "๐Ÿš€ Initializing Friend-Lite Advanced Backend with Tailscale IP: $TAILSCALE_IP" +echo "๐Ÿš€ Initializing Chronicle Advanced Backend with Tailscale IP: $TAILSCALE_IP" echo "" # Check if nginx.conf.template exists @@ -98,7 +98,7 @@ echo " ๐Ÿ“ฑ Navigate to Live Record page" echo " ๐ŸŽค Microphone access will work over HTTPS" echo "" echo "๐Ÿ”ง Services included:" -echo " - Friend-Lite Backend: Internal (proxied through nginx)" +echo " - Chronicle Backend: Internal (proxied through nginx)" echo " - Web Dashboard: https://localhost/ or https://$TAILSCALE_IP/" echo " - WebSocket Audio: wss://localhost/ws_pcm or wss://$TAILSCALE_IP/ws_pcm" echo "" diff --git a/backends/advanced/init.py b/backends/advanced/init.py index 667f5209..756ca371 100644 --- a/backends/advanced/init.py +++ b/backends/advanced/init.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -Friend-Lite Advanced Backend Interactive Setup Script +Chronicle Advanced Backend Interactive Setup Script Interactive configuration for all services and API keys """ @@ -22,7 +22,7 @@ from rich.text import Text -class FriendLiteSetup: +class ChronicleSetup: def __init__(self, args=None): self.console = Console() self.config: Dict[str, Any] = {} @@ -265,26 +265,26 @@ def setup_memory(self): self.print_section("Memory Storage Configuration") choices = { - "1": "Friend-Lite Native (Qdrant + custom extraction)", + "1": "Chronicle Native (Qdrant + custom extraction)", "2": "OpenMemory MCP (cross-client compatible, external server)" } choice = self.prompt_choice("Choose your memory storage backend:", choices, "1") if choice == "1": - self.config["MEMORY_PROVIDER"] = "friend_lite" - self.console.print("[blue][INFO][/blue] Friend-Lite Native memory provider selected") + self.config["MEMORY_PROVIDER"] = "chronicle" + self.console.print("[blue][INFO][/blue] Chronicle Native memory provider selected") qdrant_url = self.prompt_value("Qdrant 
URL", "qdrant") self.config["QDRANT_BASE_URL"] = qdrant_url - self.console.print("[green][SUCCESS][/green] Friend-Lite memory provider configured") + self.console.print("[green][SUCCESS][/green] Chronicle memory provider configured") elif choice == "2": self.config["MEMORY_PROVIDER"] = "openmemory_mcp" self.console.print("[blue][INFO][/blue] OpenMemory MCP selected") mcp_url = self.prompt_value("OpenMemory MCP server URL", "http://host.docker.internal:8765") - client_name = self.prompt_value("OpenMemory client name", "friend_lite") + client_name = self.prompt_value("OpenMemory client name", "chronicle") user_id = self.prompt_value("OpenMemory user ID", "openmemory") self.config["OPENMEMORY_MCP_URL"] = mcp_url @@ -473,7 +473,7 @@ def show_summary(self): self.console.print(f"โœ… Admin Account: {self.config.get('ADMIN_EMAIL', 'Not configured')}") self.console.print(f"โœ… Transcription: {self.config.get('TRANSCRIPTION_PROVIDER', 'Not configured')}") self.console.print(f"โœ… LLM Provider: {self.config.get('LLM_PROVIDER', 'Not configured')}") - self.console.print(f"โœ… Memory Provider: {self.config.get('MEMORY_PROVIDER', 'friend_lite')}") + self.console.print(f"โœ… Memory Provider: {self.config.get('MEMORY_PROVIDER', 'chronicle')}") # Auto-determine URLs based on HTTPS configuration if self.config.get('HTTPS_ENABLED') == 'true': server_ip = self.config.get('SERVER_IP', 'localhost') @@ -523,8 +523,8 @@ def show_next_steps(self): def run(self): """Run the complete setup process""" - self.print_header("๐Ÿš€ Friend-Lite Interactive Setup") - self.console.print("This wizard will help you configure Friend-Lite with all necessary services.") + self.print_header("๐Ÿš€ Chronicle Interactive Setup") + self.console.print("This wizard will help you configure Chronicle with all necessary services.") self.console.print("We'll ask for your API keys and preferences step by step.") self.console.print() @@ -569,7 +569,7 @@ def run(self): def main(): """Main entry point""" - parser = 
argparse.ArgumentParser(description="Friend-Lite Advanced Backend Setup") + parser = argparse.ArgumentParser(description="Chronicle Advanced Backend Setup") parser.add_argument("--speaker-service-url", help="Speaker Recognition service URL (default: prompt user)") parser.add_argument("--parakeet-asr-url", @@ -581,7 +581,7 @@ def main(): args = parser.parse_args() - setup = FriendLiteSetup(args) + setup = ChronicleSetup(args) setup.run() diff --git a/backends/advanced/scripts/create_mycelia_api_key.py b/backends/advanced/scripts/create_mycelia_api_key.py index ac2149e8..a517af7b 100755 --- a/backends/advanced/scripts/create_mycelia_api_key.py +++ b/backends/advanced/scripts/create_mycelia_api_key.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -"""Create a proper Mycelia API key (not OAuth client) for Friend-Lite user.""" +"""Create a proper Mycelia API key (not OAuth client) for Chronicle user.""" import os import sys @@ -73,7 +73,7 @@ def main(): "hashedKey": hashed_key, # Note: hashedKey, not hash! "salt": base64.b64encode(salt).decode('utf-8'), # Store as base64 like Mycelia "owner": USER_ID, - "name": "Friend-Lite Integration", + "name": "Chronicle Integration", "policies": [ { "resource": "**", diff --git a/backends/advanced/scripts/sync_friendlite_mycelia.py b/backends/advanced/scripts/sync_friendlite_mycelia.py index c7051f2c..3849a5a9 100644 --- a/backends/advanced/scripts/sync_friendlite_mycelia.py +++ b/backends/advanced/scripts/sync_friendlite_mycelia.py @@ -1,25 +1,25 @@ #!/usr/bin/env python3 """ -Sync Friend-Lite users with Mycelia OAuth credentials. +Sync Chronicle users with Mycelia OAuth credentials. -This script helps migrate existing Friend-Lite installations to use Mycelia, -or sync existing Mycelia installations with Friend-Lite users. +This script helps migrate existing Chronicle installations to use Mycelia, +or sync existing Mycelia installations with Chronicle users. 
Usage: # Dry run (preview changes) - python scripts/sync_friendlite_mycelia.py --dry-run + python scripts/sync_chronicle_mycelia.py --dry-run # Sync all users - python scripts/sync_friendlite_mycelia.py --sync-all + python scripts/sync_chronicle_mycelia.py --sync-all # Sync specific user - python scripts/sync_friendlite_mycelia.py --email admin@example.com + python scripts/sync_chronicle_mycelia.py --email admin@example.com # Check for orphaned Mycelia objects - python scripts/sync_friendlite_mycelia.py --check-orphans + python scripts/sync_chronicle_mycelia.py --check-orphans # Reassign orphaned objects to a user - python scripts/sync_friendlite_mycelia.py --reassign-orphans --target-email admin@example.com + python scripts/sync_chronicle_mycelia.py --reassign-orphans --target-email admin@example.com Environment Variables: MONGODB_URI or MONGO_URL - MongoDB connection string @@ -41,18 +41,18 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src')) -class FriendLiteMyceliaSync: - """Sync Friend-Lite users with Mycelia OAuth credentials.""" +class ChronicleMyceliaSync: + """Sync Chronicle users with Mycelia OAuth credentials.""" - def __init__(self, mongo_url: str, mycelia_db: str, friendlite_db: str): + def __init__(self, mongo_url: str, mycelia_db: str, chronicle_db: str): self.mongo_url = mongo_url self.mycelia_db = mycelia_db - self.friendlite_db = friendlite_db + self.chronicle_db = chronicle_db self.client = MongoClient(mongo_url) print(f"๐Ÿ“Š Connected to MongoDB:") print(f" URL: {mongo_url}") - print(f" Friend-Lite DB: {friendlite_db}") + print(f" Chronicle DB: {chronicle_db}") print(f" Mycelia DB: {mycelia_db}\n") def _hash_api_key_with_salt(self, api_key: str, salt: bytes) -> str: @@ -62,9 +62,9 @@ def _hash_api_key_with_salt(self, api_key: str, salt: bytes) -> str: h.update(api_key.encode('utf-8')) return base64.b64encode(h.digest()).decode('utf-8') - def get_all_friendlite_users(self) -> List[Dict]: - """Get all users from Friend-Lite 
database.""" - db = self.client[self.friendlite_db] + def get_all_chronicle_users(self) -> List[Dict]: + """Get all users from Chronicle database.""" + db = self.client[self.chronicle_db] users = list(db["users"].find({})) return users @@ -84,7 +84,7 @@ def get_mycelia_api_key_for_user(self, user_id: str) -> Optional[Dict]: return api_key def create_mycelia_api_key(self, user_id: str, user_email: str, dry_run: bool = False) -> Tuple[str, str]: - """Create a Mycelia API key for a Friend-Lite user.""" + """Create a Mycelia API key for a Chronicle user.""" # Generate API key random_part = secrets.token_urlsafe(32) api_key = f"mycelia_{random_part}" @@ -96,7 +96,7 @@ def create_mycelia_api_key(self, user_id: str, user_email: str, dry_run: bool = "hashedKey": hashed_key, "salt": base64.b64encode(salt).decode('utf-8'), "owner": user_id, - "name": f"Friend-Lite Auto ({user_email})", + "name": f"Chronicle Auto ({user_email})", "policies": [{"resource": "**", "action": "*", "effect": "allow"}], "openPrefix": open_prefix, "createdAt": datetime.utcnow(), @@ -111,8 +111,8 @@ def create_mycelia_api_key(self, user_id: str, user_email: str, dry_run: bool = result = db["api_keys"].insert_one(api_key_doc) client_id = str(result.inserted_id) - # Update Friend-Lite user document - fl_db = self.client[self.friendlite_db] + # Update Chronicle user document + fl_db = self.client[self.chronicle_db] fl_db["users"].update_one( {"_id": ObjectId(user_id)}, { @@ -156,13 +156,13 @@ def sync_user(self, user: Dict, dry_run: bool = False) -> bool: return False def sync_all_users(self, dry_run: bool = False): - """Sync all Friend-Lite users to Mycelia OAuth.""" - users = self.get_all_friendlite_users() + """Sync all Chronicle users to Mycelia OAuth.""" + users = self.get_all_chronicle_users() print(f"{'='*80}") print(f"SYNC ALL USERS") print(f"{'='*80}") - print(f"Found {len(users)} Friend-Lite users\n") + print(f"Found {len(users)} Chronicle users\n") if dry_run: print("๐Ÿ” DRY RUN MODE - No 
changes will be made\n") @@ -180,8 +180,8 @@ def sync_all_users(self, dry_run: bool = False): print(f"{'='*80}\n") def check_orphaned_objects(self): - """Find Mycelia objects with userId not matching any Friend-Lite user.""" - users = self.get_all_friendlite_users() + """Find Mycelia objects with userId not matching any Chronicle user.""" + users = self.get_all_chronicle_users() user_ids = {str(user["_id"]) for user in users} objects = self.get_all_mycelia_objects() @@ -189,7 +189,7 @@ def check_orphaned_objects(self): print(f"{'='*80}") print(f"ORPHANED OBJECTS CHECK") print(f"{'='*80}") - print(f"Friend-Lite users: {len(user_ids)}") + print(f"Chronicle users: {len(user_ids)}") print(f"Mycelia objects: {len(objects)}\n") orphaned = [] @@ -229,20 +229,20 @@ def check_orphaned_objects(self): return orphaned def reassign_orphaned_objects(self, target_email: str, dry_run: bool = False): - """Reassign all orphaned objects to a specific Friend-Lite user.""" + """Reassign all orphaned objects to a specific Chronicle user.""" # Get target user - fl_db = self.client[self.friendlite_db] + fl_db = self.client[self.chronicle_db] target_user = fl_db["users"].find_one({"email": target_email}) if not target_user: - print(f"โœ— User with email '{target_email}' not found in Friend-Lite") + print(f"โœ— User with email '{target_email}' not found in Chronicle") return target_user_id = str(target_user["_id"]) print(f"Target user: {target_email} (ID: {target_user_id})\n") # Find orphaned objects - users = self.get_all_friendlite_users() + users = self.get_all_chronicle_users() user_ids = {str(user["_id"]) for user in users} objects = self.get_all_mycelia_objects() @@ -291,7 +291,7 @@ def reassign_orphaned_objects(self, target_email: str, dry_run: bool = False): def display_sync_status(self): """Display current sync status.""" - users = self.get_all_friendlite_users() + users = self.get_all_chronicle_users() print(f"{'='*80}") print(f"SYNC STATUS") @@ -326,13 +326,13 @@ def 
display_sync_status(self): def main(): parser = argparse.ArgumentParser( - description="Sync Friend-Lite users with Mycelia OAuth credentials", + description="Sync Chronicle users with Mycelia OAuth credentials", formatter_class=argparse.RawDescriptionHelpFormatter, epilog=__doc__ ) parser.add_argument("--dry-run", action="store_true", help="Preview changes without making them") - parser.add_argument("--sync-all", action="store_true", help="Sync all Friend-Lite users") + parser.add_argument("--sync-all", action="store_true", help="Sync all Chronicle users") parser.add_argument("--email", type=str, help="Sync specific user by email") parser.add_argument("--check-orphans", action="store_true", help="Check for orphaned Mycelia objects") parser.add_argument("--reassign-orphans", action="store_true", help="Reassign orphaned objects to target user") @@ -346,14 +346,14 @@ def main(): # Extract database name from MONGODB_URI if present if "/" in mongo_url and mongo_url.count("/") >= 3: - friendlite_db = mongo_url.split("/")[-1].split("?")[0] or "friend-lite" + chronicle_db = mongo_url.split("/")[-1].split("?")[0] or "chronicle" else: - friendlite_db = "friend-lite" + chronicle_db = "chronicle" mycelia_db = os.getenv("MYCELIA_DB", os.getenv("DATABASE_NAME", "mycelia")) # Create sync service - sync = FriendLiteMyceliaSync(mongo_url, mycelia_db, friendlite_db) + sync = ChronicleMyceliaSync(mongo_url, mycelia_db, chronicle_db) # Execute requested action if args.status: @@ -361,7 +361,7 @@ def main(): elif args.sync_all: sync.sync_all_users(dry_run=args.dry_run) elif args.email: - fl_db = sync.client[friendlite_db] + fl_db = sync.client[chronicle_db] user = fl_db["users"].find_one({"email": args.email}) if user: sync.sync_user(user, dry_run=args.dry_run) diff --git a/backends/advanced/setup-https.sh b/backends/advanced/setup-https.sh index 51f98fe9..e0f733df 100755 --- a/backends/advanced/setup-https.sh +++ b/backends/advanced/setup-https.sh @@ -1,7 +1,7 @@ #!/bin/bash set -e 
-# Friend-Lite Advanced Backend Initialization Script +# Chronicle Advanced Backend Initialization Script # Comprehensive setup for all configuration files and optional services # Colors for output @@ -73,8 +73,8 @@ if [ ! -f "pyproject.toml" ] || [ ! -d "src" ]; then exit 1 fi -print_header "Friend-Lite Advanced Backend Initialization" -echo "This script will help you set up the Friend-Lite backend with all necessary configurations." +print_header "Chronicle Advanced Backend Initialization" +echo "This script will help you set up the Chronicle backend with all necessary configurations." echo "" # Function to prompt yes/no diff --git a/backends/advanced/src/advanced_omi_backend/app_config.py b/backends/advanced/src/advanced_omi_backend/app_config.py index 4caa70c5..d42535fd 100644 --- a/backends/advanced/src/advanced_omi_backend/app_config.py +++ b/backends/advanced/src/advanced_omi_backend/app_config.py @@ -1,5 +1,5 @@ """ -Application configuration for Friend-Lite backend. +Application configuration for Chronicle backend. Centralizes all application-level configuration including database connections, service configurations, and environment variables that were previously in main.py. 
@@ -29,7 +29,7 @@ def __init__(self): # MongoDB Configuration self.mongodb_uri = os.getenv("MONGODB_URI", "mongodb://mongo:27017") self.mongo_client = AsyncIOMotorClient(self.mongodb_uri) - self.db = self.mongo_client.get_default_database("friend-lite") + self.db = self.mongo_client.get_default_database("chronicle") self.users_col = self.db["users"] self.speakers_col = self.db["speakers"] @@ -66,7 +66,7 @@ def __init__(self): # External Services Configuration self.qdrant_base_url = os.getenv("QDRANT_BASE_URL", "qdrant") self.qdrant_port = os.getenv("QDRANT_PORT", "6333") - self.memory_provider = os.getenv("MEMORY_PROVIDER", "friend_lite").lower() + self.memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() # Redis Configuration self.redis_url = os.getenv("REDIS_URL", "redis://localhost:6379/0") @@ -83,7 +83,7 @@ def __init__(self): self.max_workers = os.cpu_count() or 4 # Memory service configuration - self.memory_service_supports_threshold = self.memory_provider == "friend_lite" + self.memory_service_supports_threshold = self.memory_provider == "chronicle" # Global configuration instance diff --git a/backends/advanced/src/advanced_omi_backend/app_factory.py b/backends/advanced/src/advanced_omi_backend/app_factory.py index 65b1adbf..7ccda184 100644 --- a/backends/advanced/src/advanced_omi_backend/app_factory.py +++ b/backends/advanced/src/advanced_omi_backend/app_factory.py @@ -1,5 +1,5 @@ """ -Application factory for Friend-Lite backend. +Application factory for Chronicle backend. Creates and configures the FastAPI application with all routers, middleware, and service initializations. 
diff --git a/backends/advanced/src/advanced_omi_backend/auth.py b/backends/advanced/src/advanced_omi_backend/auth.py index 8b489988..4648e276 100644 --- a/backends/advanced/src/advanced_omi_backend/auth.py +++ b/backends/advanced/src/advanced_omi_backend/auth.py @@ -118,12 +118,12 @@ def generate_jwt_for_user(user_id: str, user_email: str) -> str: from datetime import datetime, timedelta import jwt - # Create JWT payload matching Friend-Lite's standard format + # Create JWT payload matching Chronicle's standard format payload = { "sub": user_id, # Subject = user ID "email": user_email, - "iss": "friend-lite", # Issuer - "aud": "friend-lite", # Audience + "iss": "chronicle", # Issuer + "aud": "chronicle", # Audience "exp": datetime.utcnow() + timedelta(hours=24), # 24 hour expiration "iat": datetime.utcnow(), # Issued at } diff --git a/backends/advanced/src/advanced_omi_backend/chat_service.py b/backends/advanced/src/advanced_omi_backend/chat_service.py index 4ec5ecff..1cd1a2e3 100644 --- a/backends/advanced/src/advanced_omi_backend/chat_service.py +++ b/backends/advanced/src/advanced_omi_backend/chat_service.py @@ -1,5 +1,5 @@ """ -Chat service implementation for Friend-Lite with memory integration. +Chat service implementation for Chronicle with memory integration. This module provides: - Chat session management with MongoDB persistence diff --git a/backends/advanced/src/advanced_omi_backend/clients/__init__.py b/backends/advanced/src/advanced_omi_backend/clients/__init__.py index 099f3c45..70c41823 100644 --- a/backends/advanced/src/advanced_omi_backend/clients/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/clients/__init__.py @@ -1,4 +1,4 @@ -"""Client implementations for Friend-Lite backend. +"""Client implementations for Chronicle backend. 
This module provides reusable client implementations that can be used for: - Integration testing diff --git a/backends/advanced/src/advanced_omi_backend/config.py b/backends/advanced/src/advanced_omi_backend/config.py index f2168e6d..ce018d6e 100644 --- a/backends/advanced/src/advanced_omi_backend/config.py +++ b/backends/advanced/src/advanced_omi_backend/config.py @@ -1,5 +1,5 @@ """ -Configuration management for Friend-Lite backend. +Configuration management for Chronicle backend. Currently contains diarization settings because they were used in multiple places causing circular imports. Other configurations can be moved here as needed. diff --git a/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py b/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py index 9341cc59..27b2810f 100644 --- a/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py +++ b/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py @@ -24,13 +24,13 @@ async def get_current_metrics(): """Get current system metrics.""" try: # Get memory provider configuration - memory_provider = os.getenv("MEMORY_PROVIDER", "friend_lite").lower() + memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() # Get basic system metrics metrics = { "timestamp": int(time.time()), "memory_provider": memory_provider, - "memory_provider_supports_threshold": memory_provider == "friend_lite", + "memory_provider_supports_threshold": memory_provider == "chronicle", } return metrics @@ -470,10 +470,10 @@ async def delete_all_user_memories(user: User): async def get_memory_provider(): """Get current memory provider configuration.""" try: - current_provider = os.getenv("MEMORY_PROVIDER", "friend_lite").lower() + current_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() # Get available providers - available_providers = ["friend_lite", "openmemory_mcp", "mycelia"] + available_providers = ["chronicle", "openmemory_mcp", 
"mycelia"] return { "current_provider": current_provider, @@ -493,7 +493,7 @@ async def set_memory_provider(provider: str): try: # Validate provider provider = provider.lower().strip() - valid_providers = ["friend_lite", "openmemory_mcp", "mycelia"] + valid_providers = ["chronicle", "openmemory_mcp", "mycelia"] if provider not in valid_providers: return JSONResponse( diff --git a/backends/advanced/src/advanced_omi_backend/controllers/websocket_controller.py b/backends/advanced/src/advanced_omi_backend/controllers/websocket_controller.py index a4338f2b..e138a6e5 100644 --- a/backends/advanced/src/advanced_omi_backend/controllers/websocket_controller.py +++ b/backends/advanced/src/advanced_omi_backend/controllers/websocket_controller.py @@ -1,6 +1,6 @@ """ -WebSocket controller for Friend-Lite backend. +WebSocket controller for Chronicle backend. This module handles WebSocket connections for audio streaming. """ diff --git a/backends/advanced/src/advanced_omi_backend/database.py b/backends/advanced/src/advanced_omi_backend/database.py index cca103ea..0d5e6507 100644 --- a/backends/advanced/src/advanced_omi_backend/database.py +++ b/backends/advanced/src/advanced_omi_backend/database.py @@ -1,5 +1,5 @@ """ -Database configuration and utilities for the Friend-Lite backend. +Database configuration and utilities for the Chronicle backend. This module provides centralized database access to avoid duplication across main.py and router modules. 
@@ -22,7 +22,7 @@ serverSelectionTimeoutMS=5000, # Fail fast if server unavailable socketTimeoutMS=20000, # 20 second timeout for operations ) -db = mongo_client.get_default_database("friend-lite") +db = mongo_client.get_default_database("chronicle") # Collection references (for non-Beanie collections) users_col = db["users"] diff --git a/backends/advanced/src/advanced_omi_backend/middleware/app_middleware.py b/backends/advanced/src/advanced_omi_backend/middleware/app_middleware.py index be2f2705..eafeffec 100644 --- a/backends/advanced/src/advanced_omi_backend/middleware/app_middleware.py +++ b/backends/advanced/src/advanced_omi_backend/middleware/app_middleware.py @@ -1,5 +1,5 @@ """ -Middleware configuration for Friend-Lite backend. +Middleware configuration for Chronicle backend. Centralizes CORS configuration and global exception handlers. """ diff --git a/backends/advanced/src/advanced_omi_backend/models/__init__.py b/backends/advanced/src/advanced_omi_backend/models/__init__.py index 52c63c20..a19fa0db 100644 --- a/backends/advanced/src/advanced_omi_backend/models/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/models/__init__.py @@ -1,8 +1,8 @@ """ -Models package for Friend-Lite backend. +Models package for Chronicle backend. This package contains Pydantic models that define the structure and validation -for all data entities in the Friend-Lite system. +for all data entities in the Chronicle system. """ # Models can be imported directly from their files diff --git a/backends/advanced/src/advanced_omi_backend/models/audio_file.py b/backends/advanced/src/advanced_omi_backend/models/audio_file.py index de1c6f3f..00060037 100644 --- a/backends/advanced/src/advanced_omi_backend/models/audio_file.py +++ b/backends/advanced/src/advanced_omi_backend/models/audio_file.py @@ -1,5 +1,5 @@ """ -AudioFile models for Friend-Lite backend. +AudioFile models for Chronicle backend. 
This module contains the Beanie Document model for audio_chunks collection, which stores ALL audio files (both with and without speech). This is the diff --git a/backends/advanced/src/advanced_omi_backend/models/conversation.py b/backends/advanced/src/advanced_omi_backend/models/conversation.py index 55c31244..87dc731a 100644 --- a/backends/advanced/src/advanced_omi_backend/models/conversation.py +++ b/backends/advanced/src/advanced_omi_backend/models/conversation.py @@ -1,5 +1,5 @@ """ -Conversation models for Friend-Lite backend. +Conversation models for Chronicle backend. This module contains Beanie Document and Pydantic models for conversations, transcript versions, and memory versions. @@ -28,7 +28,7 @@ class TranscriptProvider(str, Enum): class MemoryProvider(str, Enum): """Supported memory providers.""" - FRIEND_LITE = "friend_lite" + CHRONICLE = "chronicle" OPENMEMORY_MCP = "openmemory_mcp" MYCELIA = "mycelia" diff --git a/backends/advanced/src/advanced_omi_backend/models/job.py b/backends/advanced/src/advanced_omi_backend/models/job.py index 9d355ce5..8a19fd8e 100644 --- a/backends/advanced/src/advanced_omi_backend/models/job.py +++ b/backends/advanced/src/advanced_omi_backend/models/job.py @@ -45,9 +45,9 @@ async def _ensure_beanie_initialized(): # Create MongoDB client client = AsyncIOMotorClient(mongodb_uri) try: - database = client.get_default_database("friend-lite") + database = client.get_default_database("chronicle") except ConfigurationError: - database = client["friend-lite"] + database = client["chronicle"] raise _beanie_initialized = True # Initialize Beanie diff --git a/backends/advanced/src/advanced_omi_backend/routers/api_router.py b/backends/advanced/src/advanced_omi_backend/routers/api_router.py index a510d396..528713c0 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/api_router.py +++ b/backends/advanced/src/advanced_omi_backend/routers/api_router.py @@ -1,5 +1,5 @@ """ -Main API router for Friend-Lite backend. 
+Main API router for Chronicle backend. This module aggregates all the functional router modules and provides a single entry point for the API endpoints. diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/__init__.py b/backends/advanced/src/advanced_omi_backend/routers/modules/__init__.py index 371fd38d..a5669b06 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/__init__.py @@ -1,5 +1,5 @@ """ -Router modules for Friend-Lite API. +Router modules for Chronicle API. This package contains organized router modules for different functional areas: - user_routes: User management and authentication diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/chat_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/chat_routes.py index a1fea4fc..d0c64904 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/chat_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/chat_routes.py @@ -1,5 +1,5 @@ """ -Chat API routes for Friend-Lite with streaming support and memory integration. +Chat API routes for Chronicle with streaming support and memory integration. This module provides: - RESTful chat session management endpoints diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/client_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/client_routes.py index 191ca39f..821ad52a 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/client_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/client_routes.py @@ -1,5 +1,5 @@ """ -Client management routes for Friend-Lite API. +Client management routes for Chronicle API. Handles active client monitoring and management. 
""" diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/conversation_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/conversation_routes.py index e2b76f7d..8da0f5b0 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/conversation_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/conversation_routes.py @@ -1,5 +1,5 @@ """ -Conversation management routes for Friend-Lite API. +Conversation management routes for Chronicle API. Handles conversation CRUD operations, audio processing, and transcript management. """ diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py index 06e0da1e..24865f90 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py @@ -1,5 +1,5 @@ """ -Health check routes for Friend-Lite backend. +Health check routes for Chronicle backend. This module provides health check endpoints for monitoring the application's status. 
""" @@ -118,7 +118,7 @@ async def health_check(): critical_services_healthy = True # Get configuration once at the start - memory_provider = os.getenv("MEMORY_PROVIDER", "friend_lite") + memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle") speaker_service_url = os.getenv("SPEAKER_SERVICE_URL") openmemory_mcp_url = os.getenv("OPENMEMORY_MCP_URL") @@ -230,38 +230,38 @@ async def health_check(): overall_healthy = False # Check memory service (provider-dependent) - if memory_provider == "friend_lite": + if memory_provider == "chronicle": try: - # Test Friend-Lite memory service connection with timeout + # Test Chronicle memory service connection with timeout test_success = await asyncio.wait_for(memory_service.test_connection(), timeout=8.0) if test_success: health_status["services"]["memory_service"] = { - "status": "โœ… Friend-Lite Memory Connected", + "status": "โœ… Chronicle Memory Connected", "healthy": True, - "provider": "friend_lite", + "provider": "chronicle", "critical": False, } else: health_status["services"]["memory_service"] = { - "status": "โš ๏ธ Friend-Lite Memory Test Failed", + "status": "โš ๏ธ Chronicle Memory Test Failed", "healthy": False, - "provider": "friend_lite", + "provider": "chronicle", "critical": False, } overall_healthy = False except asyncio.TimeoutError: health_status["services"]["memory_service"] = { - "status": "โš ๏ธ Friend-Lite Memory Timeout (8s) - Check Qdrant", + "status": "โš ๏ธ Chronicle Memory Timeout (8s) - Check Qdrant", "healthy": False, - "provider": "friend_lite", + "provider": "chronicle", "critical": False, } overall_healthy = False except Exception as e: health_status["services"]["memory_service"] = { - "status": f"โš ๏ธ Friend-Lite Memory Failed: {str(e)}", + "status": f"โš ๏ธ Chronicle Memory Failed: {str(e)}", "healthy": False, - "provider": "friend_lite", + "provider": "chronicle", "critical": False, } overall_healthy = False diff --git 
a/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py index c9bc75e3..1d28a674 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py @@ -1,5 +1,5 @@ """ -Memory management routes for Friend-Lite API. +Memory management routes for Chronicle API. Handles memory CRUD operations, search, and debug functionality. """ diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/system_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/system_routes.py index 10587b5c..e51c036c 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/system_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/system_routes.py @@ -1,5 +1,5 @@ """ -System and utility routes for Friend-Lite API. +System and utility routes for Chronicle API. Handles metrics, auth config, and other system utilities. """ diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/user_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/user_routes.py index 808b8185..12ed5c63 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/user_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/user_routes.py @@ -1,5 +1,5 @@ """ -User management routes for Friend-Lite API. +User management routes for Chronicle API. Handles user CRUD operations and admin user management. 
""" diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/websocket_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/websocket_routes.py index 454cabb9..d9754a87 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/websocket_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/websocket_routes.py @@ -1,5 +1,5 @@ """ -WebSocket routes for Friend-Lite backend. +WebSocket routes for Chronicle backend. This module handles WebSocket connections for audio streaming. """ diff --git a/backends/advanced/src/advanced_omi_backend/services/__init__.py b/backends/advanced/src/advanced_omi_backend/services/__init__.py index 81d3c535..d656f34c 100644 --- a/backends/advanced/src/advanced_omi_backend/services/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/services/__init__.py @@ -1,5 +1,5 @@ """ -Services module for Friend-Lite backend. +Services module for Chronicle backend. This module contains business logic services and their provider implementations. """ diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py b/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py index c2413ff2..1b777028 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py @@ -1,7 +1,7 @@ """Memory service package. This package provides memory management functionality with support for -multiple memory providers (Friend-Lite, Mycelia, OpenMemory MCP). +multiple memory providers (Chronicle, Mycelia, OpenMemory MCP). The memory service handles extraction, storage, and retrieval of memories from user conversations and interactions. 
@@ -10,7 +10,7 @@ - base.py: Abstract base classes and interfaces - config.py: Configuration management - service_factory.py: Provider selection and instantiation -- providers/friend_lite.py: Friend-Lite native provider (LLM + Qdrant) +- providers/chronicle.py: Chronicle native provider (LLM + Qdrant) - providers/mycelia.py: Mycelia backend provider - providers/openmemory_mcp.py: OpenMemory MCP provider - providers/llm_providers.py: LLM implementations (OpenAI, Ollama) diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/config.py b/backends/advanced/src/advanced_omi_backend/services/memory/config.py index 3946deae..7560d88f 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/config.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/config.py @@ -34,7 +34,7 @@ class VectorStoreProvider(Enum): class MemoryProvider(Enum): """Supported memory service providers.""" - FRIEND_LITE = "friend_lite" # Default sophisticated implementation + CHRONICLE = "chronicle" # Default sophisticated implementation OPENMEMORY_MCP = "openmemory_mcp" # OpenMemory MCP backend MYCELIA = "mycelia" # Mycelia memory backend @@ -42,7 +42,7 @@ class MemoryProvider(Enum): @dataclass class MemoryConfig: """Configuration for memory service.""" - memory_provider: MemoryProvider = MemoryProvider.FRIEND_LITE + memory_provider: MemoryProvider = MemoryProvider.CHRONICLE llm_provider: LLMProvider = LLMProvider.OPENAI vector_store_provider: VectorStoreProvider = VectorStoreProvider.QDRANT llm_config: Dict[str, Any] = None @@ -111,7 +111,7 @@ def create_qdrant_config( def create_openmemory_config( server_url: str = "http://localhost:8765", - client_name: str = "friend_lite", + client_name: str = "chronicle", user_id: str = "default", timeout: int = 30 ) -> Dict[str, Any]: @@ -145,7 +145,7 @@ def build_memory_config_from_env() -> MemoryConfig: """Build memory configuration from environment variables and YAML config.""" try: # Determine memory provider - 
memory_provider = os.getenv("MEMORY_PROVIDER", "friend_lite").lower() + memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() if memory_provider not in [p.value for p in MemoryProvider]: raise ValueError(f"Unsupported memory provider: {memory_provider}") @@ -155,7 +155,7 @@ def build_memory_config_from_env() -> MemoryConfig: if memory_provider_enum == MemoryProvider.OPENMEMORY_MCP: openmemory_config = create_openmemory_config( server_url=os.getenv("OPENMEMORY_MCP_URL", "http://localhost:8765"), - client_name=os.getenv("OPENMEMORY_CLIENT_NAME", "friend_lite"), + client_name=os.getenv("OPENMEMORY_CLIENT_NAME", "chronicle"), user_id=os.getenv("OPENMEMORY_USER_ID", "default"), timeout=int(os.getenv("OPENMEMORY_TIMEOUT", "30")) ) @@ -199,7 +199,7 @@ def build_memory_config_from_env() -> MemoryConfig: timeout_seconds=int(os.getenv("MYCELIA_TIMEOUT", "30")) ) - # For Friend-Lite provider, use existing complex configuration + # For Chronicle provider, use existing complex configuration # Import config loader from advanced_omi_backend.memory_config_loader import get_config_loader @@ -282,7 +282,7 @@ def build_memory_config_from_env() -> MemoryConfig: extraction_enabled = config_loader.is_memory_extraction_enabled() extraction_prompt = config_loader.get_memory_prompt() if extraction_enabled else None - memory_logger.info(f"๐Ÿ”ง Memory config: Provider=Friend-Lite, LLM={llm_provider}, VectorStore={vector_store_provider}, Extraction={extraction_enabled}") + memory_logger.info(f"๐Ÿ”ง Memory config: Provider=Chronicle, LLM={llm_provider}, VectorStore={vector_store_provider}, Extraction={extraction_enabled}") return MemoryConfig( memory_provider=memory_provider_enum, diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py index 43d438cf..3a71f7cf 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py +++ 
b/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py @@ -1,7 +1,7 @@ """Memory service provider implementations. This package contains all memory service provider implementations: -- friend_lite: Friend-Lite native implementation with LLM + vector store +- chronicle: Chronicle native implementation with LLM + vector store - openmemory_mcp: OpenMemory MCP backend integration - mycelia: Mycelia backend integration - llm_providers: LLM provider implementations (OpenAI, Ollama) @@ -9,7 +9,7 @@ - mcp_client: MCP client utilities """ -from .friend_lite import MemoryService as FriendLiteMemoryService +from .chronicle import MemoryService as ChronicleMemoryService from .openmemory_mcp import OpenMemoryMCPService from .mycelia import MyceliaMemoryService from .llm_providers import OpenAIProvider @@ -17,7 +17,7 @@ from .mcp_client import MCPClient, MCPError __all__ = [ - "FriendLiteMemoryService", + "ChronicleMemoryService", "OpenMemoryMCPService", "MyceliaMemoryService", "OpenAIProvider", diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/chronicle.py similarity index 100% rename from backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py rename to backends/advanced/src/advanced_omi_backend/services/memory/providers/chronicle.py diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py index 15226971..3e08fae7 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py @@ -28,7 +28,7 @@ class MCPClient: client: HTTP client instance """ - def __init__(self, server_url: str, client_name: str = "friend_lite", user_id: str = "default", user_email: str = "", 
timeout: int = 30): + def __init__(self, server_url: str, client_name: str = "chronicle", user_id: str = "default", user_email: str = "", timeout: int = 30): """Initialize client for OpenMemory. Args: @@ -118,7 +118,7 @@ async def add_memories(self, text: str) -> List[str]: "text": text, "app": self.client_name, # Use app name (OpenMemory accepts name or UUID) "metadata": { - "source": "friend_lite", + "source": "chronicle", "client": self.client_name, "user_email": self.user_email }, @@ -212,7 +212,7 @@ async def search_memory(self, query: str, limit: int = 10) -> List[Dict[str, Any else: memories = [] - # Format memories for Friend-Lite + # Format memories for Chronicle formatted_memories = [] for memory in memories: formatted_memories.append({ @@ -366,7 +366,7 @@ async def get_memory(self, memory_id: str) -> Optional[Dict[str, Any]]: response.raise_for_status() result = response.json() - # Format memory for Friend-Lite + # Format memory for Chronicle if isinstance(result, dict): return { "id": result.get("id", memory_id), diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py index 40184776..87fbe690 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py @@ -43,7 +43,7 @@ class MyceliaMemoryService(MemoryServiceBase): """Memory service implementation using Mycelia backend. This class implements the MemoryServiceBase interface by delegating memory - operations to a Mycelia server using JWT authentication from Friend-Lite. + operations to a Mycelia server using JWT authentication from Chronicle. Args: api_url: Mycelia API endpoint URL @@ -194,7 +194,7 @@ async def _call_resource( Args: action: Action to perform (create, list, get, delete, etc.) 
- jwt_token: User's JWT token from Friend-Lite + jwt_token: User's JWT token from Chronicle **params: Additional parameters for the action Returns: @@ -242,7 +242,7 @@ async def _extract_memories_via_llm( return [] try: - # Get OpenAI client using Friend-Lite's utility + # Get OpenAI client using Chronicle's utility client = _get_openai_client( api_key=self.llm_config.get("api_key"), base_url=self.llm_config.get("base_url", "https://api.openai.com/v1"), @@ -300,7 +300,7 @@ async def _extract_temporal_entity_via_llm( return None try: - # Get OpenAI client using Friend-Lite's utility + # Get OpenAI client using Chronicle's utility client = _get_openai_client( api_key=self.llm_config.get("api_key"), base_url=self.llm_config.get("base_url", "https://api.openai.com/v1"), diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py index d18be16a..d8811c67 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py @@ -2,7 +2,7 @@ This module provides a concrete implementation of the MemoryServiceBase interface that uses OpenMemory MCP as the backend for all memory operations. It maintains -compatibility with the existing Friend-Lite memory service API while leveraging +compatibility with the existing Chronicle memory service API while leveraging OpenMemory's standardized memory management capabilities. """ @@ -20,10 +20,10 @@ class OpenMemoryMCPService(MemoryServiceBase): """Memory service implementation using OpenMemory MCP as backend. - + This class implements the MemoryServiceBase interface by delegating memory operations to an OpenMemory MCP server. It handles the translation between - Friend-Lite's memory service API and the standardized MCP operations. 
+ Chronicle's memory service API and the standardized MCP operations. Key features: - Maintains compatibility with existing MemoryServiceBase interface @@ -63,7 +63,7 @@ def __init__( """ super().__init__() self.server_url = server_url or os.getenv("OPENMEMORY_MCP_URL", "http://localhost:8765") - self.client_name = client_name or os.getenv("OPENMEMORY_CLIENT_NAME", "friend_lite") + self.client_name = client_name or os.getenv("OPENMEMORY_CLIENT_NAME", "chronicle") self.user_id = user_id or os.getenv("OPENMEMORY_USER_ID", "default") self.timeout = int(timeout or os.getenv("OPENMEMORY_TIMEOUT", "30")) self.mcp_client: Optional[MCPClient] = None @@ -145,7 +145,7 @@ async def add_memory( # Update MCP client user context for this operation original_user_id = self.mcp_client.user_id original_user_email = self.mcp_client.user_email - self.mcp_client.user_id = user_id # Use the actual Friend-Lite user's ID + self.mcp_client.user_id = user_id # Use the actual Chronicle user's ID self.mcp_client.user_email = user_email # Use the actual user's email try: @@ -206,7 +206,7 @@ async def search_memories( # Update MCP client user context for this operation original_user_id = self.mcp_client.user_id - self.mcp_client.user_id = user_id # Use the actual Friend-Lite user's ID + self.mcp_client.user_id = user_id # Use the actual Chronicle user's ID try: results = await self.mcp_client.search_memory( @@ -256,7 +256,7 @@ async def get_all_memories( # Update MCP client user context for this operation original_user_id = self.mcp_client.user_id - self.mcp_client.user_id = user_id # Use the actual Friend-Lite user's ID + self.mcp_client.user_id = user_id # Use the actual Chronicle user's ID try: results = await self.mcp_client.list_memories(limit=limit) @@ -296,7 +296,7 @@ async def get_memory(self, memory_id: str, user_id: Optional[str] = None) -> Opt # Update MCP client user context for this operation original_user_id = self.mcp_client.user_id - self.mcp_client.user_id = user_id or 
self.user_id # Use the actual Friend-Lite user's ID + self.mcp_client.user_id = user_id or self.user_id # Use the actual Chronicle user's ID try: result = await self.mcp_client.get_memory(memory_id) @@ -343,7 +343,7 @@ async def update_memory( # Update MCP client user context for this operation original_user_id = self.mcp_client.user_id - self.mcp_client.user_id = user_id or self.user_id # Use the actual Friend-Lite user's ID + self.mcp_client.user_id = user_id or self.user_id # Use the actual Chronicle user's ID try: success = await self.mcp_client.update_memory( diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py b/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py index 37922186..dc57dbe9 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py @@ -1,7 +1,7 @@ """Memory service factory for creating appropriate memory service instances. This module provides a factory pattern for instantiating memory services -based on configuration. It supports both the sophisticated Friend-Lite +based on configuration. It supports both the sophisticated Chronicle implementation and the OpenMemory MCP backend. 
""" @@ -36,10 +36,10 @@ def create_memory_service(config: MemoryConfig) -> MemoryServiceBase: """ memory_logger.info(f"๐Ÿง  Creating memory service with provider: {config.memory_provider.value}") - if config.memory_provider == MemoryProvider.FRIEND_LITE: - # Use the sophisticated Friend-Lite implementation - from .providers.friend_lite import MemoryService as FriendLiteMemoryService - return FriendLiteMemoryService(config) + if config.memory_provider == MemoryProvider.CHRONICLE: + # Use the sophisticated Chronicle implementation + from .providers.chronicle import MemoryService as ChronicleMemoryService + return ChronicleMemoryService(config) elif config.memory_provider == MemoryProvider.OPENMEMORY_MCP: # Use OpenMemory MCP implementation @@ -156,7 +156,7 @@ def get_service_info() -> dict: # Try to determine provider from service type if "OpenMemoryMCP" in info["service_type"]: info["memory_provider"] = "openmemory_mcp" - elif "FriendLite" in info["service_type"] or "MemoryService" in info["service_type"]: - info["memory_provider"] = "friend_lite" + elif "Chronicle" in info["service_type"] or "MemoryService" in info["service_type"]: + info["memory_provider"] = "chronicle" return info \ No newline at end of file diff --git a/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py index dd94bf63..93f4e342 100644 --- a/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py +++ b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py @@ -1,9 +1,9 @@ """ Mycelia OAuth Synchronization Service. -This module synchronizes Friend-Lite users with Mycelia OAuth API keys, +This module synchronizes Chronicle users with Mycelia OAuth API keys, ensuring that when users access Mycelia directly, they use credentials -that map to their Friend-Lite user ID. +that map to their Chronicle user ID. 
""" import logging @@ -19,7 +19,7 @@ class MyceliaSyncService: - """Synchronize Friend-Lite users with Mycelia OAuth API keys.""" + """Synchronize Chronicle users with Mycelia OAuth API keys.""" def __init__(self): """Initialize the sync service.""" @@ -31,15 +31,15 @@ def __init__(self): # Test environment uses mycelia_test, production uses mycelia self.mycelia_db = os.getenv("MYCELIA_DB", os.getenv("DATABASE_NAME", "mycelia")) - # Friend-Lite database - extract from MONGODB_URI or use default - # Test env: test_db, Production: friend-lite + # Chronicle database - extract from MONGODB_URI or use default + # Test env: test_db, Production: chronicle if "/" in self.mongo_url and self.mongo_url.count("/") >= 3: # Extract database name from mongodb://host:port/database - self.friendlite_db = self.mongo_url.split("/")[-1].split("?")[0] or "friend-lite" + self.chronicle_db = self.mongo_url.split("/")[-1].split("?")[0] or "chronicle" else: - self.friendlite_db = "friend-lite" + self.chronicle_db = "chronicle" - logger.info(f"MyceliaSyncService initialized: {self.mongo_url}, Mycelia DB: {self.mycelia_db}, Friend-Lite DB: {self.friendlite_db}") + logger.info(f"MyceliaSyncService initialized: {self.mongo_url}, Mycelia DB: {self.mycelia_db}, Chronicle DB: {self.chronicle_db}") def _hash_api_key_with_salt(self, api_key: str, salt: bytes) -> str: """Hash API key with salt (matches Mycelia's implementation).""" @@ -54,10 +54,10 @@ def _create_mycelia_api_key( user_email: str ) -> Tuple[str, str]: """ - Create a Mycelia API key for a Friend-Lite user. + Create a Mycelia API key for a Chronicle user. 
Args: - user_id: Friend-Lite user ID (MongoDB ObjectId as string) + user_id: Chronicle user ID (MongoDB ObjectId as string) user_email: User email address Returns: @@ -85,7 +85,7 @@ def _create_mycelia_api_key( existing = api_keys_collection.find_one({ "owner": user_id, "isActive": True, - "name": f"Friend-Lite Auto ({user_email})" + "name": f"Chronicle Auto ({user_email})" }) if existing: @@ -98,8 +98,8 @@ def _create_mycelia_api_key( api_key_doc = { "hashedKey": hashed_key, "salt": base64.b64encode(salt).decode('utf-8'), - "owner": user_id, # CRITICAL: owner = Friend-Lite user ID - "name": f"Friend-Lite Auto ({user_email})", + "owner": user_id, # CRITICAL: owner = Chronicle user ID + "name": f"Chronicle Auto ({user_email})", "policies": [ { "resource": "**", @@ -126,10 +126,10 @@ def sync_user_to_mycelia( user_email: str ) -> Optional[Tuple[str, str]]: """ - Sync a Friend-Lite user to Mycelia OAuth. + Sync a Chronicle user to Mycelia OAuth. Args: - user_id: Friend-Lite user ID + user_id: Chronicle user ID user_email: User email Returns: @@ -139,10 +139,10 @@ def sync_user_to_mycelia( # Create Mycelia API key client_id, api_key = self._create_mycelia_api_key(user_id, user_email) - # Store credentials in Friend-Lite user document (if new key was created) + # Store credentials in Chronicle user document (if new key was created) if api_key: client = MongoClient(self.mongo_url) - db = client[self.friendlite_db] + db = client[self.chronicle_db] users_collection = db["users"] from bson import ObjectId @@ -182,9 +182,9 @@ def sync_admin_user(self) -> Optional[Tuple[str, str]]: logger.warning("ADMIN_EMAIL not set, skipping Mycelia sync") return None - # Get admin user from Friend-Lite database + # Get admin user from Chronicle database client = MongoClient(self.mongo_url) - db = client[self.friendlite_db] + db = client[self.chronicle_db] users_collection = db["users"] admin_user = users_collection.find_one({"email": admin_email}) @@ -234,7 +234,7 @@ async def 
sync_admin_on_startup(): logger.info("๐Ÿ”„ Starting Mycelia OAuth synchronization...") # Check if Mycelia sync is enabled - memory_provider = os.getenv("MEMORY_PROVIDER", "friend_lite") + memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle") if memory_provider != "mycelia": logger.info("Mycelia sync skipped (MEMORY_PROVIDER != mycelia)") return diff --git a/backends/advanced/src/advanced_omi_backend/workers/conversation_jobs.py b/backends/advanced/src/advanced_omi_backend/workers/conversation_jobs.py index 1d4bd985..0059c816 100644 --- a/backends/advanced/src/advanced_omi_backend/workers/conversation_jobs.py +++ b/backends/advanced/src/advanced_omi_backend/workers/conversation_jobs.py @@ -405,10 +405,15 @@ async def open_conversation_job( ) # Determine end reason based on how we exited the loop - # Check session completion_reason from Redis + # Check session completion_reason from Redis (set by WebSocket controller on disconnect) completion_reason = await redis_client.hget(session_key, "completion_reason") completion_reason_str = completion_reason.decode() if completion_reason else None + # Determine end_reason with proper precedence: + # 1. websocket_disconnect (explicit disconnect from client) + # 2. inactivity_timeout (no speech for SPEECH_INACTIVITY_THRESHOLD_SECONDS) + # 3. max_duration (conversation exceeded max runtime) + # 4. user_stopped (user manually stopped recording) if completion_reason_str == "websocket_disconnect": end_reason = "websocket_disconnect" elif timeout_triggered: diff --git a/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py b/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py index fdb16b7d..22ffaaf2 100644 --- a/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py +++ b/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py @@ -31,7 +31,7 @@ async def process_memory_job( V2 Architecture: 1. Extracts memories from conversation transcript 2. 
Checks primary speakers filter if configured - 3. Uses configured memory provider (friend_lite or openmemory_mcp) + 3. Uses configured memory provider (chronicle or openmemory_mcp) 4. Stores memory references in conversation document Note: Listening jobs are restarted by open_conversation_job (not here). @@ -140,7 +140,7 @@ async def process_memory_job( transcript_version_id = conversation_model.active_transcript_version or "unknown" # Determine memory provider from memory service - memory_provider = conversation_model.MemoryProvider.FRIEND_LITE # Default + memory_provider = conversation_model.MemoryProvider.CHRONICLE # Default try: from advanced_omi_backend.services.memory import get_memory_service memory_service_obj = get_memory_service() diff --git a/backends/advanced/ssl/generate-ssl.sh b/backends/advanced/ssl/generate-ssl.sh index efc5d8c2..b0fd4b3d 100755 --- a/backends/advanced/ssl/generate-ssl.sh +++ b/backends/advanced/ssl/generate-ssl.sh @@ -1,7 +1,7 @@ #!/bin/bash set -e -# Generate self-signed SSL certificate for Friend-Lite Advanced Backend +# Generate self-signed SSL certificate for Chronicle Advanced Backend # Supports localhost, IP addresses, and domain names SERVER_ADDRESS="$1" diff --git a/backends/advanced/start-k8s.sh b/backends/advanced/start-k8s.sh index 487b82c8..a2f3d817 100755 --- a/backends/advanced/start-k8s.sh +++ b/backends/advanced/start-k8s.sh @@ -1,11 +1,11 @@ #!/bin/bash -# Friend-Lite Backend Kubernetes Startup Script +# Chronicle Backend Kubernetes Startup Script # Starts both the FastAPI backend and RQ workers for K8s deployment set -e -echo "๐Ÿš€ Starting Friend-Lite Backend (Kubernetes)..." +echo "๐Ÿš€ Starting Chronicle Backend (Kubernetes)..." 
# Debug environment variables echo "๐Ÿ” Environment check:" diff --git a/backends/advanced/start-workers.sh b/backends/advanced/start-workers.sh index 2e39848d..f62b5a42 100755 --- a/backends/advanced/start-workers.sh +++ b/backends/advanced/start-workers.sh @@ -4,7 +4,7 @@ set -e -echo "๐Ÿš€ Starting Friend-Lite Workers..." +echo "๐Ÿš€ Starting Chronicle Workers..." # Clean up any stale worker registrations from previous runs echo "๐Ÿงน Cleaning up stale worker registrations from Redis..." diff --git a/backends/advanced/start.sh b/backends/advanced/start.sh index 51946672..40fa4abf 100755 --- a/backends/advanced/start.sh +++ b/backends/advanced/start.sh @@ -1,11 +1,11 @@ #!/bin/bash -# Friend-Lite Backend Startup Script +# Chronicle Backend Startup Script # Starts both the FastAPI backend and RQ workers set -e -echo "๐Ÿš€ Starting Friend-Lite Backend..." +echo "๐Ÿš€ Starting Chronicle Backend..." # Function to handle shutdown shutdown() { diff --git a/backends/advanced/tests/test_conversation_models.py b/backends/advanced/tests/test_conversation_models.py index 197fddee..e4387c89 100644 --- a/backends/advanced/tests/test_conversation_models.py +++ b/backends/advanced/tests/test_conversation_models.py @@ -92,7 +92,7 @@ def test_memory_version_model(self): version_id="mem-v1", memory_count=5, transcript_version_id="trans-v1", - provider=MemoryProvider.FRIEND_LITE, + provider=MemoryProvider.CHRONICLE, model="gpt-4o-mini", created_at=datetime.now(), processing_time_seconds=45.2, @@ -102,7 +102,7 @@ def test_memory_version_model(self): assert version.version_id == "mem-v1" assert version.memory_count == 5 assert version.transcript_version_id == "trans-v1" - assert version.provider == MemoryProvider.FRIEND_LITE + assert version.provider == MemoryProvider.CHRONICLE assert version.model == "gpt-4o-mini" assert version.processing_time_seconds == 45.2 assert version.metadata["extraction_quality"] == "high" @@ -151,7 +151,7 @@ def test_add_memory_version(self): 
version_id="m1", memory_count=3, transcript_version_id="v1", - provider=MemoryProvider.FRIEND_LITE, + provider=MemoryProvider.CHRONICLE, model="gpt-4o-mini", processing_time_seconds=30.0 ) @@ -198,7 +198,7 @@ def test_active_version_properties(self): # Add versions segments = [SpeakerSegment(start=0.0, end=5.0, text="Test", speaker="Speaker A")] conversation.add_transcript_version("v1", "Test", segments, TranscriptProvider.DEEPGRAM) - conversation.add_memory_version("m1", 2, "v1", MemoryProvider.FRIEND_LITE) + conversation.add_memory_version("m1", 2, "v1", MemoryProvider.CHRONICLE) # Should return active versions active_transcript = conversation.active_transcript @@ -217,7 +217,7 @@ def test_provider_enums(self): assert TranscriptProvider.PARAKEET == "parakeet" # Test MemoryProvider enum - assert MemoryProvider.FRIEND_LITE == "friend_lite" + assert MemoryProvider.CHRONICLE == "chronicle" assert MemoryProvider.OPENMEMORY_MCP == "openmemory_mcp" def test_conversation_model_dump(self): @@ -227,7 +227,7 @@ def test_conversation_model_dump(self): # Add some versions segments = [SpeakerSegment(start=0.0, end=5.0, text="Test", speaker="Speaker A")] conversation.add_transcript_version("v1", "Test", segments, TranscriptProvider.DEEPGRAM) - conversation.add_memory_version("m1", 2, "v1", MemoryProvider.FRIEND_LITE) + conversation.add_memory_version("m1", 2, "v1", MemoryProvider.CHRONICLE) # Test model_dump() works conv_dict = conversation.model_dump() diff --git a/backends/advanced/tests/test_integration.py b/backends/advanced/tests/test_integration.py index a4422d4c..a8086d1b 100644 --- a/backends/advanced/tests/test_integration.py +++ b/backends/advanced/tests/test_integration.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -End-to-end integration test for Friend-Lite backend with unified transcription support. +End-to-end integration test for Chronicle backend with unified transcription support. 
This test validates the complete audio processing pipeline using isolated test environment: 1. Service startup with docker-compose-test.yml (isolated ports and databases) @@ -954,7 +954,7 @@ def validate_memory_extraction(self, upload_response: dict): client_memories = self.wait_for_memory_processing(memory_job_id, client_id) # Check if we're using OpenMemory MCP provider - memory_provider = os.environ.get("MEMORY_PROVIDER", "friend_lite") + memory_provider = os.environ.get("MEMORY_PROVIDER", "chronicle") if not client_memories: if memory_provider == "openmemory_mcp": diff --git a/backends/advanced/upload_files.py b/backends/advanced/upload_files.py index 44ca0e26..ead58e74 100755 --- a/backends/advanced/upload_files.py +++ b/backends/advanced/upload_files.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -Upload audio files to the Friend-Lite backend for processing. +Upload audio files to the Chronicle backend for processing. """ import argparse @@ -376,7 +376,7 @@ def poll_job_status(job_id: str, token: str, base_url: str, total_files: int) -> def parse_args(): """Parse command line arguments.""" - parser = argparse.ArgumentParser(description="Upload audio files to Friend-Lite backend") + parser = argparse.ArgumentParser(description="Upload audio files to Chronicle backend") parser.add_argument( "files", nargs="*", @@ -394,7 +394,7 @@ def main(): """Main function to orchestrate the upload process.""" args = parse_args() - logger.info("Friend-Lite Audio File Upload Tool") + logger.info("Chronicle Audio File Upload Tool") logger.info("=" * 40) # Load environment variables diff --git a/backends/advanced/webui/README.md b/backends/advanced/webui/README.md index f093f66b..303b2780 100644 --- a/backends/advanced/webui/README.md +++ b/backends/advanced/webui/README.md @@ -1,6 +1,6 @@ -# Friend-Lite Web Dashboard +# Chronicle Web Dashboard -A modern React-based web interface for the Friend-Lite AI-powered personal audio system. 
+A modern React-based web interface for the Chronicle AI-powered personal audio system. ## Features diff --git a/backends/advanced/webui/package-lock.json b/backends/advanced/webui/package-lock.json index bde3b515..4582a222 100644 --- a/backends/advanced/webui/package-lock.json +++ b/backends/advanced/webui/package-lock.json @@ -1,23 +1,28 @@ { - "name": "friend-lite-webui", + "name": "chronicle-webui", "version": "0.1.0", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "friend-lite-webui", + "name": "chronicle-webui", "version": "0.1.0", "dependencies": { "axios": "^1.6.2", "clsx": "^2.0.0", + "d3": "^7.8.5", + "frappe-gantt": "^0.6.1", "lucide-react": "^0.294.0", "react": "^18.2.0", "react-dom": "^18.2.0", - "react-router-dom": "^6.20.0" + "react-router-dom": "^6.20.0", + "react-vertical-timeline-component": "^3.6.0" }, "devDependencies": { + "@types/d3": "^7.4.3", "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", + "@types/react-vertical-timeline-component": "^3.3.6", "@typescript-eslint/eslint-plugin": "^6.14.0", "@typescript-eslint/parser": "^6.14.0", "@vitejs/plugin-react": "^4.2.1", @@ -26,6 +31,7 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "postcss": "^8.4.32", + "sass-embedded": "^1.80.7", "tailwindcss": "^3.3.0", "typescript": "^5.2.2", "vite": "^5.0.8" @@ -360,6 +366,13 @@ "node": ">=6.9.0" } }, + "node_modules/@bufbuild/protobuf": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@bufbuild/protobuf/-/protobuf-2.10.1.tgz", + "integrity": "sha512-ckS3+vyJb5qGpEYv/s1OebUHDi/xSNtfgw1wqKZo7MR9F2z+qXr0q5XagafAG/9O0QPVIUfST0smluYSTpYFkg==", + "dev": true, + "license": "(Apache-2.0 AND BSD-3-Clause)" + }, "node_modules/@esbuild/aix-ppc64": { "version": "0.21.5", "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", @@ -1024,6 +1037,316 @@ "node": ">= 8" } }, + "node_modules/@parcel/watcher": { + "version": "2.5.1", + "resolved": 
"https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.1.tgz", + "integrity": "sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "dependencies": { + "detect-libc": "^1.0.3", + "is-glob": "^4.0.3", + "micromatch": "^4.0.5", + "node-addon-api": "^7.0.0" + }, + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "@parcel/watcher-android-arm64": "2.5.1", + "@parcel/watcher-darwin-arm64": "2.5.1", + "@parcel/watcher-darwin-x64": "2.5.1", + "@parcel/watcher-freebsd-x64": "2.5.1", + "@parcel/watcher-linux-arm-glibc": "2.5.1", + "@parcel/watcher-linux-arm-musl": "2.5.1", + "@parcel/watcher-linux-arm64-glibc": "2.5.1", + "@parcel/watcher-linux-arm64-musl": "2.5.1", + "@parcel/watcher-linux-x64-glibc": "2.5.1", + "@parcel/watcher-linux-x64-musl": "2.5.1", + "@parcel/watcher-win32-arm64": "2.5.1", + "@parcel/watcher-win32-ia32": "2.5.1", + "@parcel/watcher-win32-x64": "2.5.1" + } + }, + "node_modules/@parcel/watcher-android-arm64": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.5.1.tgz", + "integrity": "sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-darwin-arm64": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.1.tgz", + "integrity": "sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==", + "cpu": [ + "arm64" + ], + 
"dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-darwin-x64": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.5.1.tgz", + "integrity": "sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-freebsd-x64": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.5.1.tgz", + "integrity": "sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-arm-glibc": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.5.1.tgz", + "integrity": "sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-arm-musl": { + "version": "2.5.1", + "resolved": 
"https://registry.npmjs.org/@parcel/watcher-linux-arm-musl/-/watcher-linux-arm-musl-2.5.1.tgz", + "integrity": "sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-arm64-glibc": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.5.1.tgz", + "integrity": "sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-arm64-musl": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.5.1.tgz", + "integrity": "sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-x64-glibc": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.1.tgz", + "integrity": "sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + 
"funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-x64-musl": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.1.tgz", + "integrity": "sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-win32-arm64": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.5.1.tgz", + "integrity": "sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-win32-ia32": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.5.1.tgz", + "integrity": "sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-win32-x64": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.5.1.tgz", + "integrity": "sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA==", + "cpu": [ + "x64" + 
], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, "node_modules/@pkgjs/parseargs": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", @@ -1376,59 +1699,360 @@ "@babel/types": "^7.28.2" } }, - "node_modules/@types/estree": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", - "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "node_modules/@types/d3": { + "version": "7.4.3", + "resolved": "https://registry.npmjs.org/@types/d3/-/d3-7.4.3.tgz", + "integrity": "sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/d3-array": "*", + "@types/d3-axis": "*", + "@types/d3-brush": "*", + "@types/d3-chord": "*", + "@types/d3-color": "*", + "@types/d3-contour": "*", + "@types/d3-delaunay": "*", + "@types/d3-dispatch": "*", + "@types/d3-drag": "*", + "@types/d3-dsv": "*", + "@types/d3-ease": "*", + "@types/d3-fetch": "*", + "@types/d3-force": "*", + "@types/d3-format": "*", + "@types/d3-geo": "*", + "@types/d3-hierarchy": "*", + "@types/d3-interpolate": "*", + "@types/d3-path": "*", + "@types/d3-polygon": "*", + "@types/d3-quadtree": "*", + "@types/d3-random": "*", + "@types/d3-scale": "*", + "@types/d3-scale-chromatic": "*", + "@types/d3-selection": "*", + "@types/d3-shape": "*", + "@types/d3-time": "*", + "@types/d3-time-format": "*", + "@types/d3-timer": "*", + "@types/d3-transition": "*", + "@types/d3-zoom": "*" + } + }, + "node_modules/@types/d3-array": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz", + "integrity": 
"sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==", "dev": true, "license": "MIT" }, - "node_modules/@types/json-schema": { - "version": "7.0.15", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", - "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "node_modules/@types/d3-axis": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-3.0.6.tgz", + "integrity": "sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-brush": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-3.0.6.tgz", + "integrity": "sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-chord": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-chord/-/d3-chord-3.0.6.tgz", + "integrity": "sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==", "dev": true, "license": "MIT" }, - "node_modules/@types/prop-types": { - "version": "15.7.15", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz", - "integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==", + "node_modules/@types/d3-color": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", + "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", "dev": true, "license": "MIT" }, - "node_modules/@types/react": { - "version": "18.3.23", - "resolved": 
"https://registry.npmjs.org/@types/react/-/react-18.3.23.tgz", - "integrity": "sha512-/LDXMQh55EzZQ0uVAZmKKhfENivEvWz6E+EYzh+/MCjMhNsotd+ZHhBGIjFDTi6+fz0OhQQQLbTgdQIxxCsC0w==", + "node_modules/@types/d3-contour": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-3.0.6.tgz", + "integrity": "sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==", "dev": true, "license": "MIT", "dependencies": { - "@types/prop-types": "*", - "csstype": "^3.0.2" + "@types/d3-array": "*", + "@types/geojson": "*" } }, - "node_modules/@types/react-dom": { - "version": "18.3.7", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.7.tgz", - "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==", + "node_modules/@types/d3-delaunay": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-delaunay/-/d3-delaunay-6.0.4.tgz", + "integrity": "sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==", "dev": true, - "license": "MIT", - "peerDependencies": { - "@types/react": "^18.0.0" - } + "license": "MIT" }, - "node_modules/@types/semver": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.7.0.tgz", - "integrity": "sha512-k107IF4+Xr7UHjwDc7Cfd6PRQfbdkiRabXGRjo07b4WyPahFBZCZ1sE+BNxYIJPPg73UkfOsVOLwqVc/6ETrIA==", + "node_modules/@types/d3-dispatch": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.7.tgz", + "integrity": "sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA==", "dev": true, "license": "MIT" }, - "node_modules/@typescript-eslint/eslint-plugin": { - "version": "6.21.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.21.0.tgz", - "integrity": 
"sha512-oy9+hTPCUFpngkEZUSzbf9MxI65wbKFoQYsgPdILTfbUldp5ovUuphZVe4i30emU9M/kP+T64Di0mxl7dSw3MA==", + "node_modules/@types/d3-drag": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.7.tgz", + "integrity": "sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-dsv": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-3.0.7.tgz", + "integrity": "sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-ease": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz", + "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-fetch": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-3.0.7.tgz", + "integrity": "sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/d3-dsv": "*" + } + }, + "node_modules/@types/d3-force": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-3.0.10.tgz", + "integrity": "sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-format": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-3.0.4.tgz", + "integrity": "sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-geo": { + "version": 
"3.1.0", + "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-3.1.0.tgz", + "integrity": "sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/geojson": "*" + } + }, + "node_modules/@types/d3-hierarchy": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.7.tgz", + "integrity": "sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-interpolate": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", + "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/d3-color": "*" + } + }, + "node_modules/@types/d3-path": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz", + "integrity": "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-polygon": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-polygon/-/d3-polygon-3.0.2.tgz", + "integrity": "sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-quadtree": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-3.0.6.tgz", + "integrity": "sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-random": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-random/-/d3-random-3.0.3.tgz", + "integrity": 
"sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-scale": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz", + "integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/d3-time": "*" + } + }, + "node_modules/@types/d3-scale-chromatic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", + "integrity": "sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-selection": { + "version": "3.0.11", + "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.11.tgz", + "integrity": "sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-shape": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.7.tgz", + "integrity": "sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/d3-path": "*" + } + }, + "node_modules/@types/d3-time": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz", + "integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-time-format": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-4.0.3.tgz", + "integrity": 
"sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-timer": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz", + "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/d3-transition": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.9.tgz", + "integrity": "sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-zoom": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.8.tgz", + "integrity": "sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/d3-interpolate": "*", + "@types/d3-selection": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/geojson": { + "version": "7946.0.16", + "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", + "integrity": "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": 
"sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/prop-types": { + "version": "15.7.15", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz", + "integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/react": { + "version": "18.3.23", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.23.tgz", + "integrity": "sha512-/LDXMQh55EzZQ0uVAZmKKhfENivEvWz6E+EYzh+/MCjMhNsotd+ZHhBGIjFDTi6+fz0OhQQQLbTgdQIxxCsC0w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/prop-types": "*", + "csstype": "^3.0.2" + } + }, + "node_modules/@types/react-dom": { + "version": "18.3.7", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.7.tgz", + "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^18.0.0" + } + }, + "node_modules/@types/react-vertical-timeline-component": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/@types/react-vertical-timeline-component/-/react-vertical-timeline-component-3.3.6.tgz", + "integrity": "sha512-OUvyPXRjXvUD/SNLO0CW0GbIxVF32Ios5qHecMSfw6kxnK1cPULD9NV80EuqZ3WmS/s6BgbcwmN8k4ISb3akhQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/react": "*" + } + }, + "node_modules/@types/semver": { + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.7.0.tgz", + "integrity": "sha512-k107IF4+Xr7UHjwDc7Cfd6PRQfbdkiRabXGRjo07b4WyPahFBZCZ1sE+BNxYIJPPg73UkfOsVOLwqVc/6ETrIA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "6.21.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.21.0.tgz", + "integrity": "sha512-oy9+hTPCUFpngkEZUSzbf9MxI65wbKFoQYsgPdILTfbUldp5ovUuphZVe4i30emU9M/kP+T64Di0mxl7dSw3MA==", "dev": true, "license": "MIT", "dependencies": { @@ -1893,6 +2517,13 @@ "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" } }, + "node_modules/buffer-builder": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/buffer-builder/-/buffer-builder-0.2.0.tgz", + "integrity": "sha512-7VPMEPuYznPSoR21NE1zvd2Xna6c/CloiZCfcMXR1Jny6PjX0N4Nsa38zcBFo/FMK+BlA+FLKbJCQ0i2yxp+Xg==", + "dev": true, + "license": "MIT/X11" + }, "node_modules/call-bind-apply-helpers": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", @@ -2002,6 +2633,12 @@ "node": ">= 6" } }, + "node_modules/classnames": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", + "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==", + "license": "MIT" + }, "node_modules/clsx": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", @@ -2031,6 +2668,13 @@ "dev": true, "license": "MIT" }, + "node_modules/colorjs.io": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/colorjs.io/-/colorjs.io-0.5.2.tgz", + "integrity": "sha512-twmVoizEW7ylZSN32OgKdXRmo1qg+wT5/6C3xu5b9QsWzSFAhHLn2xd8ro0diCsKfCj1RdaTP/nrcW+vAoQPIw==", + "dev": true, + "license": "MIT" + }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -2102,100 +2746,533 @@ "dev": true, "license": "MIT" }, - "node_modules/debug": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", - "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", - 
"dev": true, - "license": "MIT", + "node_modules/d3": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz", + "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==", + "license": "ISC", "dependencies": { - "ms": "^2.1.3" + "d3-array": "3", + "d3-axis": "3", + "d3-brush": "3", + "d3-chord": "3", + "d3-color": "3", + "d3-contour": "4", + "d3-delaunay": "6", + "d3-dispatch": "3", + "d3-drag": "3", + "d3-dsv": "3", + "d3-ease": "3", + "d3-fetch": "3", + "d3-force": "3", + "d3-format": "3", + "d3-geo": "3", + "d3-hierarchy": "3", + "d3-interpolate": "3", + "d3-path": "3", + "d3-polygon": "3", + "d3-quadtree": "3", + "d3-random": "3", + "d3-scale": "4", + "d3-scale-chromatic": "3", + "d3-selection": "3", + "d3-shape": "3", + "d3-time": "3", + "d3-time-format": "4", + "d3-timer": "3", + "d3-transition": "3", + "d3-zoom": "3" }, "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } + "node": ">=12" } }, - "node_modules/deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true, - "license": "MIT" + "node_modules/d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "license": "ISC", + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "license": "MIT", + "node_modules/d3-axis": { + "version": "3.0.0", + 
"resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", + "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==", + "license": "ISC", "engines": { - "node": ">=0.4.0" + "node": ">=12" } }, - "node_modules/didyoumean": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", - "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", - "dev": true, - "license": "Apache-2.0" + "node_modules/d3-brush": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", + "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "3", + "d3-transition": "3" + }, + "engines": { + "node": ">=12" + } }, - "node_modules/dir-glob": { + "node_modules/d3-chord": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "license": "MIT", + "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", + "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", + "license": "ISC", "dependencies": { - "path-type": "^4.0.0" + "d3-path": "1 - 3" }, "engines": { - "node": ">=8" + "node": ">=12" } }, - "node_modules/dlv": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", - "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", - "dev": true, - "license": "MIT" + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", 
+ "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "license": "ISC", + "engines": { + "node": ">=12" + } }, - "node_modules/doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", - "dev": true, - "license": "Apache-2.0", + "node_modules/d3-contour": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", + "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", + "license": "ISC", "dependencies": { - "esutils": "^2.0.2" + "d3-array": "^3.2.0" }, "engines": { - "node": ">=6.0.0" + "node": ">=12" } }, - "node_modules/dunder-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", - "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "license": "MIT", + "node_modules/d3-delaunay": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz", + "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==", + "license": "ISC", "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-errors": "^1.3.0", - "gopd": "^1.2.0" + "delaunator": "5" }, "engines": { - "node": ">= 0.4" + "node": ">=12" } }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "dev": true, - "license": "MIT" + "node_modules/d3-dispatch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", + "integrity": 
"sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-drag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", + "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-selection": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dsv": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", + "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", + "license": "ISC", + "dependencies": { + "commander": "7", + "iconv-lite": "0.6", + "rw": "1" + }, + "bin": { + "csv2json": "bin/dsv2json.js", + "csv2tsv": "bin/dsv2dsv.js", + "dsv2dsv": "bin/dsv2dsv.js", + "dsv2json": "bin/dsv2json.js", + "json2csv": "bin/json2dsv.js", + "json2dsv": "bin/json2dsv.js", + "json2tsv": "bin/json2dsv.js", + "tsv2csv": "bin/dsv2dsv.js", + "tsv2json": "bin/dsv2json.js" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dsv/node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "license": "MIT", + "engines": { + "node": ">= 10" + } + }, + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-fetch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", + "integrity": 
"sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", + "license": "ISC", + "dependencies": { + "d3-dsv": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-force": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", + "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-quadtree": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-format": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", + "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-geo": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz", + "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2.5.0 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-hierarchy": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", + "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-path": { + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-polygon": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", + "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-quadtree": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", + "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-random": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", + "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "license": "ISC", + "dependencies": { + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale-chromatic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", + "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3", + "d3-interpolate": "1 - 3" + }, + "engines": { + 
"node": ">=12" + } + }, + "node_modules/d3-selection": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", + "license": "ISC", + "dependencies": { + "d3-path": "^3.1.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "license": "ISC", + "dependencies": { + "d3-time": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-transition": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", + "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", + "license": "ISC", + "dependencies": { + "d3-color": 
"1 - 3", + "d3-dispatch": "1 - 3", + "d3-ease": "1 - 3", + "d3-interpolate": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "d3-selection": "2 - 3" + } + }, + "node_modules/d3-zoom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", + "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "2 - 3", + "d3-transition": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/delaunator": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", + "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==", + "license": "ISC", + "dependencies": { + "robust-predicates": "^3.0.2" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "engines": { + "node": 
">=0.4.0" + } + }, + "node_modules/detect-libc": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", + "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", + "dev": true, + "license": "Apache-2.0", + "optional": true, + "bin": { + "detect-libc": "bin/detect-libc.js" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/didyoumean": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true, + "license": "MIT" + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + 
"dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true, + "license": "MIT" }, "node_modules/electron-to-chromium": { "version": "1.5.200", @@ -2709,6 +3786,12 @@ "url": "https://github.com/sponsors/rawify" } }, + "node_modules/frappe-gantt": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/frappe-gantt/-/frappe-gantt-0.6.1.tgz", + "integrity": "sha512-1cSU9vLbwypjzaxnCfnEE03Xr3HlAV2S8dRtjxw62o+amkx1A8bBIFd2jp84mcDdTCM77Ij4LzZBslAKZB8oMg==", + "license": "MIT" + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -2945,10 +4028,22 @@ "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "license": "MIT", "dependencies": { - "function-bind": "^1.1.2" + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" }, "engines": { - "node": ">= 0.4" + "node": ">=0.10.0" } }, "node_modules/ignore": { @@ -2961,6 +4056,13 @@ "node": ">= 4" } }, + "node_modules/immutable": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-5.1.4.tgz", + "integrity": "sha512-p6u1bG3YSnINT5RQmx/yRZBpenIl30kVxkTLDyHLIMk0gict704Q9n+thfDI7lTRm9vXdDYutVzXhzcThxTnXA==", + "dev": true, + "license": "MIT" + }, 
"node_modules/import-fresh": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", @@ -3007,6 +4109,15 @@ "dev": true, "license": "ISC" }, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, "node_modules/is-binary-path": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", @@ -3135,111 +4246,388 @@ "dev": true, "license": "MIT", "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lilconfig": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", + "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": 
"MIT" + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/lucide-react": { + "version": "0.294.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.294.0.tgz", + "integrity": "sha512-V7o0/VECSGbLHn3/1O67FUgBwWB+hmzshrgDVRJQhMh8uj5D3HBuIvhuAmQTtlupILSplwIZg5FTc4tTKMA2SA==", + "license": "ISC", + "peerDependencies": { + "react": "^16.5.1 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": 
"sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + 
"node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" } }, - "node_modules/jsesc": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", - "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], "license": "MIT", "bin": { - "jsesc": "bin/jsesc" + "nanoid": "bin/nanoid.cjs" }, "engines": { - "node": ">=6" + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, - "node_modules/json-buffer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "node_modules/natural-compare": { + "version": "1.4.0", + 
"resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "dev": true, "license": "MIT" }, - "node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "node_modules/node-addon-api": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz", + "integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==", "dev": true, - "license": "MIT" + "license": "MIT", + "optional": true }, - "node_modules/json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "node_modules/node-releases": { + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", + "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", "dev": true, "license": "MIT" }, - "node_modules/json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", "dev": true, "license": "MIT", - "bin": { - "json5": 
"lib/cli.js" - }, "engines": { - "node": ">=6" + "node": ">=0.10.0" } }, - "node_modules/keyv": { - "version": "4.5.4", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", - "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "node_modules/normalize-range": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", + "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", "dev": true, "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", "dependencies": { - "json-buffer": "3.0.1" + "wrappy": "1" } }, - "node_modules/levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": 
"sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", "dev": true, "license": "MIT", "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" }, "engines": { "node": ">= 0.8.0" } }, - "node_modules/lilconfig": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", - "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dev": true, "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, "engines": { - "node": ">=14" + "node": ">=10" }, "funding": { - "url": "https://github.com/sponsors/antonk52" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", - "dev": true, - "license": "MIT" - }, - "node_modules/locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dev": true, "license": "MIT", "dependencies": { - "p-locate": "^5.0.0" + "p-limit": "^3.0.2" }, "engines": { "node": 
">=10" @@ -3248,815 +4636,1030 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", "dev": true, - "license": "MIT" + "license": "BlueOak-1.0.0" }, - "node_modules/loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, "license": "MIT", "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" + "callsites": "^3.0.0" }, - "bin": { - "loose-envify": "cli.js" + "engines": { + "node": ">=6" } }, - "node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^3.0.2" + "license": "MIT", + "engines": { + "node": ">=8" } }, - 
"node_modules/lucide-react": { - "version": "0.294.0", - "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.294.0.tgz", - "integrity": "sha512-V7o0/VECSGbLHn3/1O67FUgBwWB+hmzshrgDVRJQhMh8uj5D3HBuIvhuAmQTtlupILSplwIZg5FTc4tTKMA2SA==", - "license": "ISC", - "peerDependencies": { - "react": "^16.5.1 || ^17.0.0 || ^18.0.0" + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" } }, - "node_modules/math-intrinsics": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", - "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, "license": "MIT", "engines": { - "node": ">= 0.4" + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + 
"funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" } }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true, "license": "MIT", "engines": { - "node": ">= 8" + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": 
"sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", "dev": true, "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, "engines": { - "node": ">=8.6" + "node": ">=0.10.0" } }, - "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, "license": "MIT", "engines": { - "node": ">= 0.6" + "node": ">= 6" } }, - "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], "license": "MIT", "dependencies": { - "mime-db": "1.52.0" + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" }, "engines": { - "node": ">= 0.6" + "node": "^10 || ^12 || >=14" } }, - "node_modules/minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": 
"sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "node_modules/postcss-import": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", "dev": true, - "license": "ISC", + "license": "MIT", "dependencies": { - "brace-expansion": "^2.0.1" + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" }, "engines": { - "node": ">=16 || 14 >=14.17" + "node": ">=14.0.0" }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "peerDependencies": { + "postcss": "^8.0.0" } }, - "node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "node_modules/postcss-js": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz", + "integrity": "sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==", "dev": true, - "license": "ISC", + "license": "MIT", + "dependencies": { + "camelcase-css": "^2.0.1" + }, "engines": { - "node": ">=16 || 14 >=14.17" + "node": "^12 || ^14 || >= 16" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": "^8.4.21" } }, - "node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true, - "license": "MIT" - }, - "node_modules/mz": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", - "integrity": 
"sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "node_modules/postcss-load-config": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-4.0.2.tgz", + "integrity": "sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==", "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], "license": "MIT", "dependencies": { - "any-promise": "^1.0.0", - "object-assign": "^4.0.1", - "thenify-all": "^1.0.0" + "lilconfig": "^3.0.0", + "yaml": "^2.3.4" + }, + "engines": { + "node": ">= 14" + }, + "peerDependencies": { + "postcss": ">=8.0.9", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "postcss": { + "optional": true + }, + "ts-node": { + "optional": true + } } }, - "node_modules/nanoid": { - "version": "3.3.11", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", - "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "node_modules/postcss-nested": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", + "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", "dev": true, "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, { "type": "github", "url": "https://github.com/sponsors/ai" } ], "license": "MIT", - "bin": { - "nanoid": "bin/nanoid.cjs" + "dependencies": { + "postcss-selector-parser": "^6.1.1" }, "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.2.14" } }, - "node_modules/natural-compare": { - "version": "1.4.0", - "resolved": 
"https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "node_modules/postcss-selector-parser": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", + "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", "dev": true, "license": "MIT" }, - "node_modules/node-releases": { - "version": "2.0.19", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", - "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prop-types": { + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", "license": "MIT" }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", "dev": true, "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=6" } }, - "node_modules/normalize-range": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", - "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/react": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + }, "engines": { "node": ">=0.10.0" } }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": 
"https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "dev": true, + "node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", "license": "MIT", - "engines": { - "node": ">=0.10.0" + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" } }, - "node_modules/object-hash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", - "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", - "dev": true, + "node_modules/react-intersection-observer": { + "version": "8.34.0", + "resolved": "https://registry.npmjs.org/react-intersection-observer/-/react-intersection-observer-8.34.0.tgz", + "integrity": "sha512-TYKh52Zc0Uptp5/b4N91XydfSGKubEhgZRtcg1rhTKABXijc4Sdr1uTp5lJ8TN27jwUsdXxjHXtHa0kPj704sw==", "license": "MIT", - "engines": { - "node": ">= 6" + "peerDependencies": { + "react": "^15.0.0 || ^16.0.0 || ^17.0.0|| ^18.0.0" } }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, - "license": "ISC", - "dependencies": { - "wrappy": "1" - } + "node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "license": "MIT" }, - "node_modules/optionator": { - "version": "0.9.4", - "resolved": 
"https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", - "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "node_modules/react-refresh": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", + "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", "dev": true, "license": "MIT", - "dependencies": { - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.5" - }, "engines": { - "node": ">= 0.8.0" + "node": ">=0.10.0" } }, - "node_modules/p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": true, + "node_modules/react-router": { + "version": "6.30.1", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.30.1.tgz", + "integrity": "sha512-X1m21aEmxGXqENEPG3T6u0Th7g0aS4ZmoNynhbs+Cn+q+QGTLt+d5IQ2bHAXKzKcxGJjxACpVbnYQSCRcfxHlQ==", "license": "MIT", "dependencies": { - "yocto-queue": "^0.1.0" + "@remix-run/router": "1.23.0" }, "engines": { - "node": ">=10" + "node": ">=14.0.0" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "peerDependencies": { + "react": ">=16.8" } }, - "node_modules/p-locate": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "dev": true, + "node_modules/react-router-dom": { + "version": "6.30.1", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.30.1.tgz", + "integrity": "sha512-llKsgOkZdbPU1Eg3zK8lCn+sjD9wMRZZPuzmdWWX5SUs8OFkN5HnFVC0u5KMeMaC9aoancFI/KoLuKPqN+hxHw==", 
"license": "MIT", "dependencies": { - "p-limit": "^3.0.2" + "@remix-run/router": "1.23.0", + "react-router": "6.30.1" }, "engines": { - "node": ">=10" + "node": ">=14.0.0" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "peerDependencies": { + "react": ">=16.8", + "react-dom": ">=16.8" } }, - "node_modules/package-json-from-dist": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", - "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", - "dev": true, - "license": "BlueOak-1.0.0" - }, - "node_modules/parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "dev": true, + "node_modules/react-vertical-timeline-component": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/react-vertical-timeline-component/-/react-vertical-timeline-component-3.6.0.tgz", + "integrity": "sha512-l9zulqjIGlRuaQeplGzV4r/tG2RYBpYt84Il8w4IxnJze2cDIGI04MKo3F7f1sHT0Sih1ohEFts8UV23AJS15Q==", + "hasInstallScript": true, "license": "MIT", "dependencies": { - "callsites": "^3.0.0" - }, - "engines": { - "node": ">=6" + "classnames": "^2.2.6", + "prop-types": "^15.7.2", + "react-intersection-observer": "^8.26.2" } }, - "node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "node_modules/read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", "dev": true, "license": "MIT", - "engines": { - 
"node": ">=8" + "dependencies": { + "pify": "^2.3.0" } }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", "dev": true, "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, "engines": { - "node": ">=0.10.0" + "node": ">=8.10.0" } }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", "dev": true, "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true, - "license": "MIT" - }, - "node_modules/path-scurry": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", - "dev": true, - "license": "BlueOak-1.0.0", "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + 
"supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" }, "engines": { - "node": ">=16 || 14 >=14.18" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/isaacs" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/path-scurry/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/path-type": { + "node_modules/resolve-from": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true, "license": "MIT", "engines": { - "node": ">=8" + "node": ">=4" } }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "dev": true, - "license": "ISC" - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", "dev": true, "license": "MIT", "engines": { - "node": ">=8.6" - }, - "funding": { - "url": 
"https://github.com/sponsors/jonschlinkert" + "iojs": ">=1.0.0", + "node": ">=0.10.0" } }, - "node_modules/pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/pirates": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", - "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "node_modules/robust-predicates": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz", + "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==", + "license": "Unlicense" + }, + "node_modules/rollup": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.46.2.tgz", + "integrity": "sha512-WMmLFI+Boh6xbop+OAGo9cQ3OgX9MIg7xOQjn+pTCwOkk+FNDAeAemXkJ3HzDJrVXleLOFVa1ipuc1AmEx1Dwg==", "dev": true, "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, "engines": { - "node": ">= 6" + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.46.2", + "@rollup/rollup-android-arm64": "4.46.2", + "@rollup/rollup-darwin-arm64": "4.46.2", + 
"@rollup/rollup-darwin-x64": "4.46.2", + "@rollup/rollup-freebsd-arm64": "4.46.2", + "@rollup/rollup-freebsd-x64": "4.46.2", + "@rollup/rollup-linux-arm-gnueabihf": "4.46.2", + "@rollup/rollup-linux-arm-musleabihf": "4.46.2", + "@rollup/rollup-linux-arm64-gnu": "4.46.2", + "@rollup/rollup-linux-arm64-musl": "4.46.2", + "@rollup/rollup-linux-loongarch64-gnu": "4.46.2", + "@rollup/rollup-linux-ppc64-gnu": "4.46.2", + "@rollup/rollup-linux-riscv64-gnu": "4.46.2", + "@rollup/rollup-linux-riscv64-musl": "4.46.2", + "@rollup/rollup-linux-s390x-gnu": "4.46.2", + "@rollup/rollup-linux-x64-gnu": "4.46.2", + "@rollup/rollup-linux-x64-musl": "4.46.2", + "@rollup/rollup-win32-arm64-msvc": "4.46.2", + "@rollup/rollup-win32-ia32-msvc": "4.46.2", + "@rollup/rollup-win32-x64-msvc": "4.46.2", + "fsevents": "~2.3.2" } }, - "node_modules/postcss": { - "version": "8.5.6", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", - "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", "dev": true, "funding": [ { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" + "type": "github", + "url": "https://github.com/sponsors/feross" }, { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" + "type": "patreon", + "url": "https://www.patreon.com/feross" }, { - "type": "github", - "url": "https://github.com/sponsors/ai" + "type": "consulting", + "url": "https://feross.org/support" } ], "license": "MIT", "dependencies": { - "nanoid": "^3.3.11", - "picocolors": "^1.1.1", - "source-map-js": "^1.2.1" - }, - "engines": { - "node": "^10 || ^12 || >=14" + "queue-microtask": "^1.2.2" } }, - "node_modules/postcss-import": { - 
"version": "15.1.0", - "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", - "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "node_modules/rw": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", + "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==", + "license": "BSD-3-Clause" + }, + "node_modules/rxjs": { + "version": "7.8.2", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", + "integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/sass": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.93.3.tgz", + "integrity": "sha512-elOcIZRTM76dvxNAjqYrucTSI0teAF/L2Lv0s6f6b7FOwcwIuA357bIE871580AjHJuSvLIRUosgV+lIWx6Rgg==", "dev": true, "license": "MIT", + "optional": true, "dependencies": { - "postcss-value-parser": "^4.0.0", - "read-cache": "^1.0.0", - "resolve": "^1.1.7" + "chokidar": "^4.0.0", + "immutable": "^5.0.2", + "source-map-js": ">=0.6.2 <2.0.0" + }, + "bin": { + "sass": "sass.js" }, "engines": { "node": ">=14.0.0" }, - "peerDependencies": { - "postcss": "^8.0.0" + "optionalDependencies": { + "@parcel/watcher": "^2.4.1" } }, - "node_modules/postcss-js": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz", - "integrity": "sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==", + "node_modules/sass-embedded": { + 
"version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded/-/sass-embedded-1.93.3.tgz", + "integrity": "sha512-+VUy01yfDqNmIVMd/LLKl2TTtY0ovZN0rTonh+FhKr65mFwIYgU9WzgIZKS7U9/SPCQvWTsTGx9jyt+qRm/XFw==", "dev": true, "license": "MIT", "dependencies": { - "camelcase-css": "^2.0.1" + "@bufbuild/protobuf": "^2.5.0", + "buffer-builder": "^0.2.0", + "colorjs.io": "^0.5.0", + "immutable": "^5.0.2", + "rxjs": "^7.4.0", + "supports-color": "^8.1.1", + "sync-child-process": "^1.0.2", + "varint": "^6.0.0" }, - "engines": { - "node": "^12 || ^14 || >= 16" + "bin": { + "sass": "dist/bin/sass.js" }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" + "engines": { + "node": ">=16.0.0" }, - "peerDependencies": { - "postcss": "^8.4.21" + "optionalDependencies": { + "sass-embedded-all-unknown": "1.93.3", + "sass-embedded-android-arm": "1.93.3", + "sass-embedded-android-arm64": "1.93.3", + "sass-embedded-android-riscv64": "1.93.3", + "sass-embedded-android-x64": "1.93.3", + "sass-embedded-darwin-arm64": "1.93.3", + "sass-embedded-darwin-x64": "1.93.3", + "sass-embedded-linux-arm": "1.93.3", + "sass-embedded-linux-arm64": "1.93.3", + "sass-embedded-linux-musl-arm": "1.93.3", + "sass-embedded-linux-musl-arm64": "1.93.3", + "sass-embedded-linux-musl-riscv64": "1.93.3", + "sass-embedded-linux-musl-x64": "1.93.3", + "sass-embedded-linux-riscv64": "1.93.3", + "sass-embedded-linux-x64": "1.93.3", + "sass-embedded-unknown-all": "1.93.3", + "sass-embedded-win32-arm64": "1.93.3", + "sass-embedded-win32-x64": "1.93.3" + } + }, + "node_modules/sass-embedded-all-unknown": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-all-unknown/-/sass-embedded-all-unknown-1.93.3.tgz", + "integrity": "sha512-3okGgnE41eg+CPLtAPletu6nQ4N0ij7AeW+Sl5Km4j29XcmqZQeFwYjHe1AlKTEgLi/UAONk1O8i8/lupeKMbw==", + "cpu": [ + "!arm", + "!arm64", + "!riscv64", + "!x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + 
"dependencies": { + "sass": "1.93.3" } }, - "node_modules/postcss-load-config": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-4.0.2.tgz", - "integrity": "sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } + "node_modules/sass-embedded-android-arm": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-android-arm/-/sass-embedded-android-arm-1.93.3.tgz", + "integrity": "sha512-8xOw9bywfOD6Wv24BgCmgjkk6tMrsOTTHcb28KDxeJtFtoxiUyMbxo0vChpPAfp2Hyg2tFFKS60s0s4JYk+Raw==", + "cpu": [ + "arm" ], + "dev": true, "license": "MIT", - "dependencies": { - "lilconfig": "^3.0.0", - "yaml": "^2.3.4" - }, + "optional": true, + "os": [ + "android" + ], "engines": { - "node": ">= 14" - }, - "peerDependencies": { - "postcss": ">=8.0.9", - "ts-node": ">=9.0.0" - }, - "peerDependenciesMeta": { - "postcss": { - "optional": true - }, - "ts-node": { - "optional": true - } + "node": ">=14.0.0" } }, - "node_modules/postcss-nested": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", - "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } + "node_modules/sass-embedded-android-arm64": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-android-arm64/-/sass-embedded-android-arm64-1.93.3.tgz", + "integrity": "sha512-uqUl3Kt1IqdGVAcAdbmC+NwuUJy8tM+2ZnB7/zrt6WxWVShVCRdFnWR9LT8HJr7eJN7AU8kSXxaVX/gedanPsg==", + "cpu": [ + "arm64" ], + "dev": true, "license": "MIT", 
- "dependencies": { - "postcss-selector-parser": "^6.1.1" - }, + "optional": true, + "os": [ + "android" + ], "engines": { - "node": ">=12.0" - }, - "peerDependencies": { - "postcss": "^8.2.14" + "node": ">=14.0.0" } }, - "node_modules/postcss-selector-parser": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", - "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", + "node_modules/sass-embedded-android-riscv64": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-android-riscv64/-/sass-embedded-android-riscv64-1.93.3.tgz", + "integrity": "sha512-2jNJDmo+3qLocjWqYbXiBDnfgwrUeZgZFHJIwAefU7Fn66Ot7rsXl+XPwlokaCbTpj7eMFIqsRAZ/uDueXNCJg==", + "cpu": [ + "riscv64" + ], "dev": true, "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, + "optional": true, + "os": [ + "android" + ], "engines": { - "node": ">=4" + "node": ">=14.0.0" } }, - "node_modules/postcss-value-parser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", - "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "node_modules/sass-embedded-android-x64": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-android-x64/-/sass-embedded-android-x64-1.93.3.tgz", + "integrity": "sha512-y0RoAU6ZenQFcjM9PjQd3cRqRTjqwSbtWLL/p68y2oFyh0QGN0+LQ826fc0ZvU/AbqCsAizkqjzOn6cRZJxTTQ==", + "cpu": [ + "x64" + ], "dev": true, - "license": "MIT" + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=14.0.0" + } }, - "node_modules/prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": 
"sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "node_modules/sass-embedded-darwin-arm64": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-darwin-arm64/-/sass-embedded-darwin-arm64-1.93.3.tgz", + "integrity": "sha512-7zb/hpdMOdKteK17BOyyypemglVURd1Hdz6QGsggy60aUFfptTLQftLRg8r/xh1RbQAUKWFbYTNaM47J9yPxYg==", + "cpu": [ + "arm64" + ], "dev": true, "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], "engines": { - "node": ">= 0.8.0" + "node": ">=14.0.0" } }, - "node_modules/proxy-from-env": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "license": "MIT" - }, - "node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "node_modules/sass-embedded-darwin-x64": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-darwin-x64/-/sass-embedded-darwin-x64-1.93.3.tgz", + "integrity": "sha512-Ek1Vp8ZDQEe327Lz0b7h3hjvWH3u9XjJiQzveq74RPpJQ2q6d9LfWpjiRRohM4qK6o4XOHw1X10OMWPXJtdtWg==", + "cpu": [ + "x64" + ], "dev": true, "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], "engines": { - "node": ">=6" + "node": ">=14.0.0" } }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "node_modules/sass-embedded-linux-arm": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm/-/sass-embedded-linux-arm-1.93.3.tgz", + "integrity": 
"sha512-yeiv2y+dp8B4wNpd3+JsHYD0mvpXSfov7IGyQ1tMIR40qv+ROkRqYiqQvAOXf76Qwh4Y9OaYZtLpnsPjfeq6mA==", + "cpu": [ + "arm" + ], "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } + "license": "MIT", + "optional": true, + "os": [ + "linux" ], - "license": "MIT" + "engines": { + "node": ">=14.0.0" + } }, - "node_modules/react": { - "version": "18.3.1", - "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", - "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "node_modules/sass-embedded-linux-arm64": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm64/-/sass-embedded-linux-arm64-1.93.3.tgz", + "integrity": "sha512-RBrHWgfd8Dd8w4fbmdRVXRrhh8oBAPyeWDTKAWw8ZEmuXfVl4ytjDuyxaVilh6rR1xTRTNpbaA/YWApBlLrrNw==", + "cpu": [ + "arm64" + ], + "dev": true, "license": "MIT", - "dependencies": { - "loose-envify": "^1.1.0" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=0.10.0" + "node": ">=14.0.0" } }, - "node_modules/react-dom": { - "version": "18.3.1", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", - "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "node_modules/sass-embedded-linux-musl-arm": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-arm/-/sass-embedded-linux-musl-arm-1.93.3.tgz", + "integrity": "sha512-fU0fwAwbp7sBE3h5DVU5UPzvaLg7a4yONfFWkkcCp6ZrOiPuGRHXXYriWQ0TUnWy4wE+svsVuWhwWgvlb/tkKg==", + "cpu": [ + "arm" + ], + "dev": true, "license": "MIT", - "dependencies": { - "loose-envify": "^1.1.0", - "scheduler": "^0.23.2" - }, - "peerDependencies": { - "react": "^18.3.1" + "optional": true, + "os": [ + "linux" + 
], + "engines": { + "node": ">=14.0.0" } }, - "node_modules/react-refresh": { - "version": "0.17.0", - "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", - "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", + "node_modules/sass-embedded-linux-musl-arm64": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-arm64/-/sass-embedded-linux-musl-arm64-1.93.3.tgz", + "integrity": "sha512-PS829l+eUng+9W4PFclXGb4uA2+965NHV3/Sa5U7qTywjeeUUYTZg70dJHSqvhrBEfCc2XJABeW3adLJbyQYkw==", + "cpu": [ + "arm64" + ], "dev": true, "license": "MIT", + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=0.10.0" + "node": ">=14.0.0" } }, - "node_modules/react-router": { - "version": "6.30.1", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.30.1.tgz", - "integrity": "sha512-X1m21aEmxGXqENEPG3T6u0Th7g0aS4ZmoNynhbs+Cn+q+QGTLt+d5IQ2bHAXKzKcxGJjxACpVbnYQSCRcfxHlQ==", + "node_modules/sass-embedded-linux-musl-riscv64": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-riscv64/-/sass-embedded-linux-musl-riscv64-1.93.3.tgz", + "integrity": "sha512-cK1oBY+FWQquaIGEeQ5H74KTO8cWsSWwXb/WaildOO9U6wmUypTgUYKQ0o5o/29nZbWWlM1PHuwVYTSnT23Jjg==", + "cpu": [ + "riscv64" + ], + "dev": true, "license": "MIT", - "dependencies": { - "@remix-run/router": "1.23.0" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { "node": ">=14.0.0" - }, - "peerDependencies": { - "react": ">=16.8" } }, - "node_modules/react-router-dom": { - "version": "6.30.1", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.30.1.tgz", - "integrity": "sha512-llKsgOkZdbPU1Eg3zK8lCn+sjD9wMRZZPuzmdWWX5SUs8OFkN5HnFVC0u5KMeMaC9aoancFI/KoLuKPqN+hxHw==", + "node_modules/sass-embedded-linux-musl-x64": { + "version": "1.93.3", + "resolved": 
"https://registry.npmjs.org/sass-embedded-linux-musl-x64/-/sass-embedded-linux-musl-x64-1.93.3.tgz", + "integrity": "sha512-A7wkrsHu2/I4Zpa0NMuPGkWDVV7QGGytxGyUq3opSXgAexHo/vBPlGoDXoRlSdex0cV+aTMRPjoGIfdmNlHwyg==", + "cpu": [ + "x64" + ], + "dev": true, "license": "MIT", - "dependencies": { - "@remix-run/router": "1.23.0", - "react-router": "6.30.1" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { "node": ">=14.0.0" - }, - "peerDependencies": { - "react": ">=16.8", - "react-dom": ">=16.8" } }, - "node_modules/read-cache": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", - "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "node_modules/sass-embedded-linux-riscv64": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-riscv64/-/sass-embedded-linux-riscv64-1.93.3.tgz", + "integrity": "sha512-vWkW1+HTF5qcaHa6hO80gx/QfB6GGjJUP0xLbnAoY4pwEnw5ulGv6RM8qYr8IDhWfVt/KH+lhJ2ZFxnJareisQ==", + "cpu": [ + "riscv64" + ], "dev": true, "license": "MIT", - "dependencies": { - "pify": "^2.3.0" + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.0.0" } }, - "node_modules/readdirp": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "node_modules/sass-embedded-linux-x64": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-x64/-/sass-embedded-linux-x64-1.93.3.tgz", + "integrity": "sha512-k6uFxs+e5jSuk1Y0niCwuq42F9ZC5UEP7P+RIOurIm8w/5QFa0+YqeW+BPWEW5M1FqVOsNZH3qGn4ahqvAEjPA==", + "cpu": [ + "x64" + ], "dev": true, "license": "MIT", - "dependencies": { - "picomatch": "^2.2.1" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=8.10.0" + "node": ">=14.0.0" } }, - "node_modules/resolve": 
{ - "version": "1.22.10", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", - "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "node_modules/sass-embedded-unknown-all": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-unknown-all/-/sass-embedded-unknown-all-1.93.3.tgz", + "integrity": "sha512-o5wj2rLpXH0C+GJKt/VpWp6AnMsCCbfFmnMAttcrsa+U3yrs/guhZ3x55KAqqUsE8F47e3frbsDL+1OuQM5DAA==", "dev": true, "license": "MIT", + "optional": true, + "os": [ + "!android", + "!darwin", + "!linux", + "!win32" + ], "dependencies": { - "is-core-module": "^2.16.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "sass": "1.93.3" } }, - "node_modules/resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "node_modules/sass-embedded-win32-arm64": { + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-win32-arm64/-/sass-embedded-win32-arm64-1.93.3.tgz", + "integrity": "sha512-0dOfT9moy9YmBolodwYYXtLwNr4jL4HQC9rBfv6mVrD7ud8ue2kDbn+GVzj1hEJxvEexVSmDCf7MHUTLcGs9xQ==", + "cpu": [ + "arm64" + ], "dev": true, "license": "MIT", + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": ">=4" + "node": ">=14.0.0" } }, - "node_modules/reusify": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", - "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "node_modules/sass-embedded-win32-x64": { + "version": "1.93.3", + "resolved": 
"https://registry.npmjs.org/sass-embedded-win32-x64/-/sass-embedded-win32-x64-1.93.3.tgz", + "integrity": "sha512-wHFVfxiS9hU/sNk7KReD+lJWRp3R0SLQEX4zfOnRP2zlvI2X4IQR5aZr9GNcuMP6TmNpX0nQPZTegS8+h9RrEg==", + "cpu": [ + "x64" + ], "dev": true, "license": "MIT", + "optional": true, + "os": [ + "win32" + ], "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" + "node": ">=14.0.0" } }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "deprecated": "Rimraf versions prior to v4 are no longer supported", + "node_modules/sass-embedded/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "dev": true, - "license": "ISC", + "license": "MIT", "dependencies": { - "glob": "^7.1.3" + "has-flag": "^4.0.0" }, - "bin": { - "rimraf": "bin.js" + "engines": { + "node": ">=10" }, "funding": { - "url": "https://github.com/sponsors/isaacs" + "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/rollup": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.46.2.tgz", - "integrity": "sha512-WMmLFI+Boh6xbop+OAGo9cQ3OgX9MIg7xOQjn+pTCwOkk+FNDAeAemXkJ3HzDJrVXleLOFVa1ipuc1AmEx1Dwg==", + "node_modules/sass/node_modules/chokidar": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", "dev": true, "license": "MIT", + "optional": true, "dependencies": { - "@types/estree": "1.0.8" - }, - "bin": { - "rollup": "dist/bin/rollup" + "readdirp": "^4.0.1" }, "engines": { - "node": ">=18.0.0", - "npm": ">=8.0.0" + 
"node": ">= 14.16.0" }, - "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.46.2", - "@rollup/rollup-android-arm64": "4.46.2", - "@rollup/rollup-darwin-arm64": "4.46.2", - "@rollup/rollup-darwin-x64": "4.46.2", - "@rollup/rollup-freebsd-arm64": "4.46.2", - "@rollup/rollup-freebsd-x64": "4.46.2", - "@rollup/rollup-linux-arm-gnueabihf": "4.46.2", - "@rollup/rollup-linux-arm-musleabihf": "4.46.2", - "@rollup/rollup-linux-arm64-gnu": "4.46.2", - "@rollup/rollup-linux-arm64-musl": "4.46.2", - "@rollup/rollup-linux-loongarch64-gnu": "4.46.2", - "@rollup/rollup-linux-ppc64-gnu": "4.46.2", - "@rollup/rollup-linux-riscv64-gnu": "4.46.2", - "@rollup/rollup-linux-riscv64-musl": "4.46.2", - "@rollup/rollup-linux-s390x-gnu": "4.46.2", - "@rollup/rollup-linux-x64-gnu": "4.46.2", - "@rollup/rollup-linux-x64-musl": "4.46.2", - "@rollup/rollup-win32-arm64-msvc": "4.46.2", - "@rollup/rollup-win32-ia32-msvc": "4.46.2", - "@rollup/rollup-win32-x64-msvc": "4.46.2", - "fsevents": "~2.3.2" + "funding": { + "url": "https://paulmillr.com/funding/" } }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "node_modules/sass/node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], "license": "MIT", - "dependencies": { - "queue-microtask": "^1.2.2" + "optional": true, + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": 
"https://paulmillr.com/funding/" } }, "node_modules/scheduler": { @@ -4333,6 +5936,29 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/sync-child-process": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/sync-child-process/-/sync-child-process-1.0.2.tgz", + "integrity": "sha512-8lD+t2KrrScJ/7KXCSyfhT3/hRq78rC0wBFqNJXv3mZyn6hW2ypM05JmlSvtqRbeq6jqA94oHbxAr2vYsJ8vDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "sync-message-port": "^1.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/sync-message-port": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/sync-message-port/-/sync-message-port-1.1.3.tgz", + "integrity": "sha512-GTt8rSKje5FilG+wEdfCkOcLL7LWqpMlr2c3LRuKt/YXxcJ52aGSbGBAdI4L3aaqfrBt6y711El53ItyH1NWzg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/tailwindcss": { "version": "3.4.17", "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.17.tgz", @@ -4434,6 +6060,13 @@ "dev": true, "license": "Apache-2.0" }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD" + }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -4522,6 +6155,13 @@ "dev": true, "license": "MIT" }, + "node_modules/varint": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/varint/-/varint-6.0.0.tgz", + "integrity": "sha512-cXEIW6cfr15lFv563k4GuVuW/fiwjknytD37jIOLSdSWuOI6WnO/oKwmP2FQTU2l01LP8/M5TSAJpzUaGe3uWg==", + "dev": true, + "license": "MIT" + }, "node_modules/vite": { "version": "5.4.19", "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.19.tgz", diff --git a/backends/advanced/webui/package.json 
b/backends/advanced/webui/package.json index 17894a86..43647927 100644 --- a/backends/advanced/webui/package.json +++ b/backends/advanced/webui/package.json @@ -1,5 +1,5 @@ { - "name": "friend-lite-webui", + "name": "chronicle-webui", "private": true, "version": "0.1.0", "type": "module", @@ -12,14 +12,19 @@ "dependencies": { "axios": "^1.6.2", "clsx": "^2.0.0", + "d3": "^7.8.5", + "frappe-gantt": "^0.6.1", "lucide-react": "^0.294.0", "react": "^18.2.0", "react-dom": "^18.2.0", - "react-router-dom": "^6.20.0" + "react-router-dom": "^6.20.0", + "react-vertical-timeline-component": "^3.6.0" }, "devDependencies": { + "@types/d3": "^7.4.3", "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", + "@types/react-vertical-timeline-component": "^3.3.6", "@typescript-eslint/eslint-plugin": "^6.14.0", "@typescript-eslint/parser": "^6.14.0", "@vitejs/plugin-react": "^4.2.1", @@ -28,6 +33,7 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "postcss": "^8.4.32", + "sass-embedded": "^1.80.7", "tailwindcss": "^3.3.0", "typescript": "^5.2.2", "vite": "^5.0.8" diff --git a/backends/advanced/webui/src/components/layout/Layout.tsx b/backends/advanced/webui/src/components/layout/Layout.tsx index f4caf629..5995f823 100644 --- a/backends/advanced/webui/src/components/layout/Layout.tsx +++ b/backends/advanced/webui/src/components/layout/Layout.tsx @@ -31,7 +31,7 @@ export default function Layout() {

- Friend-Lite Dashboard + Chronicle Dashboard

@@ -102,7 +102,7 @@ export default function Layout() {
- ๐ŸŽต Friend-Lite Dashboard v1.0 | AI-powered personal audio system + ๐ŸŽต Chronicle Dashboard v1.0 | AI-powered personal audio system
diff --git a/backends/advanced/webui/src/pages/LoginPage.tsx b/backends/advanced/webui/src/pages/LoginPage.tsx index 717bb61d..7093e73a 100644 --- a/backends/advanced/webui/src/pages/LoginPage.tsx +++ b/backends/advanced/webui/src/pages/LoginPage.tsx @@ -58,7 +58,7 @@ export default function LoginPage() {

- Friend-Lite Dashboard + Chronicle Dashboard

Sign in to your account diff --git a/backends/advanced/webui/src/pages/Memories.tsx b/backends/advanced/webui/src/pages/Memories.tsx index 0c4973b6..732d1683 100644 --- a/backends/advanced/webui/src/pages/Memories.tsx +++ b/backends/advanced/webui/src/pages/Memories.tsx @@ -258,7 +258,7 @@ export default function Memories() { {memoryProvider && (

- Provider: {memoryProvider === 'friend_lite' ? 'Friend-Lite' : memoryProvider === 'openmemory_mcp' ? 'OpenMemory MCP' : memoryProvider} + Provider: {memoryProvider === 'chronicle' ? 'Chronicle' : memoryProvider === 'openmemory_mcp' ? 'OpenMemory MCP' : memoryProvider}

)}
@@ -313,7 +313,7 @@ export default function Memories() {
- {/* Initial Search Threshold Slider - Show for Friend-Lite provider */} + {/* Initial Search Threshold Slider - Show for Chronicle provider */} {memoryProviderSupportsThreshold && (
diff --git a/backends/advanced/webui/src/pages/MemoriesRouter.tsx b/backends/advanced/webui/src/pages/MemoriesRouter.tsx index b39663f9..fe6285e9 100644 --- a/backends/advanced/webui/src/pages/MemoriesRouter.tsx +++ b/backends/advanced/webui/src/pages/MemoriesRouter.tsx @@ -4,7 +4,7 @@ import Memories from './Memories' /** * Memories page wrapper that stores JWT for cross-origin Mycelia access. - * Always displays Friend-Lite native Memories component (backend proxies to provider). + * Always displays Chronicle native Memories component (backend proxies to provider). */ export default function MemoriesRouter() { const { token } = useAuth() @@ -17,6 +17,6 @@ export default function MemoriesRouter() { }, [token]) // Always show the native Memories page (works for all providers) - // Friend-Lite backend will proxy to Mycelia when needed + // Chronicle backend will proxy to Mycelia when needed return } diff --git a/backends/advanced/webui/src/pages/System.tsx b/backends/advanced/webui/src/pages/System.tsx index c722ada9..5c52e057 100644 --- a/backends/advanced/webui/src/pages/System.tsx +++ b/backends/advanced/webui/src/pages/System.tsx @@ -359,7 +359,7 @@ export default function System() { > {availableProviders.map((provider) => ( diff --git a/backends/advanced/webui/src/pages/TimelineRouter.tsx b/backends/advanced/webui/src/pages/TimelineRouter.tsx index 0e983ca6..fbe2f9cb 100644 --- a/backends/advanced/webui/src/pages/TimelineRouter.tsx +++ b/backends/advanced/webui/src/pages/TimelineRouter.tsx @@ -1,10 +1,9 @@ import { useState } from 'react' import { Calendar } from 'lucide-react' import FrappeGanttTimeline from './FrappeGanttTimeline' -import ReactGanttTimeline from './ReactGanttTimeline' import MyceliaTimeline from './MyceliaTimeline' -type TimelineImplementation = 'frappe' | 'react-gantt' | 'mycelia' +type TimelineImplementation = 'frappe' | 'mycelia' export default function TimelineRouter() { const [activeImplementation, setActiveImplementation] = 
useState('frappe') @@ -42,21 +41,6 @@ export default function TimelineRouter() { Default -
diff --git a/backends/advanced/webui/tsconfig.json b/backends/advanced/webui/tsconfig.json index 7a7611e4..7355a7c8 100644 --- a/backends/advanced/webui/tsconfig.json +++ b/backends/advanced/webui/tsconfig.json @@ -16,6 +16,7 @@ /* Linting */ "strict": true, + "noImplicitAny": false, "noUnusedLocals": true, "noUnusedParameters": true, "noFallthroughCasesInSwitch": true diff --git a/backends/charts/advanced-backend/Chart.yaml b/backends/charts/advanced-backend/Chart.yaml index 01aad364..c70e0509 100644 --- a/backends/charts/advanced-backend/Chart.yaml +++ b/backends/charts/advanced-backend/Chart.yaml @@ -1,10 +1,10 @@ apiVersion: v2 name: advanced-backend -description: Friend-lite Advanced Backend Service +description: Chronicle Advanced Backend Service version: 0.1.0 appVersion: "1.0" keywords: - - friend-lite + - chronicle - backend - ai sources: diff --git a/backends/charts/advanced-backend/templates/deployment.yaml b/backends/charts/advanced-backend/templates/deployment.yaml index 4082bd65..0e40a7fb 100644 --- a/backends/charts/advanced-backend/templates/deployment.yaml +++ b/backends/charts/advanced-backend/templates/deployment.yaml @@ -25,9 +25,9 @@ spec: protocol: TCP envFrom: - configMapRef: - name: friend-lite-config + name: chronicle-config - secretRef: - name: friend-lite-secrets + name: chronicle-secrets env: # Override specific values from Kubernetes/Helm if needed {{- range $key, $value := .Values.env }} @@ -70,9 +70,9 @@ spec: command: ["./start-workers.sh"] envFrom: - configMapRef: - name: friend-lite-config + name: chronicle-config - secretRef: - name: friend-lite-secrets + name: chronicle-secrets env: {{- range $key, $value := .Values.env }} - name: {{ $key }} diff --git a/backends/charts/advanced-backend/templates/workers-deployment.yaml b/backends/charts/advanced-backend/templates/workers-deployment.yaml index effcc10d..22751d31 100644 --- a/backends/charts/advanced-backend/templates/workers-deployment.yaml +++ 
b/backends/charts/advanced-backend/templates/workers-deployment.yaml @@ -24,9 +24,9 @@ spec: command: ["./start-workers.sh"] envFrom: - configMapRef: - name: friend-lite-config + name: chronicle-config - secretRef: - name: friend-lite-secrets + name: chronicle-secrets env: # Override specific values from Kubernetes/Helm if needed {{- range $key, $value := .Values.env }} diff --git a/backends/charts/advanced-backend/values.yaml b/backends/charts/advanced-backend/values.yaml index 8827a62d..a3a2812f 100644 --- a/backends/charts/advanced-backend/values.yaml +++ b/backends/charts/advanced-backend/values.yaml @@ -55,9 +55,9 @@ ingress: # hosts: defined in ingress-values.yaml and overridden by Skaffold # tls: - # - secretName: friend-lite-tls + # - secretName: chronicle-tls # hosts: - # - friend-lite.192-168-1-42.nip.io + # - chronicle.192-168-1-42.nip.io resources: limits: diff --git a/backends/charts/webui/Chart.yaml b/backends/charts/webui/Chart.yaml index cc3b2d32..869eba76 100644 --- a/backends/charts/webui/Chart.yaml +++ b/backends/charts/webui/Chart.yaml @@ -1,10 +1,10 @@ apiVersion: v2 name: webui -description: Friend-lite WebUI +description: Chronicle WebUI version: 0.1.0 appVersion: "1.0" keywords: - - friend-lite + - chronicle - webui - frontend sources: diff --git a/backends/charts/webui/templates/deployment.yaml b/backends/charts/webui/templates/deployment.yaml index a1358e2e..6d57204c 100644 --- a/backends/charts/webui/templates/deployment.yaml +++ b/backends/charts/webui/templates/deployment.yaml @@ -24,9 +24,9 @@ spec: protocol: TCP envFrom: - configMapRef: - name: friend-lite-config + name: chronicle-config - secretRef: - name: friend-lite-secrets + name: chronicle-secrets env: {{- range $key, $value := .Values.env }} - name: {{ $key }} diff --git a/backends/charts/webui/values.yaml b/backends/charts/webui/values.yaml index c56229e4..1dce6ab4 100644 --- a/backends/charts/webui/values.yaml +++ b/backends/charts/webui/values.yaml @@ -38,7 +38,7 @@ ingress: 
- host: external.example.com # Overridden by Skaffold setValueTemplates paths: *commonPaths # tls: - # - secretName: friend-lite-tls + # - secretName: chronicle-tls # hosts: # - webui.example.com diff --git a/extras/asr-services/README.md b/extras/asr-services/README.md index b235b659..670d33b5 100644 --- a/extras/asr-services/README.md +++ b/extras/asr-services/README.md @@ -1,6 +1,6 @@ # ASR Services -Offline Automatic Speech Recognition (ASR) services for Friend-Lite using the Wyoming protocol. +Offline Automatic Speech Recognition (ASR) services for Chronicle using the Wyoming protocol. ## Overview @@ -100,7 +100,7 @@ services: ## Integration -### With Friend-Lite Backend +### With Chronicle Backend The ASR services integrate as fallback transcription when Deepgram is unavailable: ```bash # Backend configuration diff --git a/extras/asr-services/quickstart.md b/extras/asr-services/quickstart.md index 1fed4c12..994d17bc 100644 --- a/extras/asr-services/quickstart.md +++ b/extras/asr-services/quickstart.md @@ -40,8 +40,8 @@ python client.py --host localhost --port 8765 --audio test.wav python client.py --host localhost --port 8765 --microphone ``` -### Integration with Friend-Lite -Set the offline ASR URI in your Friend-Lite backend: +### Integration with Chronicle +Set the offline ASR URI in your Chronicle backend: ```bash # In your .env file OFFLINE_ASR_TCP_URI=tcp://localhost:8765 @@ -55,7 +55,7 @@ OFFLINE_ASR_TCP_URI=tcp://localhost:8765 ## Next Steps -1. **Configure Backend**: Update Friend-Lite to use offline ASR as fallback +1. **Configure Backend**: Update Chronicle to use offline ASR as fallback 2. **Test Integration**: Verify transcription works when Deepgram is unavailable 3. **Performance Tuning**: Monitor CPU/memory usage and adjust as needed 4. 
**Production Deploy**: Scale services based on load requirements diff --git a/extras/asr-services/tests/test_parakeet_service.py b/extras/asr-services/tests/test_parakeet_service.py index 0fd3462d..4c94af12 100644 --- a/extras/asr-services/tests/test_parakeet_service.py +++ b/extras/asr-services/tests/test_parakeet_service.py @@ -13,7 +13,7 @@ Run with: # Run the test (service management is automatic) - cd /home/ankush/workspaces/friend-lite/extras/asr-services + cd /home/ankush/workspaces/chronicle/extras/asr-services uv run pytest tests/test_parakeet_service.py -v -s """ diff --git a/extras/havpe-relay/README.md b/extras/havpe-relay/README.md index 5ab061e3..2793b36d 100644 --- a/extras/havpe-relay/README.md +++ b/extras/havpe-relay/README.md @@ -161,4 +161,4 @@ You can test the relay using the provided test listener (if needed): ## License -This project is part of the friend-lite ecosystem. +This project is part of the chronicle ecosystem. diff --git a/extras/havpe-relay/main.py b/extras/havpe-relay/main.py index 4494bb00..eac6d58b 100644 --- a/extras/havpe-relay/main.py +++ b/extras/havpe-relay/main.py @@ -526,7 +526,7 @@ async def main(): # Print startup banner with authentication info logger.info("๐ŸŽต ========================================") - logger.info("๐ŸŽต Friend-Lite HAVPE Relay with Authentication") + logger.info("๐ŸŽต Chronicle HAVPE Relay with Authentication") logger.info("๐ŸŽต ========================================") logger.info(f"๐ŸŽง ESP32 Server: {args.host}:{args.port}") logger.info(f"๐Ÿ“ก Backend API: {BACKEND_URL}") diff --git a/extras/local-omi-bt/connect-omi.py b/extras/local-omi-bt/connect-omi.py index a689bb4b..302a17d7 100644 --- a/extras/local-omi-bt/connect-omi.py +++ b/extras/local-omi-bt/connect-omi.py @@ -10,8 +10,8 @@ from bleak.backends.device import BLEDevice from dotenv import load_dotenv, set_key from easy_audio_interfaces.filesystem import RollingFileSink -from friend_lite.bluetooth import listen_to_omi, print_devices 
-from friend_lite.decoder import OmiOpusDecoder +from chronicle.bluetooth import listen_to_omi, print_devices +from chronicle.decoder import OmiOpusDecoder from wyoming.audio import AudioChunk # Setup logging @@ -49,7 +49,7 @@ async def as_audio_chunks(it) -> AsyncGenerator[AudioChunk, None]: async for data in it: yield AudioChunk(audio=data, rate=16000, width=2, channels=1) -# Add this to friend-lite sdk +# Add this to chronicle sdk async def list_devices(prefix: str = "OMI") -> list[BLEDevice]: devices = await BleakScanner.discover() filtered_devices = [] diff --git a/extras/openmemory-mcp/README.md b/extras/openmemory-mcp/README.md index 82d033e0..940a33e5 100644 --- a/extras/openmemory-mcp/README.md +++ b/extras/openmemory-mcp/README.md @@ -1,6 +1,6 @@ # OpenMemory MCP Service -This directory contains a local deployment of the OpenMemory MCP (Model Context Protocol) server, which can be used as an alternative memory provider for Friend-Lite. +This directory contains a local deployment of the OpenMemory MCP (Model Context Protocol) server, which can be used as an alternative memory provider for Chronicle. ## What is OpenMemory MCP? @@ -30,9 +30,9 @@ cp .env.template .env ./run.sh --with-ui ``` -### 3. Configure Friend-Lite +### 3. Configure Chronicle -In your Friend-Lite backend `.env` file: +In your Chronicle backend `.env` file: ```bash # Use OpenMemory MCP instead of built-in memory processing @@ -52,7 +52,7 @@ The deployment includes: 2. **Qdrant Vector Database** (port 6334) - Stores memory embeddings - Enables semantic search - - Isolated from main Friend-Lite Qdrant + - Isolated from main Chronicle Qdrant 3. 
**OpenMemory UI** (port 3001, optional) - Web interface for memory management @@ -69,16 +69,16 @@ The deployment includes: - **UI** (if enabled): http://localhost:3001 -## How It Works with Friend-Lite +## How It Works with Chronicle -When configured with `MEMORY_PROVIDER=openmemory_mcp`, Friend-Lite will: +When configured with `MEMORY_PROVIDER=openmemory_mcp`, Chronicle will: 1. Send raw conversation transcripts to OpenMemory MCP 2. OpenMemory extracts memories using OpenAI 3. Memories are stored in the dedicated Qdrant instance -4. Friend-Lite can search memories via the MCP protocol +4. Chronicle can search memories via the MCP protocol -This replaces Friend-Lite's built-in memory processing with OpenMemory's implementation. +This replaces Chronicle's built-in memory processing with OpenMemory's implementation. ## Managing Services @@ -98,7 +98,7 @@ docker compose restart ## Testing -### Standalone Test (No Friend-Lite Dependencies) +### Standalone Test (No Chronicle Dependencies) Test the OpenMemory MCP server directly: @@ -117,9 +117,9 @@ This test verifies: - Memory deletion - MCP protocol endpoints -### Integration Test (With Friend-Lite) +### Integration Test (With Chronicle) -Test the integration between Friend-Lite and OpenMemory MCP: +Test the integration between Chronicle and OpenMemory MCP: ```bash # From backends/advanced directory @@ -134,7 +134,7 @@ This test verifies: - MCP client functionality - OpenMemoryMCPService implementation - Service factory integration -- Memory operations through Friend-Lite interface +- Memory operations through Chronicle interface ## Troubleshooting @@ -143,35 +143,35 @@ This test verifies: If ports are already in use, edit `docker-compose.yml`: - Change `8765:8765` to another port for MCP server - Change `6334:6333` to another port for Qdrant -- Update Friend-Lite's `OPENMEMORY_MCP_URL` accordingly +- Update Chronicle's `OPENMEMORY_MCP_URL` accordingly ### Memory Not Working 1. 
Check OpenMemory logs: `docker compose logs openmemory-mcp` 2. Verify OPENAI_API_KEY is set correctly -3. Ensure Friend-Lite backend is configured with correct URL +3. Ensure Chronicle backend is configured with correct URL 4. Test MCP endpoint: `curl http://localhost:8765/api/v1/memories?user_id=test` ### Connection Issues -- Ensure containers are on same network if running Friend-Lite in Docker +- Ensure containers are on same network if running Chronicle in Docker - Use `host.docker.internal` instead of `localhost` when connecting from Docker containers ## Advanced Configuration ### Using with Docker Network -If Friend-Lite backend is also running in Docker: +If Chronicle backend is also running in Docker: ```yaml -# In Friend-Lite docker-compose.yml +# In Chronicle docker-compose.yml networks: default: external: name: openmemory-mcp_openmemory-network ``` -Then use container names in Friend-Lite .env: +Then use container names in Chronicle .env: ```bash OPENMEMORY_MCP_URL=http://openmemory-mcp:8765 ``` @@ -184,4 +184,4 @@ OpenMemory uses OpenAI by default. To use different models, you would need to mo - [OpenMemory Documentation](https://docs.mem0.ai/open-memory/introduction) - [MCP Protocol Spec](https://github.com/mem0ai/mem0/tree/main/openmemory) -- [Friend-Lite Memory Docs](../../backends/advanced/MEMORY_PROVIDERS.md) \ No newline at end of file +- [Chronicle Memory Docs](../../backends/advanced/MEMORY_PROVIDERS.md) \ No newline at end of file diff --git a/extras/openmemory-mcp/run.sh b/extras/openmemory-mcp/run.sh index 1cc0bf21..1092207a 100755 --- a/extras/openmemory-mcp/run.sh +++ b/extras/openmemory-mcp/run.sh @@ -2,7 +2,7 @@ set -e -echo "๐Ÿš€ Starting OpenMemory MCP installation for Friend-Lite..." +echo "๐Ÿš€ Starting OpenMemory MCP installation for Chronicle..." 
# Set environment variables OPENAI_API_KEY="${OPENAI_API_KEY:-}" @@ -64,9 +64,9 @@ if docker ps | grep -q openmemory-mcp; then curl -s http://localhost:8765/openapi.json | jq '.paths | keys[]' fi echo "" - echo "๐Ÿ“š Integration with Friend-Lite:" - echo " Set MEMORY_PROVIDER=openmemory_mcp in your Friend-Lite .env" - echo " Set OPENMEMORY_MCP_URL=http://localhost:8765 in your Friend-Lite .env" + echo "๐Ÿ“š Integration with Chronicle:" + echo " Set MEMORY_PROVIDER=openmemory_mcp in your Chronicle .env" + echo " Set OPENMEMORY_MCP_URL=http://localhost:8765 in your Chronicle .env" echo "" echo "๐Ÿ” Check logs: docker compose logs -f" echo "๐Ÿ›‘ Stop services: docker compose down" diff --git a/extras/openmemory-mcp/test_standalone.py b/extras/openmemory-mcp/test_standalone.py index 58f011a4..08720f4e 100755 --- a/extras/openmemory-mcp/test_standalone.py +++ b/extras/openmemory-mcp/test_standalone.py @@ -2,7 +2,7 @@ """Standalone test script for OpenMemory MCP server. This script tests the OpenMemory MCP server directly using its REST API, -without any dependencies on Friend-Lite backend code. +without any dependencies on Chronicle backend code. """ import asyncio diff --git a/extras/speaker-omni-experimental/README.md b/extras/speaker-omni-experimental/README.md index 0f0d34c7..5c7e3a30 100644 --- a/extras/speaker-omni-experimental/README.md +++ b/extras/speaker-omni-experimental/README.md @@ -355,7 +355,7 @@ Approximate processing times (7B model on RTX 4090): ## ๐Ÿ”ฎ Integration Path -This experimental system can be integrated with the existing Friend-Lite backend: +This experimental system can be integrated with the existing Chronicle backend: 1. **Standalone Testing**: Use this directory for initial family testing 2. 
**API Wrapper**: Create FastAPI endpoint similar to traditional speaker service @@ -372,7 +372,7 @@ This experimental system can be integrated with the existing Friend-Lite backend ## ๐Ÿ”— Related Files - `../speaker-recognition/`: Traditional PyAnnote-based system -- `../../backends/advanced-backend/`: Main Friend-Lite backend +- `../../backends/advanced-backend/`: Main Chronicle backend - `../../extras/test-audios/`: Sample audio files for testing ## ๐Ÿ“ Development Notes @@ -394,4 +394,4 @@ This is an experimental system. Feedback and improvements welcome: ## ๐Ÿ“„ License -Part of the Friend-Lite project. See main repository license. \ No newline at end of file +Part of the Chronicle project. See main repository license. \ No newline at end of file diff --git a/extras/speaker-recognition/charts/templates/speaker-deployment.yaml b/extras/speaker-recognition/charts/templates/speaker-deployment.yaml index 94417297..d77f6204 100644 --- a/extras/speaker-recognition/charts/templates/speaker-deployment.yaml +++ b/extras/speaker-recognition/charts/templates/speaker-deployment.yaml @@ -34,7 +34,7 @@ spec: protocol: TCP envFrom: - configMapRef: - name: friend-lite-config + name: chronicle-config - secretRef: name: {{ .Values.secrets.name | default (printf "%s-secrets" .Release.Name) }} env: diff --git a/extras/speaker-recognition/charts/templates/webui-deployment.yaml b/extras/speaker-recognition/charts/templates/webui-deployment.yaml index aca7f872..ab8ba3e5 100644 --- a/extras/speaker-recognition/charts/templates/webui-deployment.yaml +++ b/extras/speaker-recognition/charts/templates/webui-deployment.yaml @@ -28,9 +28,9 @@ spec: protocol: TCP envFrom: - configMapRef: - name: friend-lite-config + name: chronicle-config - secretRef: - name: friend-lite-secrets + name: chronicle-secrets env: {{- range $key, $value := .Values.webui.env }} - name: {{ $key }} diff --git a/extras/speaker-recognition/charts/values.yaml b/extras/speaker-recognition/charts/values.yaml index 
a1ea8f34..afccf104 100644 --- a/extras/speaker-recognition/charts/values.yaml +++ b/extras/speaker-recognition/charts/values.yaml @@ -94,6 +94,6 @@ secrets: hfToken: "" deepgramApiKey: "" # Fixed secret name to prevent regeneration - uses existing secret from Makefile - name: "friend-lite-secrets" + name: "chronicle-secrets" # Don't create the secret, use existing one from Makefile create: false \ No newline at end of file diff --git a/extras/speaker-recognition/init.py b/extras/speaker-recognition/init.py index 8d1dd547..8267e35b 100755 --- a/extras/speaker-recognition/init.py +++ b/extras/speaker-recognition/init.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -Friend-Lite Speaker Recognition Setup Script +Chronicle Speaker Recognition Setup Script Interactive configuration for speaker recognition service """ diff --git a/extras/speaker-recognition/quickstart.md b/extras/speaker-recognition/quickstart.md index c594069a..47a99f56 100644 --- a/extras/speaker-recognition/quickstart.md +++ b/extras/speaker-recognition/quickstart.md @@ -47,7 +47,7 @@ docker compose up --build -d **Important**: Accept the SSL certificate warning when prompted. Self-signed certificates are used for local development. -**Note**: Speaker Recognition runs on port 8444 (HTTPS) and 8081 (HTTP) to avoid conflicts with the main Friend-Lite backend which uses the standard ports 443/80. +**Note**: Speaker Recognition runs on port 8444 (HTTPS) and 8081 (HTTP) to avoid conflicts with the main Chronicle backend which uses the standard ports 443/80. 
**Need to customize other settings?** Copy `.env.template` to `.env` and modify: ```bash diff --git a/extras/speaker-recognition/src/simple_speaker_recognition/__init__.py b/extras/speaker-recognition/src/simple_speaker_recognition/__init__.py index bb656b8f..c2893e0f 100644 --- a/extras/speaker-recognition/src/simple_speaker_recognition/__init__.py +++ b/extras/speaker-recognition/src/simple_speaker_recognition/__init__.py @@ -18,7 +18,7 @@ """ __version__ = "0.1.0" -__author__ = "Friend-Lite Team" +__author__ = "Chronicle Team" # Import core classes for convenience from .core.audio_backend import AudioBackend diff --git a/extras/speaker-recognition/ssl/generate-ssl.sh b/extras/speaker-recognition/ssl/generate-ssl.sh index c1e832c5..6ef71bd8 100755 --- a/extras/speaker-recognition/ssl/generate-ssl.sh +++ b/extras/speaker-recognition/ssl/generate-ssl.sh @@ -1,7 +1,7 @@ #!/bin/bash set -e -# Generate self-signed SSL certificate for Friend-Lite Advanced Backend +# Generate self-signed SSL certificate for Chronicle Advanced Backend # Supports localhost, IP addresses, and domain names SERVER_ADDRESS="$1" diff --git a/extras/speaker-recognition/tests/test_speaker_service_integration.py b/extras/speaker-recognition/tests/test_speaker_service_integration.py index 5d8872da..58e55b61 100644 --- a/extras/speaker-recognition/tests/test_speaker_service_integration.py +++ b/extras/speaker-recognition/tests/test_speaker_service_integration.py @@ -34,7 +34,7 @@ CLEANUP_CONTAINERS = os.environ.get("CLEANUP_CONTAINERS", "true").lower() == "true" REBUILD = os.environ.get("REBUILD", "false").lower() == "true" -REPO_ROOT = Path(__file__).resolve().parents[3] # Go up to friend-lite root +REPO_ROOT = Path(__file__).resolve().parents[3] # Go up to chronicle root SPEAKER_DIR = REPO_ROOT / "extras" / "speaker-recognition" TEST_ASSETS_DIR = SPEAKER_DIR / "tests" / "assets" diff --git a/k8s-manifests/cross-namespace-rbac.yaml b/k8s-manifests/cross-namespace-rbac.yaml index 6beb54c6..632cbf2f 
100644 --- a/k8s-manifests/cross-namespace-rbac.yaml +++ b/k8s-manifests/cross-namespace-rbac.yaml @@ -6,16 +6,16 @@ metadata: name: speech-config-reader namespace: speech --- -# Role in friend-lite namespace to read ConfigMap/Secret +# Role in chronicle namespace to read ConfigMap/Secret apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: name: config-reader - namespace: friend-lite + namespace: chronicle rules: - apiGroups: [""] resources: ["configmaps", "secrets"] - resourceNames: ["friend-lite-config", "friend-lite-secrets"] + resourceNames: ["chronicle-config", "chronicle-secrets"] verbs: ["get", "list"] --- # RoleBinding to allow speech service account to read config @@ -23,7 +23,7 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: name: speech-config-access - namespace: friend-lite + namespace: chronicle subjects: - kind: ServiceAccount name: speech-config-reader diff --git a/quickstart.md b/quickstart.md index bcadc72a..3c6f2bcb 100644 --- a/quickstart.md +++ b/quickstart.md @@ -1,10 +1,10 @@ -# Friend-Lite Quick Start +# Chronicle Quick Start ## What You're Building (Complete Beginners Start Here!) You're setting up your own personal AI that: - **Runs on your home computer** - processes audio, stores memories, runs AI models -- **Connects to your phone** - where you use the Friend-Lite app and OMI device +- **Connects to your phone** - where you use the Chronicle app and OMI device - **Works everywhere** - your phone can access your home AI from anywhere Think of it like having Siri/Alexa, but it's **your own AI** running on **your hardware** with **your data**. 
@@ -13,12 +13,12 @@ Think of it like having Siri/Alexa, but it's **your own AI** running on **your h ### On Your Home Computer - **Docker** - Runs all the AI services (like having multiple apps in containers) -- **Friend-Lite Backend** - The main AI brain (transcription, memory, processing) +- **Chronicle Backend** - The main AI brain (transcription, memory, processing) - **Tailscale** - Creates secure tunnel so your phone can reach home ### On Your Phone - **Tailscale** - Connects securely to your home computer -- **Friend-Lite Mobile App** - Interface for your OMI device and conversations +- **Chronicle Mobile App** - Interface for your OMI device and conversations ### AI Services (Choose Your Path) @@ -101,14 +101,14 @@ The setup wizard will automatically download and configure: *Note: First-time setup will download AI models (this can take time and storage space)* -## Step 3: Download and Setup Friend-Lite +## Step 3: Download and Setup Chronicle ### On Your Home Computer **Download the code:** ```bash -git clone https://github.com/AnkushMalaker/friend-lite.git -cd friend-lite +git clone https://github.com/AnkushMalaker/chronicle.git +cd chronicle ``` **Run the setup wizard:** @@ -173,26 +173,26 @@ Before connecting your phone, make sure everything works: *Your browser will warn about "unsafe certificate" - click "Advanced" โ†’ "Proceed anyway"* -2. You should see the Friend-Lite dashboard +2. You should see the Chronicle dashboard 3. Click "Live Recording" in the sidebar 4. Test your microphone - record a short clip 5. Check that it gets transcribed and appears in "Conversations" 6. **Only proceed to phone setup when this works perfectly!** -## Step 5: Install Friend-Lite on Your Phone +## Step 5: Install Chronicle on Your Phone **No development setup needed - just download and install!** ### Android Users -1. Go to [GitHub Releases](https://github.com/AnkushMalaker/friend-lite/releases) -2. Find the latest release and download `friend-lite-android.apk` +1. 
Go to [GitHub Releases](https://github.com/AnkushMalaker/chronicle/releases) +2. Find the latest release and download `chronicle-android.apk` 3. Install APK on your phone: - Enable "Install from unknown sources" in Android settings - Tap the downloaded APK file to install ### iPhone Users -1. Go to [GitHub Releases](https://github.com/AnkushMalaker/friend-lite/releases) -2. Find the latest release and download `friend-lite-ios.ipa` +1. Go to [GitHub Releases](https://github.com/AnkushMalaker/chronicle/releases) +2. Find the latest release and download `chronicle-ios.ipa` 3. Install using sideloading tool: - **AltStore** (recommended): [altstore.io](https://altstore.io) - **Sideloadly**: [sideloadly.io](https://sideloadly.io) @@ -201,7 +201,7 @@ Before connecting your phone, make sure everything works: ### Configure the App 1. **First**: Make sure Tailscale is running on your phone -2. Open Friend-Lite app +2. Open Chronicle app 3. Go to Settings โ†’ Backend Configuration 4. Enter Backend URL: `https://[your-tailscale-ip]` @@ -216,7 +216,7 @@ Before connecting your phone, make sure everything works: ## Step 6: Connect Your OMI Device 1. Turn on your OMI/Friend device (make sure it's charged) -2. Open Friend-Lite app on your phone +2. Open Chronicle app on your phone 3. Go to "Devices" tab โ†’ "Add New Device" 4. Follow Bluetooth pairing instructions 5. Once connected, start a conversation! diff --git a/run-test.sh b/run-test.sh index fce082e5..ebc39a07 100755 --- a/run-test.sh +++ b/run-test.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Friend-Lite Local Test Runner +# Chronicle Local Test Runner # Runs the same tests as GitHub CI but configured for local development # Usage: ./run-test.sh [advanced-backend|speaker-recognition|all] @@ -63,12 +63,12 @@ run_speaker_recognition_tests() { } # Main execution -print_info "Friend-Lite Local Test Runner" +print_info "Chronicle Local Test Runner" print_info "==============================" # Check if we're in the right directory if [ ! 
-f "CLAUDE.md" ]; then - print_error "Please run this script from the friend-lite root directory" + print_error "Please run this script from the chronicle root directory" exit 1 fi diff --git a/scripts/generate-k8s-configs.py b/scripts/generate-k8s-configs.py index 9b800fff..2eea45aa 100755 --- a/scripts/generate-k8s-configs.py +++ b/scripts/generate-k8s-configs.py @@ -12,7 +12,7 @@ from env_utils import get_resolved_env_vars, classify_secrets -def generate_k8s_manifests(namespace: str = "friend-lite"): +def generate_k8s_manifests(namespace: str = "chronicle"): """Generate Kubernetes ConfigMap and Secret manifests""" print(f"Generating Kubernetes ConfigMap and Secret for namespace {namespace}...") @@ -30,10 +30,10 @@ def generate_k8s_manifests(namespace: str = "friend-lite"): f.write("apiVersion: v1\n") f.write("kind: ConfigMap\n") f.write("metadata:\n") - f.write(f" name: friend-lite-config\n") + f.write(f" name: chronicle-config\n") f.write(f" namespace: {namespace}\n") f.write(" labels:\n") - f.write(" app.kubernetes.io/name: friend-lite\n") + f.write(" app.kubernetes.io/name: chronicle\n") f.write(" app.kubernetes.io/component: config\n") f.write("data:\n") @@ -50,10 +50,10 @@ def generate_k8s_manifests(namespace: str = "friend-lite"): f.write("kind: Secret\n") f.write("type: Opaque\n") f.write("metadata:\n") - f.write(f" name: friend-lite-secrets\n") + f.write(f" name: chronicle-secrets\n") f.write(f" namespace: {namespace}\n") f.write(" labels:\n") - f.write(" app.kubernetes.io/name: friend-lite\n") + f.write(" app.kubernetes.io/name: chronicle\n") f.write(" app.kubernetes.io/component: secrets\n") f.write("data:\n") @@ -74,7 +74,7 @@ def generate_k8s_manifests(namespace: str = "friend-lite"): def main(): """Main entry point""" - namespace = sys.argv[1] if len(sys.argv) > 1 else "friend-lite" + namespace = sys.argv[1] if len(sys.argv) > 1 else "chronicle" generate_k8s_manifests(namespace) if __name__ == "__main__": diff --git a/scripts/k8s/cluster-status.sh 
b/scripts/k8s/cluster-status.sh index 9733066f..8f3cb644 100644 --- a/scripts/k8s/cluster-status.sh +++ b/scripts/k8s/cluster-status.sh @@ -5,7 +5,7 @@ # # Usage: ./scripts/cluster-status.sh [namespace] # Example: ./scripts/cluster-status.sh -# Example: ./scripts/cluster-status.sh friend-lite +# Example: ./scripts/cluster-status.sh chronicle set -e diff --git a/scripts/k8s/load-env.sh b/scripts/k8s/load-env.sh index 97f873eb..a9ab113f 100644 --- a/scripts/k8s/load-env.sh +++ b/scripts/k8s/load-env.sh @@ -31,7 +31,7 @@ load_config_env() { export SPEAKER_NODE="${SPEAKER_NODE:-}" export CONTAINER_REGISTRY="${CONTAINER_REGISTRY:-localhost:32000}" export INFRASTRUCTURE_NAMESPACE="${INFRASTRUCTURE_NAMESPACE:-root}" - export APPLICATION_NAMESPACE="${APPLICATION_NAMESPACE:-friend-lite}" + export APPLICATION_NAMESPACE="${APPLICATION_NAMESPACE:-chronicle}" export STORAGE_CLASS="${STORAGE_CLASS:-openebs-hostpath}" } diff --git a/scripts/manage-audio-files.sh b/scripts/manage-audio-files.sh index 981d38cf..f02547f1 100755 --- a/scripts/manage-audio-files.sh +++ b/scripts/manage-audio-files.sh @@ -5,7 +5,7 @@ set -e -NAMESPACE="friend-lite" +NAMESPACE="chronicle" POD_NAME="" AUDIO_CHUNKS_DIR="/app/data/audio_chunks" DATA_DIR="/app/data" diff --git a/services.py b/services.py index a3d734d4..ba5fed2f 100755 --- a/services.py +++ b/services.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -Friend-Lite Service Management +Chronicle Service Management Start, stop, and manage configured services """ @@ -265,7 +265,7 @@ def show_status(): console.print("\n๐Ÿ’ก [dim]Use 'python services.py start --all' to start all configured services[/dim]") def main(): - parser = argparse.ArgumentParser(description="Friend-Lite Service Management") + parser = argparse.ArgumentParser(description="Chronicle Service Management") subparsers = parser.add_subparsers(dest='command', help='Available commands') # Start command diff --git a/skaffold.yaml b/skaffold.yaml index f40d4407..279566ce 100644 --- 
a/skaffold.yaml +++ b/skaffold.yaml @@ -1,7 +1,7 @@ apiVersion: skaffold/v4beta13 kind: Config metadata: - name: friend-lite + name: chronicle build: tagPolicy: dateTime: @@ -149,7 +149,7 @@ profiles: image.repository: "{{.IMAGE_REPO_advanced_backend}}" image.tag: "{{.IMAGE_TAG_advanced_backend}}" # Override specific Kubernetes-specific values (not in env file) - env.MONGODB_URI: "mongodb://mongodb.{{.INFRASTRUCTURE_NAMESPACE}}.svc.cluster.local:27017/friend-lite" + env.MONGODB_URI: "mongodb://mongodb.{{.INFRASTRUCTURE_NAMESPACE}}.svc.cluster.local:27017/chronicle" env.QDRANT_BASE_URL: "qdrant.{{.INFRASTRUCTURE_NAMESPACE}}.svc.cluster.local" env.REDIS_URL: "redis://redis-master.{{.INFRASTRUCTURE_NAMESPACE}}.svc.cluster.local:6379/0" persistence.storageClass: "openebs-hostpath" diff --git a/status.py b/status.py index 1ae9a353..babf6cb7 100644 --- a/status.py +++ b/status.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -Friend-Lite Health Status Checker +Chronicle Health Status Checker Show runtime health status of all services """ @@ -162,7 +162,7 @@ def get_service_health(service_name: str) -> Dict[str, Any]: def show_quick_status(): """Show quick status overview""" - console.print("\n๐Ÿฅ [bold]Friend-Lite Health Status[/bold]\n") + console.print("\n๐Ÿฅ [bold]Chronicle Health Status[/bold]\n") table = Table(title="Service Status Overview") table.add_column("Service", style="cyan", no_wrap=True) @@ -215,7 +215,7 @@ def show_quick_status(): def show_detailed_status(): """Show detailed status with backend health breakdown""" - console.print("\n๐Ÿฅ [bold]Friend-Lite Detailed Health Status[/bold]\n") + console.print("\n๐Ÿฅ [bold]Chronicle Detailed Health Status[/bold]\n") # Get all service statuses for service_name, service_info in SERVICES.items(): @@ -320,7 +320,7 @@ def show_json_status(): def main(): parser = argparse.ArgumentParser( - description="Friend-Lite Health Status Checker", + description="Chronicle Health Status Checker", 
formatter_class=argparse.RawDescriptionHelpFormatter, epilog=""" Examples: diff --git a/tests/.env.test b/tests/.env.test index 4317e347..974dcee2 100644 --- a/tests/.env.test +++ b/tests/.env.test @@ -11,4 +11,14 @@ ADMIN_PASSWORD=test-admin-password-123 # Test configuration TEST_TIMEOUT=120 -TEST_DEVICE_NAME=robot-test \ No newline at end of file +TEST_DEVICE_NAME=robot-test + +MEMORY_PROVIDER=chronicle + +# Docker container names (test environment) +BACKEND_CONTAINER=advanced-chronicle-backend-test-1 +WORKERS_CONTAINER=advanced-workers-test-1 +MONGO_CONTAINER=advanced-mongo-test-1 +REDIS_CONTAINER=advanced-redis-test-1 +QDRANT_CONTAINER=advanced-qdrant-test-1 +WEBUI_CONTAINER=advanced-webui-test-1 \ No newline at end of file diff --git a/tests/Makefile b/tests/Makefile index b878b0f3..9acfb9e5 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -1,4 +1,4 @@ -# Friend-Lite Test Makefile +# Chronicle Test Makefile # Shortcuts for running tests .PHONY: help all clean @@ -8,7 +8,7 @@ OUTPUTDIR ?= results TEST_DIR = endpoints integration infrastructure help: - @echo "Friend-Lite Test Targets:" + @echo "Chronicle Test Targets:" @echo "" @echo "Running Tests:" @echo " make all - Run all tests" diff --git a/tests/README.md b/tests/README.md index 1a6c7480..a16a0281 100644 --- a/tests/README.md +++ b/tests/README.md @@ -1,6 +1,6 @@ -# Friend-Lite API Tests +# Chronicle API Tests -Comprehensive Robot Framework test suite for the Friend-Lite advanced backend API endpoints. +Comprehensive Robot Framework test suite for the Chronicle advanced backend API endpoints. ## Quick Start @@ -87,7 +87,7 @@ If you already have the backend running, you can use the Makefile directly: ## Running Tests ### Prerequisites -1. Friend-Lite backend running at `http://localhost:8001` (or set `API_URL` in `.env`) +1. Chronicle backend running at `http://localhost:8001` (or set `API_URL` in `.env`) 2. Admin user credentials configured in `.env` 3. 
Robot Framework and RequestsLibrary installed diff --git a/tests/TESTING_USER_GUIDE.md b/tests/TESTING_USER_GUIDE.md index e2b3b8ad..d1ebbd9d 100644 --- a/tests/TESTING_USER_GUIDE.md +++ b/tests/TESTING_USER_GUIDE.md @@ -1,6 +1,6 @@ # Robot Framework Testing User Guide -A beginner-friendly guide to setting up VSCode for Robot Framework testing, running tests, and creating new tests for the Friend-Lite project. +A beginner-friendly guide to setting up VSCode for Robot Framework testing, running tests, and creating new tests for the Chronicle project. ## Table of Contents - [VSCode Setup](#vscode-setup) diff --git a/tests/browser/browser_auth.robot b/tests/browser/browser_auth.robot index 430f9fcc..90820c71 100644 --- a/tests/browser/browser_auth.robot +++ b/tests/browser/browser_auth.robot @@ -25,7 +25,7 @@ Test Browser Can Access Login Page Fill Text id=password ${ADMIN_PASSWORD} Click button[type="submit"] # Verify that we are logged in by checking for the presence of the dashboard - Get Element text=Friend-Lite Dashboard + Get Element text=Chronicle Dashboard Log Successfully accessed login page and logged in INFO diff --git a/tests/infrastructure/infra_tests.robot b/tests/infrastructure/infra_tests.robot index d10c9476..aa64ecb0 100644 --- a/tests/infrastructure/infra_tests.robot +++ b/tests/infrastructure/infra_tests.robot @@ -26,8 +26,8 @@ Suite Setup Suite Setup Suite Teardown Suite Teardown Test Setup Test Cleanup *** Variables *** -${WORKERS_CONTAINER} advanced-workers-test-1 -${REDIS_CONTAINER} advanced-redis-test-1 +# Container names are now loaded from test_env.py via .env.test +# These local variables can override if needed, but default to env values *** Keywords *** @@ -258,11 +258,13 @@ WebSocket Disconnect Conversation End Reason Test ${device_name}= Set Variable disconnect ${stream_id}= Open Audio Stream device_name=${device_name} - # Send audio fast (no realtime pacing) to simulate disconnect before END signal - Send Audio Chunks To Stream 
${stream_id} ${TEST_AUDIO_FILE} num_chunks=100 + # Send enough audio to trigger speech detection (test audio has speech) + # Test audio is 4 minutes long at 16kHz, sending 200 chunks ensures enough speech + Send Audio Chunks To Stream ${stream_id} ${TEST_AUDIO_FILE} num_chunks=200 # Wait for conversation job to be created and conversation_id to be populated - ${conv_jobs}= Wait Until Keyword Succeeds 30s 2s + # Transcription + speech analysis takes time (30-60s with queue) + ${conv_jobs}= Wait Until Keyword Succeeds 60s 3s ... Job Type Exists For Client open_conversation ${device_name} # Wait for conversation_id in job meta (created asynchronously) @@ -287,3 +289,4 @@ WebSocket Disconnect Conversation End Reason Test Should Not Be Equal ${conversation}[completed_at] ${None} [Teardown] Run Keyword And Ignore Error Close Audio Stream ${stream_id} + diff --git a/tests/integration/websocket_streaming_tests.robot b/tests/integration/websocket_streaming_tests.robot index f2375261..6f2c7a9a 100644 --- a/tests/integration/websocket_streaming_tests.robot +++ b/tests/integration/websocket_streaming_tests.robot @@ -211,8 +211,7 @@ Segment Timestamps Match Cropped Audio # Uses default EXPECTED_SEGMENT_TIMES from test_data.py # To use a different dataset: Verify Segments Match Expected Timestamps ${segments} ${EXPECTED_SEGMENT_TIMES_SHORT} # To use custom tolerance: Verify Segments Match Expected Timestamps ${segments} ${EXPECTED_SEGMENT_TIMES} ${tolerance}=1.0 - Verify Segments Match Expected Timestamps ${segments} - + Verify Segments Match Expected Timestamps ${segments} expected_segments=${EXPECTED_SEGMENT_TIMES} Log To Console โœ“ Validated ${segment_count} segments with proper cropped timestamps matching expected data diff --git a/tests/libs/audio_stream_library.py b/tests/libs/audio_stream_library.py index 7c2ddcee..25399175 100644 --- a/tests/libs/audio_stream_library.py +++ b/tests/libs/audio_stream_library.py @@ -103,6 +103,34 @@ def send_audio_chunks( ) +def 
send_audio_stop_event(stream_id: str) -> None: + """Send audio-stop event without closing the WebSocket connection. + + This is used to test the user_stopped end_reason scenario where + the user manually stops recording but the connection remains open. + """ + session = _manager._sessions.get(stream_id) + if not session: + raise ValueError(f"Stream {stream_id} not found") + + import asyncio + + async def _send_stop(): + try: + await session.client.send_audio_stop() + session.audio_stopped = True + except Exception as e: + session.error = str(e) + raise + + # Run in the stream's event loop + future = asyncio.run_coroutine_threadsafe(_send_stop(), session.loop) + future.result(timeout=5) # Wait for audio-stop to be sent + + if session.error: + raise RuntimeError(f"Failed to send audio-stop: {session.error}") + + def stop_audio_stream(stream_id: str) -> int: """Stop an audio stream and close the connection.""" return _manager.stop_stream(stream_id) diff --git a/tests/resources/transcript_verification.robot b/tests/resources/transcript_verification.robot index 068f63f4..a1965f5b 100644 --- a/tests/resources/transcript_verification.robot +++ b/tests/resources/transcript_verification.robot @@ -254,32 +254,6 @@ Verify Segments Match Expected Timestamps Log All ${actual_count} segments matched expected timestamps within ${tolerance}s tolerance INFO - Verify Transcript Content - [Documentation] Verify transcript contains expected content and quality - [Arguments] ${conversation} ${expected_keywords} ${min_length}=50 - - Dictionary Should Contain Key ${conversation} transcript - ${transcript}= Set Variable ${conversation}[transcript] - Should Not Be Empty ${transcript} - - # Check length - ${transcript_length}= Get Length ${transcript} - Should Be True ${transcript_length} >= ${min_length} Transcript too short: ${transcript_length} - - # Check for expected keywords - ${transcript_lower}= Convert To Lower Case ${transcript} - FOR ${keyword} IN @{expected_keywords} - 
${keyword_lower}= Convert To Lower Case ${keyword} - Should Contain ${transcript_lower} ${keyword_lower} Missing keyword: ${keyword} - END - - # Verify segments exist - Dictionary Should Contain Key ${conversation} segments - ${segments}= Set Variable ${conversation}[segments] - ${segment_count}= Get Length ${segments} - Should Be True ${segment_count} > 0 No segments found - - Log Transcript verification passed: ${transcript_length} chars, ${segment_count} segments INFO Verify Transcript Content [Documentation] Verify transcript contains expected content and quality @@ -308,29 +282,4 @@ Verify Transcript Content Log Transcript verification passed: ${transcript_length} chars, ${segment_count} segments INFO - Verify Transcript Content - [Documentation] Verify transcript contains expected content and quality - [Arguments] ${conversation} ${expected_keywords} ${min_length}=50 - - Dictionary Should Contain Key ${conversation} transcript - ${transcript}= Set Variable ${conversation}[transcript] - Should Not Be Empty ${transcript} - - # Check length - ${transcript_length}= Get Length ${transcript} - Should Be True ${transcript_length} >= ${min_length} Transcript too short: ${transcript_length} - - # Check for expected keywords - ${transcript_lower}= Convert To Lower Case ${transcript} - FOR ${keyword} IN @{expected_keywords} - ${keyword_lower}= Convert To Lower Case ${keyword} - Should Contain ${transcript_lower} ${keyword_lower} Missing keyword: ${keyword} - END - - # Verify segments exist - Dictionary Should Contain Key ${conversation} segments - ${segments}= Set Variable ${conversation}[segments] - ${segment_count}= Get Length ${segments} - Should Be True ${segment_count} > 0 No segments found - - Log Transcript verification passed: ${transcript_length} chars, ${segment_count} segments INFO + \ No newline at end of file diff --git a/tests/resources/websocket_keywords.robot b/tests/resources/websocket_keywords.robot index 25b8499c..f1ee54b4 100644 --- 
a/tests/resources/websocket_keywords.robot +++ b/tests/resources/websocket_keywords.robot @@ -89,6 +89,15 @@ Send Audio Chunks To Stream Log Sent ${chunks_sent} chunks to stream ${stream_id} RETURN ${chunks_sent} +Send Audio Stop Event + [Documentation] Send audio-stop event without closing the WebSocket + ... This simulates a user manually stopping recording + [Arguments] ${stream_id} + + # Call the Python library method directly + Send Audio Stop Event ${stream_id} + Log Sent audio-stop event to stream ${stream_id} + Close Audio Stream [Documentation] Stop an audio stream and close the connection [Arguments] ${stream_id} diff --git a/tests/setup/setup_keywords.robot b/tests/setup/setup_keywords.robot index e3809c1c..3fe7bd17 100644 --- a/tests/setup/setup_keywords.robot +++ b/tests/setup/setup_keywords.robot @@ -106,7 +106,7 @@ Start Docker Services # Clean up any stopped/stuck containers first Run Process docker compose -f ${compose_file} down -v cwd=${working_dir} shell=True - Run Process docker rm -f advanced-mongo-test-1 advanced-redis-test-1 advanced-qdrant-test-1 advanced-friend-backend-test-1 advanced-workers-test-1 shell=True + Run Process docker rm -f ${MONGO_CONTAINER} ${REDIS_CONTAINER} ${QDRANT_CONTAINER} ${BACKEND_CONTAINER} ${WORKERS_CONTAINER} ${WEBUI_CONTAINER} shell=True # Start containers IF ${build} diff --git a/tests/setup/test_env.py b/tests/setup/test_env.py index c250262b..d1f7f03c 100644 --- a/tests/setup/test_env.py +++ b/tests/setup/test_env.py @@ -55,4 +55,12 @@ "retry_count": 3, "retry_delay": 1, "default_timeout": 30 -} \ No newline at end of file +} + +# Docker Container Names (from .env.test) +BACKEND_CONTAINER = os.getenv('BACKEND_CONTAINER', 'advanced-chronicle-backend-test-1') +WORKERS_CONTAINER = os.getenv('WORKERS_CONTAINER', 'advanced-workers-test-1') +MONGO_CONTAINER = os.getenv('MONGO_CONTAINER', 'advanced-mongo-test-1') +REDIS_CONTAINER = os.getenv('REDIS_CONTAINER', 'advanced-redis-test-1') +QDRANT_CONTAINER = 
os.getenv('QDRANT_CONTAINER', 'advanced-qdrant-test-1') +WEBUI_CONTAINER = os.getenv('WEBUI_CONTAINER', 'advanced-webui-test-1') \ No newline at end of file diff --git a/tests/setup/test_manager_keywords.robot b/tests/setup/test_manager_keywords.robot index a7ad5783..65506551 100644 --- a/tests/setup/test_manager_keywords.robot +++ b/tests/setup/test_manager_keywords.robot @@ -34,14 +34,16 @@ Clear Test Databases Log To Console Clearing test databases and audio files... # Clear MongoDB collections but preserve admin user and fixtures - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.users.deleteMany({'email': {\\$ne:'${ADMIN_EMAIL}'}})" shell=True + Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.users.deleteMany({'email': {\\$ne:'${ADMIN_EMAIL}'}})" shell=True - # Clear conversations and audio_chunks except those tagged as fixtures - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.conversations.deleteMany({\\$or: [{'is_fixture': {\\$exists: false}}, {'is_fixture': false}]})" shell=True - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.audio_chunks.deleteMany({\\$or: [{'is_fixture': {\\$exists: false}}, {'is_fixture': false}]})" shell=True + # Clear conversations except those tagged as fixtures + Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.conversations.deleteMany({\\$or: [{'is_fixture': {\\$exists: false}}, {'is_fixture': false}]})" shell=True + + # Clear job references from remaining conversations to prevent "No such job" errors + Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.conversations.updateMany({}, {\\$unset: {'transcription_job_id': '', 'speaker_job_id': '', 'memory_job_id': ''}})" shell=True # Count fixtures for logging - ${result}= Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.conversations.countDocuments({'is_fixture': true})" --quiet shell=True + ${result}= Run Process docker exec 
${MONGO_CONTAINER} mongosh test_db --eval "db.conversations.countDocuments({'is_fixture': true})" --quiet shell=True ${fixture_count}= Strip String ${result.stdout} IF '${fixture_count}' != '0' @@ -51,7 +53,7 @@ Clear Test Databases END # Clear admin user's registered_clients dict to prevent client_id counter increments - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.users.updateOne({'email':'${ADMIN_EMAIL}'}, {\\$set: {'registered_clients': {}}})" shell=True + Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.users.updateOne({'email':'${ADMIN_EMAIL}'}, {\\$set: {'registered_clients': {}}})" shell=True # Clear Qdrant collections # Note: Fixture memories will be lost here unless we implement Qdrant metadata filtering @@ -65,13 +67,13 @@ Clear Test Databases Log To Console Audio files cleared (fixtures/ subfolder preserved) # Clear container audio files (except fixtures subfolder) - Run Process bash -c docker exec advanced-friend-backend-test-1 find /app/audio_chunks -maxdepth 1 -name "*.wav" -delete || true shell=True - Run Process bash -c docker exec advanced-friend-backend-test-1 find /app/debug_dir -name "*" -type f -delete || true shell=True + Run Process bash -c docker exec ${BACKEND_CONTAINER} find /app/audio_chunks -maxdepth 1 -name "*.wav" -delete || true shell=True + Run Process bash -c docker exec ${BACKEND_CONTAINER} find /app/debug_dir -name "*" -type f -delete || true shell=True # Clear Redis queues and job registries (preserve worker registrations, failed and completed jobs) # Delete all rq:* keys except worker registrations (rq:worker:*), failed jobs (rq:failed:*), and completed jobs (rq:finished:*) ${redis_clear_script}= Set Variable redis-cli --scan --pattern "rq:*" | grep -Ev "^rq:(worker|failed|finished)" | xargs -r redis-cli DEL; redis-cli --scan --pattern "audio:*" | xargs -r redis-cli DEL; redis-cli --scan --pattern "consumer:*" | xargs -r redis-cli DEL - Run Process docker exec 
advanced-redis-test-1 sh -c ${redis_clear_script} shell=True + Run Process docker exec ${REDIS_CONTAINER} sh -c ${redis_clear_script} shell=True Log To Console Redis queues and job registries cleared (worker registrations preserved) Clear All Test Data @@ -79,9 +81,8 @@ Clear All Test Data Log To Console Clearing ALL test data including admin user and fixtures... # Wipe all MongoDB collections - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.users.deleteMany({})" shell=True - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.conversations.deleteMany({})" shell=True - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.audio_chunks.deleteMany({})" shell=True + Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.users.deleteMany({})" shell=True + Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.conversations.deleteMany({})" shell=True Log To Console MongoDB completely cleared # Clear Qdrant @@ -93,7 +94,7 @@ Clear All Test Data Run Process bash -c rm -rf ${EXECDIR}/backends/advanced/data/test_debug_dir/* || true shell=True # Clear all Redis data - Run Process docker exec advanced-redis-test-1 redis-cli FLUSHALL shell=True + Run Process docker exec ${REDIS_CONTAINER} redis-cli FLUSHALL shell=True Log To Console All test data cleared @@ -125,13 +126,9 @@ Create Fixture Conversation Should Not Be Empty ${transcript} Fixture conversation has no transcript # Tag this conversation as a fixture in MongoDB so cleanup preserves it - ${result}= Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.conversations.updateOne({'conversation_id': '${conversation_id}'}, {\\$set: {'is_fixture': true}})" shell=True + ${result}= Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.conversations.updateOne({'conversation_id': '${conversation_id}'}, {\\$set: {'is_fixture': true}})" shell=True Should Be Equal As Integers ${result.rc} 0 Failed to 
tag conversation as fixture: ${result.stderr} - # Also tag audio_chunks - ${result2}= Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.audio_chunks.updateMany({'conversation_id': '${conversation_id}'}, {\\$set: {'is_fixture': true}})" shell=True - Should Be Equal As Integers ${result2.rc} 0 Failed to tag audio chunks as fixture: ${result2.stderr} - Log To Console โœ“ Audio files stored in fixtures/ subfolder ${transcript_len}= Get Length ${transcript} diff --git a/tests/tags.md b/tests/tags.md index e41874f5..6ddb6fba 100644 --- a/tests/tags.md +++ b/tests/tags.md @@ -1,10 +1,10 @@ # Robot Framework Test Tags Reference -This document defines the standard tags used across the Friend-Lite test suite. +This document defines the standard tags used across the Chronicle test suite. ## Simplified Tag Set -Friend-Lite uses a **minimal, focused tag set** for test organization. Only 11 tags are permitted. +Chronicle uses a **minimal, focused tag set** for test organization. Only 11 tags are permitted. 
## Tag Format diff --git a/wizard.py b/wizard.py index 25ef890f..6e6ad6cb 100755 --- a/wizard.py +++ b/wizard.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -Friend-Lite Root Setup Orchestrator +Chronicle Root Setup Orchestrator Handles service selection and delegation only - no configuration duplication """ @@ -100,7 +100,7 @@ def check_service_exists(service_name, service_config): def select_services(): """Let user select which services to setup""" - console.print("๐Ÿš€ [bold cyan]Friend-Lite Service Setup[/bold cyan]") + console.print("๐Ÿš€ [bold cyan]Chronicle Service Setup[/bold cyan]") console.print("Select which services to configure:\n") selected = [] @@ -302,7 +302,7 @@ def setup_git_hooks(): def main(): """Main orchestration logic""" - console.print("๐ŸŽ‰ [bold green]Welcome to Friend-Lite![/bold green]\n") + console.print("๐ŸŽ‰ [bold green]Welcome to Chronicle![/bold green]\n") # Setup git hooks first setup_git_hooks() @@ -411,7 +411,7 @@ def main(): console.print("4. Stop services when done:") console.print(" [cyan]uv run --with-requirements setup-requirements.txt python services.py stop --all[/cyan]") - console.print(f"\n๐Ÿš€ [bold]Enjoy Friend-Lite![/bold]") + console.print(f"\n๐Ÿš€ [bold]Enjoy Chronicle![/bold]") # Show individual service usage console.print(f"\n๐Ÿ’ก [dim]Tip: You can also setup services individually:[/dim]") From 88d3df628328226cc1d8b5ec3175b299beb8cfde Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Fri, 5 Dec 2025 00:48:43 +0000 Subject: [PATCH 12/31] Changed name to chronicle # Conflicts: # README-K8S.md # backends/advanced/src/advanced_omi_backend/auth.py # backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py # backends/advanced/webui/package-lock.json # backends/advanced/webui/package.json # quickstart.md # tests/infrastructure/infra_tests.robot # tests/integration/websocket_streaming_tests.robot --- .env.template | 16 +- CLAUDE.md | 26 +-- Docs/features.md | 10 +- Docs/getting-started.md | 32 ++-- 
Docs/init-system.md | 8 +- Docs/ports-and-access.md | 4 +- Makefile | 36 ++-- README-K8S.md | 42 ++--- README.md | 2 +- app/README.md | 18 +- app/app.json | 10 +- app/app/components/DeviceDetails.tsx | 2 +- app/app/components/DeviceListItem.tsx | 2 +- app/app/hooks/useAudioListener.ts | 2 +- app/app/hooks/useDeviceConnection.ts | 2 +- app/app/hooks/useDeviceScanning.ts | 2 +- app/app/index.tsx | 4 +- app/package.json | 4 +- backends/README.md | 4 +- backends/advanced/Docs/HTTPS_SETUP.md | 10 +- backends/advanced/Docs/README.md | 4 +- backends/advanced/Docs/UI.md | 2 +- backends/advanced/Docs/architecture.md | 6 +- backends/advanced/Docs/auth.md | 4 +- backends/advanced/Docs/memories.md | 8 +- backends/advanced/Docs/quickstart.md | 32 ++-- backends/advanced/docker-compose-test.yml | 114 ++++++------ backends/advanced/docker-compose.yml | 12 +- backends/advanced/init-https.sh | 6 +- backends/advanced/init.py | 24 +-- .../scripts/create_mycelia_api_key.py | 4 +- .../scripts/sync_friendlite_mycelia.py | 74 ++++---- backends/advanced/setup-https.sh | 6 +- .../src/advanced_omi_backend/app_config.py | 6 +- .../src/advanced_omi_backend/app_factory.py | 2 +- .../advanced/src/advanced_omi_backend/auth.py | 10 +- .../src/advanced_omi_backend/chat_service.py | 2 +- .../advanced_omi_backend/clients/__init__.py | 2 +- .../src/advanced_omi_backend/config.py | 2 +- .../controllers/system_controller.py | 10 +- .../controllers/websocket_controller.py | 2 +- .../src/advanced_omi_backend/database.py | 2 +- .../middleware/app_middleware.py | 2 +- .../advanced_omi_backend/models/__init__.py | 4 +- .../advanced_omi_backend/models/audio_file.py | 2 +- .../models/conversation.py | 4 +- .../routers/api_router.py | 2 +- .../routers/modules/__init__.py | 2 +- .../routers/modules/chat_routes.py | 2 +- .../routers/modules/client_routes.py | 2 +- .../routers/modules/conversation_routes.py | 2 +- .../routers/modules/health_routes.py | 24 +-- .../routers/modules/memory_routes.py | 2 +- 
.../routers/modules/system_routes.py | 2 +- .../routers/modules/user_routes.py | 2 +- .../routers/modules/websocket_routes.py | 2 +- .../advanced_omi_backend/services/__init__.py | 2 +- .../services/memory/__init__.py | 4 +- .../services/memory/config.py | 14 +- .../services/memory/providers/__init__.py | 6 +- .../{friend_lite.py => chronicle.py} | 0 .../services/memory/providers/mcp_client.py | 18 +- .../services/memory/providers/mycelia.py | 8 +- .../memory/providers/openmemory_mcp.py | 22 +-- .../services/memory/service_factory.py | 14 +- .../services/mycelia_sync.py | 58 +++--- .../workers/conversation_jobs.py | 7 +- .../workers/memory_jobs.py | 4 +- backends/advanced/ssl/generate-ssl.sh | 2 +- backends/advanced/start-k8s.sh | 4 +- backends/advanced/start-workers.sh | 2 +- backends/advanced/start.sh | 4 +- .../tests/test_conversation_models.py | 12 +- backends/advanced/tests/test_integration.py | 4 +- backends/advanced/upload_files.py | 6 +- backends/advanced/webui/README.md | 4 +- backends/advanced/webui/package-lock.json | 174 +++++++++--------- backends/advanced/webui/package.json | 4 +- .../webui/src/components/layout/Layout.tsx | 4 +- .../advanced/webui/src/pages/LoginPage.tsx | 2 +- .../advanced/webui/src/pages/Memories.tsx | 4 +- .../webui/src/pages/MemoriesRouter.tsx | 4 +- backends/advanced/webui/src/pages/System.tsx | 2 +- backends/advanced/webui/tsconfig.json | 1 + backends/charts/advanced-backend/Chart.yaml | 4 +- .../templates/deployment.yaml | 8 +- .../templates/workers-deployment.yaml | 4 +- backends/charts/advanced-backend/values.yaml | 4 +- backends/charts/webui/Chart.yaml | 4 +- .../charts/webui/templates/deployment.yaml | 4 +- backends/charts/webui/values.yaml | 2 +- extras/asr-services/README.md | 4 +- extras/asr-services/quickstart.md | 6 +- .../tests/test_parakeet_service.py | 2 +- extras/havpe-relay/README.md | 2 +- extras/havpe-relay/main.py | 2 +- extras/local-omi-bt/connect-omi.py | 6 +- extras/openmemory-mcp/README.md | 38 ++-- 
extras/openmemory-mcp/run.sh | 8 +- extras/openmemory-mcp/test_standalone.py | 2 +- extras/speaker-omni-experimental/README.md | 6 +- .../charts/templates/speaker-deployment.yaml | 2 +- .../charts/templates/webui-deployment.yaml | 4 +- extras/speaker-recognition/charts/values.yaml | 2 +- extras/speaker-recognition/init.py | 2 +- extras/speaker-recognition/quickstart.md | 2 +- .../simple_speaker_recognition/__init__.py | 2 +- .../speaker-recognition/ssl/generate-ssl.sh | 2 +- .../tests/test_speaker_service_integration.py | 2 +- k8s-manifests/cross-namespace-rbac.yaml | 8 +- quickstart.md | 30 +-- run-test.sh | 6 +- scripts/generate-k8s-configs.py | 12 +- scripts/k8s/cluster-status.sh | 2 +- scripts/k8s/load-env.sh | 2 +- scripts/manage-audio-files.sh | 2 +- services.py | 4 +- skaffold.yaml | 4 +- status.py | 8 +- tests/.env.test | 12 +- tests/Makefile | 4 +- tests/README.md | 6 +- tests/TESTING_USER_GUIDE.md | 2 +- tests/browser/browser_auth.robot | 2 +- tests/infrastructure/infra_tests.robot | 4 +- tests/libs/audio_stream_library.py | 28 +++ tests/resources/websocket_keywords.robot | 9 + tests/setup/setup_keywords.robot | 2 +- tests/setup/test_env.py | 2 +- tests/setup/test_manager_keywords.robot | 33 ++-- tests/tags.md | 4 +- wizard.py | 8 +- 132 files changed, 696 insertions(+), 636 deletions(-) rename backends/advanced/src/advanced_omi_backend/services/memory/providers/{friend_lite.py => chronicle.py} (100%) diff --git a/.env.template b/.env.template index 97495493..328d3301 100644 --- a/.env.template +++ b/.env.template @@ -1,7 +1,7 @@ # ======================================== -# FRIEND-LITE MASTER CONFIGURATION +# CHRONICLE MASTER CONFIGURATION # ======================================== -# This is the master configuration template for the entire Friend-Lite project. +# This is the master configuration template for the entire Chronicle project. 
# Copy this file to .env and customize values, then run 'make config' to generate # all service-specific configuration files. @@ -11,7 +11,7 @@ # Infrastructure namespaces INFRASTRUCTURE_NAMESPACE=infrastructure -APPLICATION_NAMESPACE=friend-lite +APPLICATION_NAMESPACE=chronicle # Deployment mode: docker-compose, kubernetes, or distributed DEPLOYMENT_MODE=docker-compose @@ -24,7 +24,7 @@ CONTAINER_REGISTRY=localhost:32000 # ======================================== # Primary domain/IP for all services -# Examples: localhost, 192.168.1.100, friend-lite.example.com, 100.x.x.x (Tailscale) +# Examples: localhost, 192.168.1.100, chronicle.example.com, 100.x.x.x (Tailscale) DOMAIN=localhost # Service ports (Docker Compose mode) @@ -105,7 +105,7 @@ PARAKEET_ASR_URL=http://host.docker.internal:8767 # MongoDB configuration MONGODB_URI=mongodb://mongo:${MONGODB_PORT} -MONGODB_K8S_URI=mongodb://mongodb.${INFRASTRUCTURE_NAMESPACE}.svc.cluster.local:27017/friend-lite +MONGODB_K8S_URI=mongodb://mongodb.${INFRASTRUCTURE_NAMESPACE}.svc.cluster.local:27017/chronicle # Qdrant configuration QDRANT_BASE_URL=qdrant @@ -120,12 +120,12 @@ NEO4J_PASSWORD=neo4j-password # MEMORY PROVIDER CONFIGURATION # ======================================== -# Memory Provider: friend_lite or openmemory_mcp -MEMORY_PROVIDER=friend_lite +# Memory Provider: chronicle or openmemory_mcp +MEMORY_PROVIDER=chronicle # OpenMemory MCP configuration (when MEMORY_PROVIDER=openmemory_mcp) OPENMEMORY_MCP_URL=http://host.docker.internal:8765 -OPENMEMORY_CLIENT_NAME=friend_lite +OPENMEMORY_CLIENT_NAME=chronicle OPENMEMORY_USER_ID=openmemory OPENMEMORY_TIMEOUT=30 diff --git a/CLAUDE.md b/CLAUDE.md index 0f579d33..ec326b6d 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -4,7 +4,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co ## Project Overview -Friend-Lite is at the core an AI-powered personal system - various devices, including but not limited to wearables from OMI can be used for at the 
very least audio capture, speaker specific transcription, memory extraction and retrieval. +Chronicle is at the core an AI-powered personal system - various devices, including but not limited to wearables from OMI can be used for at the very least audio capture, speaker specific transcription, memory extraction and retrieval. On top of that - it is being designed to support other services, that can help a user with these inputs such as reminders, action items, personal diagnosis etc. This supports a comprehensive web dashboard for management. @@ -147,7 +147,7 @@ docker compose up --build - **Job Tracker**: Tracks pipeline jobs with stage events (audio โ†’ transcription โ†’ memory) and completion status - **Task Management**: BackgroundTaskManager tracks all async tasks to prevent orphaned processes - **Unified Transcription**: Deepgram/Mistral transcription with fallback to offline ASR services -- **Memory System**: Pluggable providers (Friend-Lite native or OpenMemory MCP) +- **Memory System**: Pluggable providers (Chronicle native or OpenMemory MCP) - **Authentication**: Email-based login with MongoDB ObjectId user system - **Client Management**: Auto-generated client IDs as `{user_id_suffix}-{device_name}`, centralized ClientManager - **Data Storage**: MongoDB (`audio_chunks` collection for conversations), vector storage (Qdrant or OpenMemory) @@ -161,7 +161,7 @@ Required: - LLM Service: Memory extraction and action items (OpenAI or Ollama) Recommended: - - Vector Storage: Qdrant (Friend-Lite provider) or OpenMemory MCP server + - Vector Storage: Qdrant (Chronicle provider) or OpenMemory MCP server - Transcription: Deepgram, Mistral, or offline ASR services Optional: @@ -179,8 +179,8 @@ Optional: 4. **Speech-Driven Conversation Creation**: User-facing conversations only created when speech is detected 5. **Dual Storage System**: Audio sessions always stored in `audio_chunks`, conversations created in `conversations` collection only with speech 6. 
**Versioned Processing**: Transcript and memory versions tracked with active version pointers -7. **Memory Processing**: Pluggable providers (Friend-Lite native with individual facts or OpenMemory MCP delegation) -8. **Memory Storage**: Direct Qdrant (Friend-Lite) or OpenMemory server (MCP provider) +7. **Memory Processing**: Pluggable providers (Chronicle native with individual facts or OpenMemory MCP delegation) +8. **Memory Storage**: Direct Qdrant (Chronicle) or OpenMemory server (MCP provider) 9. **Action Items**: Automatic task detection with "Simon says" trigger phrases 10. **Audio Optimization**: Speech segment extraction removes silence automatically 11. **Task Tracking**: BackgroundTaskManager ensures proper cleanup of all async operations @@ -230,11 +230,11 @@ DEEPGRAM_API_KEY=your-deepgram-key-here # Optional: TRANSCRIPTION_PROVIDER=deepgram # Memory Provider -MEMORY_PROVIDER=friend_lite # or openmemory_mcp +MEMORY_PROVIDER=chronicle # or openmemory_mcp # Database MONGODB_URI=mongodb://mongo:27017 -# Database name: friend-lite +# Database name: chronicle QDRANT_BASE_URL=qdrant # Network Configuration @@ -246,12 +246,12 @@ CORS_ORIGINS=http://localhost:3000,http://localhost:5173 ### Memory Provider Configuration -Friend-Lite supports two pluggable memory backends: +Chronicle supports two pluggable memory backends: -#### Friend-Lite Memory Provider (Default) +#### Chronicle Memory Provider (Default) ```bash -# Use Friend-Lite memory provider (default) -MEMORY_PROVIDER=friend_lite +# Use Chronicle memory provider (default) +MEMORY_PROVIDER=chronicle # LLM Configuration for memory extraction LLM_PROVIDER=openai @@ -269,7 +269,7 @@ MEMORY_PROVIDER=openmemory_mcp # OpenMemory MCP Server Configuration OPENMEMORY_MCP_URL=http://host.docker.internal:8765 -OPENMEMORY_CLIENT_NAME=friend_lite +OPENMEMORY_CLIENT_NAME=chronicle OPENMEMORY_USER_ID=openmemory OPENMEMORY_TIMEOUT=30 @@ -279,7 +279,7 @@ OPENAI_API_KEY=your-openai-key-here ### Transcription Provider 
Configuration -Friend-Lite supports multiple transcription services: +Chronicle supports multiple transcription services: ```bash # Option 1: Deepgram (High quality, recommended) diff --git a/Docs/features.md b/Docs/features.md index 25c5671c..57e3413f 100644 --- a/Docs/features.md +++ b/Docs/features.md @@ -1,11 +1,11 @@ -# Friend-Lite Features & Architecture +# Chronicle Features & Architecture ## Core Features -Friend-Lite supports AI-powered personal systems through multiple OMI-compatible audio devices: +Chronicle supports AI-powered personal systems through multiple OMI-compatible audio devices: **Memory System:** -- **Advanced memory system** with pluggable providers (Friend-Lite native or OpenMemory MCP) +- **Advanced memory system** with pluggable providers (Chronicle native or OpenMemory MCP) - **Memory extraction** from conversations with individual fact storage - **Semantic memory search** with relevance threshold filtering and live results - **Memory count display** with total count tracking from native providers @@ -38,7 +38,7 @@ DevKit2 streams audio via Bluetooth using OPUS codec. 
The processing pipeline in **AI Processing:** - LLM-based conversation analysis (OpenAI or local Ollama) -- **Dual memory system**: Friend-Lite native or OpenMemory MCP integration +- **Dual memory system**: Chronicle native or OpenMemory MCP integration - Enhanced memory extraction with individual fact storage - **Semantic search** with relevance scoring and threshold filtering - Smart deduplication and memory updates (ADD/UPDATE/DELETE) @@ -87,7 +87,7 @@ Choose one based on your needs: **Features:** - Audio processing pipeline with real-time WebSocket support -- **Pluggable memory system**: Choose between Friend-Lite native or OpenMemory MCP +- **Pluggable memory system**: Choose between Chronicle native or OpenMemory MCP - Enhanced memory extraction with individual fact storage (no generic fallbacks) - **Semantic memory search** with relevance threshold filtering and total count display - **Speaker-based memory filtering**: Optional control over processing based on participant presence diff --git a/Docs/getting-started.md b/Docs/getting-started.md index 2f647b7b..6483f00f 100644 --- a/Docs/getting-started.md +++ b/Docs/getting-started.md @@ -1,16 +1,16 @@ # Getting Started -# Friend-Lite Backend Quickstart Guide +# Chronicle Backend Quickstart Guide -> ๐Ÿ“– **New to friend-lite?** This is your starting point! After reading this, continue with [architecture.md](./architecture.md) for technical details. +> ๐Ÿ“– **New to chronicle?** This is your starting point! After reading this, continue with [architecture.md](./architecture.md) for technical details. ## Overview -Friend-Lite is an eco-system of services to support "AI wearable" agents/functionality. +Chronicle is an eco-system of services to support "AI wearable" agents/functionality. 
At the moment, the basic functionalities are: - Audio capture (via WebSocket, from OMI device, files, or a laptop) - Audio transcription -- **Advanced memory system** with pluggable providers (Friend-Lite native or OpenMemory MCP) +- **Advanced memory system** with pluggable providers (Chronicle native or OpenMemory MCP) - **Enhanced memory extraction** with individual fact storage and smart updates - **Semantic memory search** with relevance threshold filtering and live results - Action item extraction @@ -38,13 +38,13 @@ cd backends/advanced - **Authentication**: Admin email/password setup - **Transcription Provider**: Choose Deepgram, Mistral, or Offline (Parakeet) - **LLM Provider**: Choose OpenAI or Ollama for memory extraction -- **Memory Provider**: Choose Friend-Lite Native or OpenMemory MCP +- **Memory Provider**: Choose Chronicle Native or OpenMemory MCP - **Optional Services**: Speaker Recognition and other extras - **Network Configuration**: Ports and host settings **Example flow:** ``` -๐Ÿš€ Friend-Lite Interactive Setup +๐Ÿš€ Chronicle Interactive Setup =============================================== โ–บ Authentication Setup @@ -126,13 +126,13 @@ ADMIN_EMAIL=admin@example.com **Memory Provider Configuration:** ```bash # Memory Provider (Choose One) -# Option 1: Friend-Lite Native (Default - Recommended) -MEMORY_PROVIDER=friend_lite +# Option 1: Chronicle Native (Default - Recommended) +MEMORY_PROVIDER=chronicle # Option 2: OpenMemory MCP (Cross-client compatibility) # MEMORY_PROVIDER=openmemory_mcp # OPENMEMORY_MCP_URL=http://host.docker.internal:8765 -# OPENMEMORY_CLIENT_NAME=friend_lite +# OPENMEMORY_CLIENT_NAME=chronicle # OPENMEMORY_USER_ID=openmemory ``` @@ -325,8 +325,8 @@ curl -X POST "http://localhost:8000/api/process-audio-files" \ ### Memory & Intelligence #### Pluggable Memory System -- **Two memory providers**: Choose between Friend-Lite native or OpenMemory MCP -- **Friend-Lite Provider**: Full control with custom extraction, individual 
fact storage, smart deduplication +- **Two memory providers**: Choose between Chronicle native or OpenMemory MCP +- **Chronicle Provider**: Full control with custom extraction, individual fact storage, smart deduplication - **OpenMemory MCP Provider**: Cross-client compatibility (Claude Desktop, Cursor, Windsurf), professional processing #### Enhanced Memory Processing @@ -482,7 +482,7 @@ tailscale ip -4 ## Data Architecture -The friend-lite backend uses a **user-centric data architecture**: +The chronicle backend uses a **user-centric data architecture**: - **All memories are keyed by database user_id** (not client_id) - **Client information is stored in metadata** for reference and debugging @@ -495,12 +495,12 @@ For detailed information, see [User Data Architecture](user-data-architecture.md ### Choosing a Memory Provider -Friend-Lite offers two memory backends: +Chronicle offers two memory backends: -#### 1. Friend-Lite Native +#### 1. Chronicle Native ```bash # In your .env file -MEMORY_PROVIDER=friend_lite +MEMORY_PROVIDER=chronicle LLM_PROVIDER=openai OPENAI_API_KEY=your-openai-key-here ``` @@ -519,7 +519,7 @@ OPENAI_API_KEY=your-openai-key-here cd extras/openmemory-mcp docker compose up -d -# Then configure Friend-Lite +# Then configure Chronicle MEMORY_PROVIDER=openmemory_mcp OPENMEMORY_MCP_URL=http://host.docker.internal:8765 ``` diff --git a/Docs/init-system.md b/Docs/init-system.md index fb9c1763..98d7c49a 100644 --- a/Docs/init-system.md +++ b/Docs/init-system.md @@ -1,4 +1,4 @@ -# Friend-Lite Initialization System +# Chronicle Initialization System ## Quick Links @@ -10,14 +10,14 @@ ## Overview -Friend-Lite uses a unified initialization system with clean separation of concerns: +Chronicle uses a unified initialization system with clean separation of concerns: - **Configuration** (`wizard.py`) - Set up service configurations, API keys, and .env files - **Service Management** (`services.py`) - Start, stop, and manage running services The root 
orchestrator handles service selection and delegates configuration to individual service scripts. In general, setup scripts only configure and do not start services automatically. Exceptions: `extras/asr-services` and `extras/openmemory-mcp` are startup scripts. This prevents unnecessary resource usage and gives you control over when services actually run. -> **New to Friend-Lite?** Most users should start with the [Quick Start Guide](../quickstart.md) instead of this detailed reference. +> **New to Chronicle?** Most users should start with the [Quick Start Guide](../quickstart.md) instead of this detailed reference. ## Architecture @@ -133,7 +133,7 @@ Services use `host.docker.internal` for inter-container communication: ## Service Management -Friend-Lite now separates **configuration** from **service lifecycle management**: +Chronicle now separates **configuration** from **service lifecycle management**: ### Unified Service Management Use the `services.py` script for all service operations: diff --git a/Docs/ports-and-access.md b/Docs/ports-and-access.md index f93137b7..67c0fd28 100644 --- a/Docs/ports-and-access.md +++ b/Docs/ports-and-access.md @@ -1,11 +1,11 @@ -# Friend-Lite Port Configuration & User Journey +# Chronicle Port Configuration & User Journey ## User Journey: Git Clone to Running Services ### 1. 
Clone & Setup ```bash git clone -cd friend-lite +cd chronicle # Configure all services uv run --with-requirements setup-requirements.txt python init.py diff --git a/Makefile b/Makefile index 3d03a180..9c4dca6a 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ # ======================================== -# Friend-Lite Management System +# Chronicle Management System # ======================================== -# Central management interface for Friend-Lite project +# Central management interface for Chronicle project # Handles configuration, deployment, and maintenance tasks # Load environment variables from .env file @@ -25,7 +25,7 @@ K8S_SCRIPTS_DIR := $(SCRIPTS_DIR)/k8s .DEFAULT_GOAL := menu menu: ## Show interactive menu (default) - @echo "๐ŸŽฏ Friend-Lite Management System" + @echo "๐ŸŽฏ Chronicle Management System" @echo "================================" @echo @echo "๐Ÿ“‹ Quick Actions:" @@ -59,7 +59,7 @@ menu: ## Show interactive menu (default) @echo @echo "๐Ÿ”„ Mycelia Sync:" @echo " mycelia-sync-status ๐Ÿ“Š Show Mycelia OAuth sync status" - @echo " mycelia-sync-all ๐Ÿ”„ Sync all Friend-Lite users to Mycelia" + @echo " mycelia-sync-all ๐Ÿ”„ Sync all Chronicle users to Mycelia" @echo " mycelia-sync-user ๐Ÿ‘ค Sync specific user (EMAIL=user@example.com)" @echo " mycelia-check-orphans ๐Ÿ” Find orphaned Mycelia objects" @echo " mycelia-reassign-orphans โ™ป๏ธ Reassign orphans (EMAIL=admin@example.com)" @@ -75,7 +75,7 @@ menu: ## Show interactive menu (default) @echo "๐Ÿ’ก Tip: Run 'make help' for detailed help on any target" help: ## Show detailed help for all targets - @echo "๐ŸŽฏ Friend-Lite Management System - Detailed Help" + @echo "๐ŸŽฏ Chronicle Management System - Detailed Help" @echo "================================================" @echo @echo "๐Ÿ—๏ธ KUBERNETES SETUP:" @@ -110,9 +110,9 @@ help: ## Show detailed help for all targets @echo @echo "๐Ÿ”„ MYCELIA SYNC:" @echo " mycelia-sync-status Show Mycelia OAuth sync status for all users" - @echo " 
mycelia-sync-all Sync all Friend-Lite users to Mycelia OAuth" + @echo " mycelia-sync-all Sync all Chronicle users to Mycelia OAuth" @echo " mycelia-sync-user Sync specific user (EMAIL=user@example.com)" - @echo " mycelia-check-orphans Find Mycelia objects without Friend-Lite owner" + @echo " mycelia-check-orphans Find Mycelia objects without Chronicle owner" @echo " mycelia-reassign-orphans Reassign orphaned objects (EMAIL=admin@example.com)" @echo @echo "๐Ÿงช ROBOT FRAMEWORK TESTING:" @@ -158,7 +158,7 @@ setup-dev: ## Setup development environment (git hooks, pre-commit) setup-k8s: ## Initial Kubernetes setup (registry + infrastructure) @echo "๐Ÿ—๏ธ Starting Kubernetes initial setup..." - @echo "This will set up the complete infrastructure for Friend-Lite" + @echo "This will set up the complete infrastructure for Chronicle" @echo @echo "๐Ÿ“‹ Setup includes:" @echo " โ€ข Insecure registry configuration" @@ -230,10 +230,10 @@ config-k8s: ## Generate Kubernetes configuration files (ConfigMap/Secret only - @kubectl apply -f k8s-manifests/configmap.yaml -n $(APPLICATION_NAMESPACE) 2>/dev/null || echo "โš ๏ธ ConfigMap not applied (cluster not available?)" @kubectl apply -f k8s-manifests/secrets.yaml -n $(APPLICATION_NAMESPACE) 2>/dev/null || echo "โš ๏ธ Secret not applied (cluster not available?)" @echo "๐Ÿ“ฆ Copying ConfigMap and Secret to speech namespace..." 
- @kubectl get configmap friend-lite-config -n $(APPLICATION_NAMESPACE) -o yaml | \ + @kubectl get configmap chronicle-config -n $(APPLICATION_NAMESPACE) -o yaml | \ sed -e '/namespace:/d' -e '/resourceVersion:/d' -e '/uid:/d' -e '/creationTimestamp:/d' | \ kubectl apply -n speech -f - 2>/dev/null || echo "โš ๏ธ ConfigMap not copied to speech namespace" - @kubectl get secret friend-lite-secrets -n $(APPLICATION_NAMESPACE) -o yaml | \ + @kubectl get secret chronicle-secrets -n $(APPLICATION_NAMESPACE) -o yaml | \ sed -e '/namespace:/d' -e '/resourceVersion:/d' -e '/uid:/d' -e '/creationTimestamp:/d' | \ kubectl apply -n speech -f - 2>/dev/null || echo "โš ๏ธ Secret not copied to speech namespace" @echo "โœ… Kubernetes configuration files generated" @@ -353,13 +353,13 @@ audio-manage: ## Interactive audio file management mycelia-sync-status: ## Show Mycelia OAuth sync status for all users @echo "๐Ÿ“Š Checking Mycelia OAuth sync status..." - @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --status + @cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --status -mycelia-sync-all: ## Sync all Friend-Lite users to Mycelia OAuth - @echo "๐Ÿ”„ Syncing all Friend-Lite users to Mycelia OAuth..." +mycelia-sync-all: ## Sync all Chronicle users to Mycelia OAuth + @echo "๐Ÿ”„ Syncing all Chronicle users to Mycelia OAuth..." @echo "โš ๏ธ This will create OAuth credentials for users without them" @read -p "Continue? (y/N): " confirm && [ "$$confirm" = "y" ] || exit 1 - @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --sync-all + @cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --sync-all mycelia-sync-user: ## Sync specific user to Mycelia OAuth (usage: make mycelia-sync-user EMAIL=user@example.com) @echo "๐Ÿ‘ค Syncing specific user to Mycelia OAuth..." 
@@ -367,11 +367,11 @@ mycelia-sync-user: ## Sync specific user to Mycelia OAuth (usage: make mycelia-s echo "โŒ EMAIL parameter is required. Usage: make mycelia-sync-user EMAIL=user@example.com"; \ exit 1; \ fi - @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --email $(EMAIL) + @cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --email $(EMAIL) -mycelia-check-orphans: ## Find Mycelia objects without Friend-Lite owner +mycelia-check-orphans: ## Find Mycelia objects without Chronicle owner @echo "๐Ÿ” Checking for orphaned Mycelia objects..." - @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --check-orphans + @cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --check-orphans mycelia-reassign-orphans: ## Reassign orphaned objects to user (usage: make mycelia-reassign-orphans EMAIL=admin@example.com) @echo "โ™ป๏ธ Reassigning orphaned Mycelia objects..." @@ -381,7 +381,7 @@ mycelia-reassign-orphans: ## Reassign orphaned objects to user (usage: make myce fi @echo "โš ๏ธ This will reassign all orphaned objects to: $(EMAIL)" @read -p "Continue? (y/N): " confirm && [ "$$confirm" = "y" ] || exit 1 - @cd backends/advanced && uv run python scripts/sync_friendlite_mycelia.py --reassign-orphans --target-email $(EMAIL) + @cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --reassign-orphans --target-email $(EMAIL) # ======================================== # TESTING TARGETS diff --git a/README-K8S.md b/README-K8S.md index 3e282a33..0e8358c1 100644 --- a/README-K8S.md +++ b/README-K8S.md @@ -1,6 +1,6 @@ -# Friend-Lite Kubernetes Setup Guide +# Chronicle Kubernetes Setup Guide -This guide walks you through setting up Friend-Lite from scratch on a fresh Ubuntu system, including MicroK8s installation, Docker registry configuration, and deployment via Skaffold. 
+This guide walks you through setting up Chronicle from scratch on a fresh Ubuntu system, including MicroK8s installation, Docker registry configuration, and deployment via Skaffold. ## System Architecture @@ -245,7 +245,7 @@ This guide walks you through setting up Friend-Lite from scratch on a fresh Ubun ### **Directory Structure** ``` -friend-lite/ +chronicle/ โ”œโ”€โ”€ scripts/ # Kubernetes deployment and management scripts โ”‚ โ”œโ”€โ”€ deploy-all-services.sh # Deploy all services โ”‚ โ”œโ”€โ”€ cluster-status.sh # Check cluster health @@ -267,7 +267,7 @@ friend-lite/ 1. **Clone Repository** ```bash - # Clone Friend-Lite repository with submodules - git clone --recursive https://github.com/yourusername/friend-lite.git + # Clone Chronicle repository with submodules + git clone --recursive https://github.com/chronicle-ai/chronicle.git - cd friend-lite + cd chronicle # If you already cloned without --recursive, initialize submodules: @@ -392,7 +392,7 @@ The following scripts are available in the `scripts/` folder to simplify common ./scripts/cluster-status.sh # Check status of specific namespace -./scripts/cluster-status.sh friend-lite +./scripts/cluster-status.sh chronicle ``` ### **Setup Scripts** @@ -487,14 +487,14 @@ This directory contains standalone Kubernetes manifests that are not managed by 3. **Verify Deployment** ```bash # Check all resources - kubectl get all -n friend-lite + kubectl get all -n chronicle kubectl get all -n root # Check Ingress - kubectl get ingress -n friend-lite + kubectl get ingress -n chronicle # Check services - kubectl get svc -n friend-lite + kubectl get svc -n chronicle ``` ## Multi-Node Cluster Management @@ -635,14 +635,14 @@ spec: 1. **Check Application Health** ```bash # Check backend health - curl -k https://friend-lite.192-168-1-42.nip.io:32623/health + curl -k https://chronicle.192-168-1-42.nip.io:32623/health # Check WebUI - curl -k https://friend-lite.192-168-1-42.nip.io:32623/ + curl -k https://chronicle.192-168-1-42.nip.io:32623/ ``` 2.
**Access WebUI** - - Open browser to: `https://friend-lite.192-168-1-42.nip.io:32623/` + - Open browser to: `https://chronicle.192-168-1-42.nip.io:32623/` - Accept self-signed certificate warning - Create admin user account - Test audio recording functionality @@ -681,7 +681,7 @@ spec: kubectl get pods -n ingress-nginx # Check Ingress configuration (run on build machine) - kubectl describe ingress -n friend-lite + kubectl describe ingress -n chronicle ``` 4. **Build Issues** @@ -734,20 +734,20 @@ spec: ```bash # View logs (run on build machine) -kubectl logs -n friend-lite deployment/advanced-backend -kubectl logs -n friend-lite deployment/webui +kubectl logs -n chronicle deployment/advanced-backend +kubectl logs -n chronicle deployment/webui # Port forward for debugging (run on build machine) -kubectl port-forward -n friend-lite svc/advanced-backend 8000:8000 -kubectl port-forward -n friend-lite svc/webui 8080:80 +kubectl port-forward -n chronicle svc/advanced-backend 8000:8000 +kubectl port-forward -n chronicle svc/webui 8080:80 # Check resource usage (run on build machine) -kubectl top pods -n friend-lite +kubectl top pods -n chronicle kubectl top nodes # Restart deployments (run on build machine) -kubectl rollout restart deployment/advanced-backend -n friend-lite -kubectl rollout restart deployment/webui -n friend-lite +kubectl rollout restart deployment/advanced-backend -n chronicle +kubectl rollout restart deployment/webui -n chronicle ``` ## Maintenance @@ -778,7 +778,7 @@ kubectl rollout restart deployment/webui -n friend-lite cp skaffold.env skaffold.env.backup # Backup Kubernetes manifests (run on build machine) - kubectl get all -n friend-lite -o yaml > friend-lite-backup.yaml + kubectl get all -n chronicle -o yaml > chronicle-backup.yaml kubectl get all -n root -o yaml > infrastructure-backup.yaml ``` @@ -796,7 +796,7 @@ chmod +x init.sh ./init.sh ``` -This will guide you through setting up Friend-Lite using Docker Compose instead of Kubernetes. 
+This will guide you through setting up Chronicle using Docker Compose instead of Kubernetes. ## Speaker Recognition Deployment diff --git a/README.md b/README.md index 0a43076b..34027891 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Friend-Lite +# Chronicle Self-hostable AI system that captures audio/video data from OMI devices and other sources to generate memories, action items, and contextual insights about your conversations and daily interactions. diff --git a/app/README.md b/app/README.md index 6d3272f9..d73dd748 100644 --- a/app/README.md +++ b/app/README.md @@ -1,6 +1,6 @@ -# Friend-Lite Mobile App +# Chronicle Mobile App -React Native mobile application for connecting OMI devices and streaming audio to Friend-Lite backends. Supports cross-platform deployment on iOS and Android with Bluetooth integration. +React Native mobile application for connecting OMI devices and streaming audio to Chronicle backends. Supports cross-platform deployment on iOS and Android with Bluetooth integration. ## Features @@ -64,7 +64,7 @@ npx expo prebuild --clean cd ios && pod install && cd .. # Open in Xcode -open ios/friendlite.xcworkspace +open ios/chronicle.xcworkspace ``` Build and run from Xcode interface. @@ -154,7 +154,7 @@ Backend URL: wss://[ngrok-subdomain].ngrok.io/ws_pcm ## Phone Audio Streaming (NEW) ### Overview -Stream audio directly from your phone's microphone to Friend-Lite backend, bypassing Bluetooth devices. This feature provides a direct audio input method for users who want to use their phone as the audio source. +Stream audio directly from your phone's microphone to Chronicle backend, bypassing Bluetooth devices. This feature provides a direct audio input method for users who want to use their phone as the audio source. 
### Features - **Direct Microphone Access**: Use phone's built-in microphone @@ -166,7 +166,7 @@ Stream audio directly from your phone's microphone to Friend-Lite backend, bypas ### Setup & Usage #### Enable Phone Audio Streaming -1. **Open Friend-Lite app** +1. **Open Chronicle app** 2. **Configure Backend Connection** (see Backend Configuration section) 3. **Grant Microphone Permissions** when prompted 4. **Tap "Stream Phone Audio" button** in main interface @@ -175,7 +175,7 @@ Stream audio directly from your phone's microphone to Friend-Lite backend, bypas #### Requirements - **iOS**: iOS 13+ with microphone permissions - **Android**: Android API 21+ with microphone permissions -- **Network**: Stable connection to Friend-Lite backend +- **Network**: Stable connection to Chronicle backend - **Backend**: Advanced backend running with `/ws_pcm` endpoint #### Switching Audio Sources @@ -197,8 +197,8 @@ Stream audio directly from your phone's microphone to Friend-Lite backend, bypas - **Restart Recording**: Stop and restart phone audio streaming #### Permission Issues -- **iOS**: Settings > Privacy & Security > Microphone > Friend-Lite -- **Android**: Settings > Apps > Friend-Lite > Permissions > Microphone +- **iOS**: Settings > Privacy & Security > Microphone > Chronicle +- **Android**: Settings > Apps > Chronicle > Permissions > Microphone #### No Audio Level Visualization - **Restart App**: Close and reopen the application @@ -210,7 +210,7 @@ Stream audio directly from your phone's microphone to Friend-Lite backend, bypas ### Device Connection 1. **Enable Bluetooth** on your mobile device -2. **Open Friend-Lite app** +2. **Open Chronicle app** 3. 
**Pair OMI device**: - Go to Device Settings - Scan for nearby OMI devices diff --git a/app/app.json b/app/app.json index 9acdac77..c2446e12 100644 --- a/app/app.json +++ b/app/app.json @@ -1,7 +1,7 @@ { "expo": { - "name": "friend-lite-app", - "slug": "friend-lite-app", + "name": "chronicle-app", + "slug": "chronicle-app", "version": "1.0.0", "orientation": "portrait", "icon": "./assets/icon.png", @@ -17,9 +17,9 @@ ], "ios": { "supportsTablet": true, - "bundleIdentifier": "com.cupbearer5517.friendlite", + "bundleIdentifier": "com.cupbearer5517.chronicle", "infoPlist": { - "NSMicrophoneUsageDescription": "Friend-Lite needs access to your microphone to stream audio to the backend for processing." + "NSMicrophoneUsageDescription": "Chronicle needs access to your microphone to stream audio to the backend for processing." } }, "android": { @@ -27,7 +27,7 @@ "foregroundImage": "./assets/adaptive-icon.png", "backgroundColor": "#ffffff" }, - "package": "com.cupbearer5517.friendlite", + "package": "com.cupbearer5517.chronicle", "permissions": [ "android.permission.BLUETOOTH", "android.permission.BLUETOOTH_ADMIN", diff --git a/app/app/components/DeviceDetails.tsx b/app/app/components/DeviceDetails.tsx index ebf204c3..3bd22b4a 100644 --- a/app/app/components/DeviceDetails.tsx +++ b/app/app/components/DeviceDetails.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { View, Text, TouchableOpacity, StyleSheet, TextInput } from 'react-native'; -import { BleAudioCodec } from 'friend-lite-react-native'; +import { BleAudioCodec } from 'chronicle-react-native'; interface DeviceDetailsProps { // Device Info diff --git a/app/app/components/DeviceListItem.tsx b/app/app/components/DeviceListItem.tsx index a8083035..3da559de 100644 --- a/app/app/components/DeviceListItem.tsx +++ b/app/app/components/DeviceListItem.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { View, Text, TouchableOpacity, StyleSheet } from 'react-native'; -import { OmiDevice } from 
'friend-lite-react-native'; +import { OmiDevice } from 'chronicle-react-native'; interface DeviceListItemProps { device: OmiDevice; diff --git a/app/app/hooks/useAudioListener.ts b/app/app/hooks/useAudioListener.ts index 391ed125..1dcf225e 100644 --- a/app/app/hooks/useAudioListener.ts +++ b/app/app/hooks/useAudioListener.ts @@ -1,6 +1,6 @@ import { useState, useRef, useCallback, useEffect } from 'react'; import { Alert } from 'react-native'; -import { OmiConnection } from 'friend-lite-react-native'; +import { OmiConnection } from 'chronicle-react-native'; import { Subscription, ConnectionPriority } from 'react-native-ble-plx'; // OmiConnection might use this type for subscriptions interface UseAudioListener { diff --git a/app/app/hooks/useDeviceConnection.ts b/app/app/hooks/useDeviceConnection.ts index e729169e..964e4d4e 100644 --- a/app/app/hooks/useDeviceConnection.ts +++ b/app/app/hooks/useDeviceConnection.ts @@ -1,6 +1,6 @@ import { useState, useCallback } from 'react'; import { Alert } from 'react-native'; -import { OmiConnection, BleAudioCodec, OmiDevice } from 'friend-lite-react-native'; +import { OmiConnection, BleAudioCodec, OmiDevice } from 'chronicle-react-native'; interface UseDeviceConnection { connectedDevice: OmiDevice | null; diff --git a/app/app/hooks/useDeviceScanning.ts b/app/app/hooks/useDeviceScanning.ts index d7780266..f4c16ff3 100644 --- a/app/app/hooks/useDeviceScanning.ts +++ b/app/app/hooks/useDeviceScanning.ts @@ -1,6 +1,6 @@ import { useState, useEffect, useCallback, useRef } from 'react'; import { BleManager, State as BluetoothState } from 'react-native-ble-plx'; -import { OmiConnection, OmiDevice } from 'friend-lite-react-native'; // Assuming this is the correct import for Omi types +import { OmiConnection, OmiDevice } from 'chronicle-react-native'; // Assuming this is the correct import for Omi types interface UseDeviceScanning { devices: OmiDevice[]; diff --git a/app/app/index.tsx b/app/app/index.tsx index 8bb1234a..2b20cb7b 100644 
--- a/app/app/index.tsx +++ b/app/app/index.tsx @@ -1,6 +1,6 @@ import React, { useRef, useCallback, useEffect, useState } from 'react'; import { StyleSheet, Text, View, SafeAreaView, ScrollView, Platform, FlatList, ActivityIndicator, Alert, Switch, Button, TouchableOpacity, KeyboardAvoidingView } from 'react-native'; -import { OmiConnection } from 'friend-lite-react-native'; // OmiDevice also comes from here +import { OmiConnection } from 'chronicle-react-native'; // OmiDevice also comes from here import { State as BluetoothState } from 'react-native-ble-plx'; // Import State from ble-plx // Hooks @@ -521,7 +521,7 @@ export default function App() { contentContainerStyle={styles.content} keyboardShouldPersistTaps="handled" > - Friend Lite + Chronicle {/* Backend Connection - moved to top */} **Note**: This documentation covers the modern React interface located in `./webui/`. The legacy Streamlit interface has been moved to `src/_webui_original/` for reference. diff --git a/backends/advanced/Docs/architecture.md b/backends/advanced/Docs/architecture.md index 8211cb32..d5edb6a3 100644 --- a/backends/advanced/Docs/architecture.md +++ b/backends/advanced/Docs/architecture.md @@ -1,11 +1,11 @@ -# Friend-Lite Backend Architecture +# Chronicle Backend Architecture > ๐Ÿ“– **Prerequisite**: Read [quickstart.md](./quickstart.md) first for basic system understanding. ## System Overview -Friend-Lite is a comprehensive real-time conversation processing system that captures audio streams, performs speech-to-text transcription, and extracts memories. The system features a FastAPI backend with WebSocket audio streaming, versioned transcript and memory processing, a React web dashboard with search capabilities, and user authentication with role-based access control. +Chronicle is a comprehensive real-time conversation processing system that captures audio streams, performs speech-to-text transcription, and extracts memories. 
The system features a FastAPI backend with WebSocket audio streaming, versioned transcript and memory processing, a React web dashboard with search capabilities, and user authentication with role-based access control. **Core Implementation**: The complete system is implemented in `src/advanced_omi_backend/main.py` with supporting services in dedicated modules, using a modular router/controller architecture pattern. @@ -1049,7 +1049,7 @@ src/advanced_omi_backend/ "memory_versions": [ { "version_id": "version_def", - "provider": "friend_lite", + "provider": "chronicle", "created_at": "2025-01-15T10:32:00Z", "memory_count": 5 } diff --git a/backends/advanced/Docs/auth.md b/backends/advanced/Docs/auth.md index 4a3f7267..2aa7d254 100644 --- a/backends/advanced/Docs/auth.md +++ b/backends/advanced/Docs/auth.md @@ -2,7 +2,7 @@ ## Overview -Friend-Lite uses a comprehensive authentication system built on `fastapi-users` with support for multiple authentication methods including JWT tokens and cookies. The system provides secure user management with proper data isolation and role-based access control using MongoDB ObjectIds for user identification. +Chronicle uses a comprehensive authentication system built on `fastapi-users` with support for multiple authentication methods including JWT tokens and cookies. The system provides secure user management with proper data isolation and role-based access control using MongoDB ObjectIds for user identification. 
## Architecture Components @@ -269,7 +269,7 @@ echo $ADMIN_PASSWORD ### Debug Commands ```bash # Check user database -docker exec -it mongo-container mongosh friend-lite +docker exec -it mongo-container mongosh chronicle # View authentication logs -docker compose logs friend-backend | grep -i auth +docker compose logs chronicle-backend | grep -i auth diff --git a/backends/advanced/Docs/memories.md b/backends/advanced/Docs/memories.md index 06aa3f60..b2887dc9 100644 --- a/backends/advanced/Docs/memories.md +++ b/backends/advanced/Docs/memories.md @@ -2,7 +2,7 @@ > ๐Ÿ“– **Prerequisite**: Read [quickstart.md](./quickstart.md) first for system overview. -This document explains how to configure and customize the memory service in the friend-lite backend. +This document explains how to configure and customize the memory service in the chronicle backend. **Code References**: - **Main Implementation**: `src/memory/memory_service.py` @@ -65,7 +65,7 @@ OLLAMA_BASE_URL=http://192.168.0.110:11434 QDRANT_BASE_URL=localhost # Mem0 Organization Settings (optional) -MEM0_ORGANIZATION_ID=friend-lite-org +MEM0_ORGANIZATION_ID=chronicle-org MEM0_PROJECT_ID=audio-conversations MEM0_APP_ID=omi-backend @@ -391,7 +391,7 @@ process_memory.add( "timestamp": 1720616655, "conversation_context": "audio_transcription", "device_type": "audio_recording", - "organization_id": "friend-lite-org", + "organization_id": "chronicle-org", "project_id": "audio-conversations", "app_id": "omi-backend" } @@ -583,7 +583,7 @@ The memory service exposes these endpoints with enhanced search capabilities: - **Vector-based**: Uses embeddings for contextual understanding beyond keyword matching **Memory Count API**: -- **Friend-Lite Provider**: Native Qdrant count API provides accurate total counts +- **Chronicle Provider**: Native Qdrant count API provides accurate total counts - **OpenMemory MCP Provider**: Count support varies by OpenMemory implementation - **Response Format**: `{"memories": [...], "total_count": 42}` when supported diff --git 
a/backends/advanced/Docs/quickstart.md b/backends/advanced/Docs/quickstart.md index 523218bc..fc5a77b7 100644 --- a/backends/advanced/Docs/quickstart.md +++ b/backends/advanced/Docs/quickstart.md @@ -1,14 +1,14 @@ -# Friend-Lite Backend Quickstart Guide +# Chronicle Backend Quickstart Guide -> ๐Ÿ“– **New to friend-lite?** This is your starting point! After reading this, continue with [architecture.md](./architecture.md) for technical details. +> ๐Ÿ“– **New to chronicle?** This is your starting point! After reading this, continue with [architecture.md](./architecture.md) for technical details. ## Overview -Friend-Lite is an eco-system of services to support "AI wearable" agents/functionality. +Chronicle is an eco-system of services to support "AI wearable" agents/functionality. At the moment, the basic functionalities are: - Audio capture (via WebSocket, from OMI device, files, or a laptop) - Audio transcription -- **Advanced memory system** with pluggable providers (Friend-Lite native or OpenMemory MCP) +- **Advanced memory system** with pluggable providers (Chronicle native or OpenMemory MCP) - **Enhanced memory extraction** with individual fact storage and smart updates - **Semantic memory search** with relevance threshold filtering and live results - Action item extraction @@ -36,13 +36,13 @@ cd backends/advanced - **Authentication**: Admin email/password setup - **Transcription Provider**: Choose Deepgram, Mistral, or Offline (Parakeet) - **LLM Provider**: Choose OpenAI or Ollama for memory extraction -- **Memory Provider**: Choose Friend-Lite Native or OpenMemory MCP +- **Memory Provider**: Choose Chronicle Native or OpenMemory MCP - **Optional Services**: Speaker Recognition and other extras - **Network Configuration**: Ports and host settings **Example flow:** ``` -๐Ÿš€ Friend-Lite Interactive Setup +๐Ÿš€ Chronicle Interactive Setup =============================================== โ–บ Authentication Setup @@ -124,13 +124,13 @@ ADMIN_EMAIL=admin@example.com 
**Memory Provider Configuration:** ```bash # Memory Provider (Choose One) -# Option 1: Friend-Lite Native (Default - Recommended) -MEMORY_PROVIDER=friend_lite +# Option 1: Chronicle Native (Default - Recommended) +MEMORY_PROVIDER=chronicle # Option 2: OpenMemory MCP (Cross-client compatibility) # MEMORY_PROVIDER=openmemory_mcp # OPENMEMORY_MCP_URL=http://host.docker.internal:8765 -# OPENMEMORY_CLIENT_NAME=friend_lite +# OPENMEMORY_CLIENT_NAME=chronicle # OPENMEMORY_USER_ID=openmemory ``` @@ -323,8 +323,8 @@ curl -X POST "http://localhost:8000/api/audio/upload" \ ### Memory & Intelligence #### Pluggable Memory System -- **Two memory providers**: Choose between Friend-Lite native or OpenMemory MCP -- **Friend-Lite Provider**: Full control with custom extraction, individual fact storage, smart deduplication +- **Two memory providers**: Choose between Chronicle native or OpenMemory MCP +- **Chronicle Provider**: Full control with custom extraction, individual fact storage, smart deduplication - **OpenMemory MCP Provider**: Cross-client compatibility (Claude Desktop, Cursor, Windsurf), professional processing #### Enhanced Memory Processing @@ -480,7 +480,7 @@ tailscale ip -4 ## Data Architecture -The friend-lite backend uses a **user-centric data architecture**: +The chronicle backend uses a **user-centric data architecture**: - **All memories are keyed by database user_id** (not client_id) - **Client information is stored in metadata** for reference and debugging @@ -493,12 +493,12 @@ For detailed information, see [User Data Architecture](user-data-architecture.md ### Choosing a Memory Provider -Friend-Lite offers two memory backends: +Chronicle offers two memory backends: -#### 1. Friend-Lite Native +#### 1. 
Chronicle Native ```bash # In your .env file -MEMORY_PROVIDER=friend_lite +MEMORY_PROVIDER=chronicle LLM_PROVIDER=openai OPENAI_API_KEY=your-openai-key-here ``` @@ -517,7 +517,7 @@ OPENAI_API_KEY=your-openai-key-here cd extras/openmemory-mcp docker compose up -d -# Then configure Friend-Lite +# Then configure Chronicle MEMORY_PROVIDER=openmemory_mcp OPENMEMORY_MCP_URL=http://host.docker.internal:8765 ``` diff --git a/backends/advanced/docker-compose-test.yml b/backends/advanced/docker-compose-test.yml index c68465a8..f72ca54d 100644 --- a/backends/advanced/docker-compose-test.yml +++ b/backends/advanced/docker-compose-test.yml @@ -3,7 +3,7 @@ # Uses different ports to avoid conflicts with development environment services: - friend-backend-test: + chronicle-backend-test: build: context: . dockerfile: Dockerfile @@ -36,7 +36,7 @@ services: - TRANSCRIPTION_PROVIDER=${TRANSCRIPTION_PROVIDER:-deepgram} # - PARAKEET_ASR_URL=${PARAKEET_ASR_URL} # Memory provider configuration - - MEMORY_PROVIDER=${MEMORY_PROVIDER:-friend_lite} + - MEMORY_PROVIDER=${MEMORY_PROVIDER:-chronicle} - OPENMEMORY_MCP_URL=${OPENMEMORY_MCP_URL:-http://host.docker.internal:8765} - OPENMEMORY_USER_ID=${OPENMEMORY_USER_ID:-openmemory} - MYCELIA_URL=http://mycelia-backend-test:5173 @@ -76,7 +76,7 @@ services: ports: - "3001:80" # Avoid conflict with dev on 3000 depends_on: - friend-backend-test: + chronicle-backend-test: condition: service_healthy mongo-test: condition: service_healthy @@ -146,7 +146,7 @@ services: - ADMIN_PASSWORD=test-admin-password-123 - ADMIN_EMAIL=test-admin@example.com - TRANSCRIPTION_PROVIDER=${TRANSCRIPTION_PROVIDER:-deepgram} - - MEMORY_PROVIDER=${MEMORY_PROVIDER:-friend_lite} + - MEMORY_PROVIDER=${MEMORY_PROVIDER:-chronicle} - OPENMEMORY_MCP_URL=${OPENMEMORY_MCP_URL:-http://host.docker.internal:8765} - OPENMEMORY_USER_ID=${OPENMEMORY_USER_ID:-openmemory} - MYCELIA_URL=http://mycelia-backend-test:5173 @@ -158,7 +158,7 @@ services: # Wait for audio queue to drain before timing 
out (test mode) - WAIT_FOR_AUDIO_QUEUE_DRAIN=true depends_on: - friend-backend-test: + chronicle-backend-test: condition: service_healthy mongo-test: condition: service_healthy @@ -169,58 +169,58 @@ services: restart: unless-stopped # Mycelia - AI memory and timeline service (test environment) - mycelia-backend-test: - build: - context: ../../extras/mycelia/backend - dockerfile: Dockerfile.simple - ports: - - "5100:5173" # Test backend port - environment: - # Shared JWT secret for Friend-Lite authentication (test key) - - JWT_SECRET=test-jwt-signing-key-for-integration-tests - - SECRET_KEY=test-jwt-signing-key-for-integration-tests - # MongoDB connection (test database) - - MONGO_URL=mongodb://mongo-test:27017 - - MONGO_DB=mycelia_test - - DATABASE_NAME=mycelia_test - # Redis connection (ioredis uses individual host/port, not URL) - - REDIS_HOST=redis-test - - REDIS_PORT=6379 - volumes: - - ../../extras/mycelia/backend/app:/app/app # Mount source for development - depends_on: - mongo-test: - condition: service_healthy - redis-test: - condition: service_started - healthcheck: - test: ["CMD", "deno", "eval", "fetch('http://localhost:5173/health').then(r => r.ok ? 
Deno.exit(0) : Deno.exit(1))"] - interval: 30s - timeout: 10s - retries: 3 - start_period: 5s - restart: unless-stopped - profiles: - - mycelia + # mycelia-backend-test: + # build: + # context: ../../extras/mycelia/backend + # dockerfile: Dockerfile.simple + # ports: + # - "5100:5173" # Test backend port + # environment: + # # Shared JWT secret for Chronicle authentication (test key) + # - JWT_SECRET=test-jwt-signing-key-for-integration-tests + # - SECRET_KEY=test-jwt-signing-key-for-integration-tests + # # MongoDB connection (test database) + # - MONGO_URL=mongodb://mongo-test:27017 + # - MONGO_DB=mycelia_test + # - DATABASE_NAME=mycelia_test + # # Redis connection (ioredis uses individual host/port, not URL) + # - REDIS_HOST=redis-test + # - REDIS_PORT=6379 + # volumes: + # - ../../extras/mycelia/backend/app:/app/app # Mount source for development + # depends_on: + # mongo-test: + # condition: service_healthy + # redis-test: + # condition: service_started + # healthcheck: + # test: ["CMD", "deno", "eval", "fetch('http://localhost:5173/health').then(r => r.ok ? 
Deno.exit(0) : Deno.exit(1))"] + # interval: 30s + # timeout: 10s + # retries: 3 + # start_period: 5s + # restart: unless-stopped + # profiles: + # - mycelia - mycelia-frontend-test: - build: - context: ../../extras/mycelia - dockerfile: frontend/Dockerfile.simple - args: - - VITE_API_URL=http://localhost:5100 - ports: - - "3002:8080" # Nginx serves on 8080 internally - environment: - - VITE_API_URL=http://localhost:5100 - volumes: - - ../../extras/mycelia/frontend/src:/app/src # Mount source for development - depends_on: - mycelia-backend-test: - condition: service_healthy - restart: unless-stopped - profiles: - - mycelia + # mycelia-frontend-test: + # build: + # context: ../../extras/mycelia + # dockerfile: frontend/Dockerfile.simple + # args: + # - VITE_API_URL=http://localhost:5100 + # ports: + # - "3002:8080" # Nginx serves on 8080 internally + # environment: + # - VITE_API_URL=http://localhost:5100 + # volumes: + # - ../../extras/mycelia/frontend/src:/app/src # Mount source for development + # depends_on: + # mycelia-backend-test: + # condition: service_healthy + # restart: unless-stopped + # profiles: + # - mycelia # caddy: # image: caddy:2-alpine @@ -234,7 +234,7 @@ services: # depends_on: # webui-test: # condition: service_started - # friend-backend-test: + # chronicle-backend-test: # condition: service_healthy # restart: unless-stopped diff --git a/backends/advanced/docker-compose.yml b/backends/advanced/docker-compose.yml index d9d58dca..ea2f936b 100644 --- a/backends/advanced/docker-compose.yml +++ b/backends/advanced/docker-compose.yml @@ -1,5 +1,5 @@ services: - friend-backend: + chronicle-backend: build: context: . 
dockerfile: Dockerfile @@ -52,7 +52,7 @@ services: restart: unless-stopped # Unified Worker Container - # No CUDA needed for friend-backend and workers, workers only orchestrate jobs and call external services + # No CUDA needed for chronicle-backend and workers, workers only orchestrate jobs and call external services # Runs all workers in a single container for efficiency: # - 3 RQ workers (transcription, memory, default queues) # - 1 Audio stream worker (Redis Streams consumer - must be single to maintain sequential chunks) @@ -102,7 +102,7 @@ services: # - "${WEBUI_PORT:-3010}:80" - 3010:80 depends_on: - friend-backend: + chronicle-backend: condition: service_healthy restart: unless-stopped @@ -119,7 +119,7 @@ services: - caddy_data:/data - caddy_config:/config depends_on: - friend-backend: + chronicle-backend: condition: service_healthy restart: unless-stopped profiles: @@ -138,7 +138,7 @@ services: - ./webui/src:/app/src - ./webui/public:/app/public depends_on: - friend-backend: + chronicle-backend: condition: service_healthy profiles: - dev @@ -216,7 +216,7 @@ services: # UNCOMMENT OUT FOR LOCAL DEMO - EXPOSES to internet # ngrok: # image: ngrok/ngrok:latest - # depends_on: [friend-backend, proxy] + # depends_on: [chronicle-backend, proxy] # ports: # - "4040:4040" # Ngrok web interface # environment: diff --git a/backends/advanced/init-https.sh b/backends/advanced/init-https.sh index 8cca1ba1..cfeebf61 100755 --- a/backends/advanced/init-https.sh +++ b/backends/advanced/init-https.sh @@ -1,7 +1,7 @@ #!/bin/bash set -e -# Initialize Friend-Lite Advanced Backend with HTTPS proxy +# Initialize Chronicle Advanced Backend with HTTPS proxy # Usage: ./init.sh if [ $# -ne 1 ]; then @@ -23,7 +23,7 @@ if ! 
echo "$TAILSCALE_IP" | grep -E '^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{ exit 1 fi -echo "๐Ÿš€ Initializing Friend-Lite Advanced Backend with Tailscale IP: $TAILSCALE_IP" +echo "๐Ÿš€ Initializing Chronicle Advanced Backend with Tailscale IP: $TAILSCALE_IP" echo "" # Check if nginx.conf.template exists @@ -98,7 +98,7 @@ echo " ๐Ÿ“ฑ Navigate to Live Record page" echo " ๐ŸŽค Microphone access will work over HTTPS" echo "" echo "๐Ÿ”ง Services included:" -echo " - Friend-Lite Backend: Internal (proxied through nginx)" +echo " - Chronicle Backend: Internal (proxied through nginx)" echo " - Web Dashboard: https://localhost/ or https://$TAILSCALE_IP/" echo " - WebSocket Audio: wss://localhost/ws_pcm or wss://$TAILSCALE_IP/ws_pcm" echo "" diff --git a/backends/advanced/init.py b/backends/advanced/init.py index 76d27aea..773ea11e 100644 --- a/backends/advanced/init.py +++ b/backends/advanced/init.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -Friend-Lite Advanced Backend Interactive Setup Script +Chronicle Advanced Backend Interactive Setup Script Interactive configuration for all services and API keys """ @@ -22,7 +22,7 @@ from rich.text import Text -class FriendLiteSetup: +class ChronicleSetup: def __init__(self, args=None): self.console = Console() self.config: Dict[str, Any] = {} @@ -265,26 +265,26 @@ def setup_memory(self): self.print_section("Memory Storage Configuration") choices = { - "1": "Friend-Lite Native (Qdrant + custom extraction)", + "1": "Chronicle Native (Qdrant + custom extraction)", "2": "OpenMemory MCP (cross-client compatible, external server)" } choice = self.prompt_choice("Choose your memory storage backend:", choices, "1") if choice == "1": - self.config["MEMORY_PROVIDER"] = "friend_lite" - self.console.print("[blue][INFO][/blue] Friend-Lite Native memory provider selected") + self.config["MEMORY_PROVIDER"] = "chronicle" + self.console.print("[blue][INFO][/blue] Chronicle Native memory provider selected") qdrant_url = self.prompt_value("Qdrant 
URL", "qdrant") self.config["QDRANT_BASE_URL"] = qdrant_url - self.console.print("[green][SUCCESS][/green] Friend-Lite memory provider configured") + self.console.print("[green][SUCCESS][/green] Chronicle memory provider configured") elif choice == "2": self.config["MEMORY_PROVIDER"] = "openmemory_mcp" self.console.print("[blue][INFO][/blue] OpenMemory MCP selected") mcp_url = self.prompt_value("OpenMemory MCP server URL", "http://host.docker.internal:8765") - client_name = self.prompt_value("OpenMemory client name", "friend_lite") + client_name = self.prompt_value("OpenMemory client name", "chronicle") user_id = self.prompt_value("OpenMemory user ID", "openmemory") self.config["OPENMEMORY_MCP_URL"] = mcp_url @@ -473,7 +473,7 @@ def show_summary(self): self.console.print(f"โœ… Admin Account: {self.config.get('ADMIN_EMAIL', 'Not configured')}") self.console.print(f"โœ… Transcription: {self.config.get('TRANSCRIPTION_PROVIDER', 'Not configured')}") self.console.print(f"โœ… LLM Provider: {self.config.get('LLM_PROVIDER', 'Not configured')}") - self.console.print(f"โœ… Memory Provider: {self.config.get('MEMORY_PROVIDER', 'friend_lite')}") + self.console.print(f"โœ… Memory Provider: {self.config.get('MEMORY_PROVIDER', 'chronicle')}") # Auto-determine URLs based on HTTPS configuration if self.config.get('HTTPS_ENABLED') == 'true': server_ip = self.config.get('SERVER_IP', 'localhost') @@ -523,8 +523,8 @@ def show_next_steps(self): def run(self): """Run the complete setup process""" - self.print_header("๐Ÿš€ Friend-Lite Interactive Setup") - self.console.print("This wizard will help you configure Friend-Lite with all necessary services.") + self.print_header("๐Ÿš€ Chronicle Interactive Setup") + self.console.print("This wizard will help you configure Chronicle with all necessary services.") self.console.print("We'll ask for your API keys and preferences step by step.") self.console.print() @@ -569,7 +569,7 @@ def run(self): def main(): """Main entry point""" - parser = 
argparse.ArgumentParser(description="Friend-Lite Advanced Backend Setup") + parser = argparse.ArgumentParser(description="Chronicle Advanced Backend Setup") parser.add_argument("--speaker-service-url", help="Speaker Recognition service URL (default: prompt user)") parser.add_argument("--parakeet-asr-url", @@ -581,7 +581,7 @@ def main(): args = parser.parse_args() - setup = FriendLiteSetup(args) + setup = ChronicleSetup(args) setup.run() diff --git a/backends/advanced/scripts/create_mycelia_api_key.py b/backends/advanced/scripts/create_mycelia_api_key.py index b12d81ed..1e4bcb90 100755 --- a/backends/advanced/scripts/create_mycelia_api_key.py +++ b/backends/advanced/scripts/create_mycelia_api_key.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -"""Create a proper Mycelia API key (not OAuth client) for Friend-Lite user.""" +"""Create a proper Mycelia API key (not OAuth client) for Chronicle user.""" import base64 import os @@ -72,7 +72,7 @@ def main(): "hashedKey": hashed_key, # Note: hashedKey, not hash! "salt": base64.b64encode(salt).decode('utf-8'), # Store as base64 like Mycelia "owner": USER_ID, - "name": "Friend-Lite Integration", + "name": "Chronicle Integration", "policies": [ { "resource": "**", diff --git a/backends/advanced/scripts/sync_friendlite_mycelia.py b/backends/advanced/scripts/sync_friendlite_mycelia.py index c7051f2c..3849a5a9 100644 --- a/backends/advanced/scripts/sync_friendlite_mycelia.py +++ b/backends/advanced/scripts/sync_friendlite_mycelia.py @@ -1,25 +1,25 @@ #!/usr/bin/env python3 """ -Sync Friend-Lite users with Mycelia OAuth credentials. +Sync Chronicle users with Mycelia OAuth credentials. -This script helps migrate existing Friend-Lite installations to use Mycelia, -or sync existing Mycelia installations with Friend-Lite users. +This script helps migrate existing Chronicle installations to use Mycelia, +or sync existing Mycelia installations with Chronicle users. 
Usage: # Dry run (preview changes) - python scripts/sync_friendlite_mycelia.py --dry-run + python scripts/sync_chronicle_mycelia.py --dry-run # Sync all users - python scripts/sync_friendlite_mycelia.py --sync-all + python scripts/sync_chronicle_mycelia.py --sync-all # Sync specific user - python scripts/sync_friendlite_mycelia.py --email admin@example.com + python scripts/sync_chronicle_mycelia.py --email admin@example.com # Check for orphaned Mycelia objects - python scripts/sync_friendlite_mycelia.py --check-orphans + python scripts/sync_chronicle_mycelia.py --check-orphans # Reassign orphaned objects to a user - python scripts/sync_friendlite_mycelia.py --reassign-orphans --target-email admin@example.com + python scripts/sync_chronicle_mycelia.py --reassign-orphans --target-email admin@example.com Environment Variables: MONGODB_URI or MONGO_URL - MongoDB connection string @@ -41,18 +41,18 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src')) -class FriendLiteMyceliaSync: - """Sync Friend-Lite users with Mycelia OAuth credentials.""" +class ChronicleMyceliaSync: + """Sync Chronicle users with Mycelia OAuth credentials.""" - def __init__(self, mongo_url: str, mycelia_db: str, friendlite_db: str): + def __init__(self, mongo_url: str, mycelia_db: str, chronicle_db: str): self.mongo_url = mongo_url self.mycelia_db = mycelia_db - self.friendlite_db = friendlite_db + self.chronicle_db = chronicle_db self.client = MongoClient(mongo_url) print(f"๐Ÿ“Š Connected to MongoDB:") print(f" URL: {mongo_url}") - print(f" Friend-Lite DB: {friendlite_db}") + print(f" Chronicle DB: {chronicle_db}") print(f" Mycelia DB: {mycelia_db}\n") def _hash_api_key_with_salt(self, api_key: str, salt: bytes) -> str: @@ -62,9 +62,9 @@ def _hash_api_key_with_salt(self, api_key: str, salt: bytes) -> str: h.update(api_key.encode('utf-8')) return base64.b64encode(h.digest()).decode('utf-8') - def get_all_friendlite_users(self) -> List[Dict]: - """Get all users from Friend-Lite 
database.""" - db = self.client[self.friendlite_db] + def get_all_chronicle_users(self) -> List[Dict]: + """Get all users from Chronicle database.""" + db = self.client[self.chronicle_db] users = list(db["users"].find({})) return users @@ -84,7 +84,7 @@ def get_mycelia_api_key_for_user(self, user_id: str) -> Optional[Dict]: return api_key def create_mycelia_api_key(self, user_id: str, user_email: str, dry_run: bool = False) -> Tuple[str, str]: - """Create a Mycelia API key for a Friend-Lite user.""" + """Create a Mycelia API key for a Chronicle user.""" # Generate API key random_part = secrets.token_urlsafe(32) api_key = f"mycelia_{random_part}" @@ -96,7 +96,7 @@ def create_mycelia_api_key(self, user_id: str, user_email: str, dry_run: bool = "hashedKey": hashed_key, "salt": base64.b64encode(salt).decode('utf-8'), "owner": user_id, - "name": f"Friend-Lite Auto ({user_email})", + "name": f"Chronicle Auto ({user_email})", "policies": [{"resource": "**", "action": "*", "effect": "allow"}], "openPrefix": open_prefix, "createdAt": datetime.utcnow(), @@ -111,8 +111,8 @@ def create_mycelia_api_key(self, user_id: str, user_email: str, dry_run: bool = result = db["api_keys"].insert_one(api_key_doc) client_id = str(result.inserted_id) - # Update Friend-Lite user document - fl_db = self.client[self.friendlite_db] + # Update Chronicle user document + fl_db = self.client[self.chronicle_db] fl_db["users"].update_one( {"_id": ObjectId(user_id)}, { @@ -156,13 +156,13 @@ def sync_user(self, user: Dict, dry_run: bool = False) -> bool: return False def sync_all_users(self, dry_run: bool = False): - """Sync all Friend-Lite users to Mycelia OAuth.""" - users = self.get_all_friendlite_users() + """Sync all Chronicle users to Mycelia OAuth.""" + users = self.get_all_chronicle_users() print(f"{'='*80}") print(f"SYNC ALL USERS") print(f"{'='*80}") - print(f"Found {len(users)} Friend-Lite users\n") + print(f"Found {len(users)} Chronicle users\n") if dry_run: print("๐Ÿ” DRY RUN MODE - No 
changes will be made\n") @@ -180,8 +180,8 @@ def sync_all_users(self, dry_run: bool = False): print(f"{'='*80}\n") def check_orphaned_objects(self): - """Find Mycelia objects with userId not matching any Friend-Lite user.""" - users = self.get_all_friendlite_users() + """Find Mycelia objects with userId not matching any Chronicle user.""" + users = self.get_all_chronicle_users() user_ids = {str(user["_id"]) for user in users} objects = self.get_all_mycelia_objects() @@ -189,7 +189,7 @@ def check_orphaned_objects(self): print(f"{'='*80}") print(f"ORPHANED OBJECTS CHECK") print(f"{'='*80}") - print(f"Friend-Lite users: {len(user_ids)}") + print(f"Chronicle users: {len(user_ids)}") print(f"Mycelia objects: {len(objects)}\n") orphaned = [] @@ -229,20 +229,20 @@ def check_orphaned_objects(self): return orphaned def reassign_orphaned_objects(self, target_email: str, dry_run: bool = False): - """Reassign all orphaned objects to a specific Friend-Lite user.""" + """Reassign all orphaned objects to a specific Chronicle user.""" # Get target user - fl_db = self.client[self.friendlite_db] + fl_db = self.client[self.chronicle_db] target_user = fl_db["users"].find_one({"email": target_email}) if not target_user: - print(f"โœ— User with email '{target_email}' not found in Friend-Lite") + print(f"โœ— User with email '{target_email}' not found in Chronicle") return target_user_id = str(target_user["_id"]) print(f"Target user: {target_email} (ID: {target_user_id})\n") # Find orphaned objects - users = self.get_all_friendlite_users() + users = self.get_all_chronicle_users() user_ids = {str(user["_id"]) for user in users} objects = self.get_all_mycelia_objects() @@ -291,7 +291,7 @@ def reassign_orphaned_objects(self, target_email: str, dry_run: bool = False): def display_sync_status(self): """Display current sync status.""" - users = self.get_all_friendlite_users() + users = self.get_all_chronicle_users() print(f"{'='*80}") print(f"SYNC STATUS") @@ -326,13 +326,13 @@ def 
display_sync_status(self): def main(): parser = argparse.ArgumentParser( - description="Sync Friend-Lite users with Mycelia OAuth credentials", + description="Sync Chronicle users with Mycelia OAuth credentials", formatter_class=argparse.RawDescriptionHelpFormatter, epilog=__doc__ ) parser.add_argument("--dry-run", action="store_true", help="Preview changes without making them") - parser.add_argument("--sync-all", action="store_true", help="Sync all Friend-Lite users") + parser.add_argument("--sync-all", action="store_true", help="Sync all Chronicle users") parser.add_argument("--email", type=str, help="Sync specific user by email") parser.add_argument("--check-orphans", action="store_true", help="Check for orphaned Mycelia objects") parser.add_argument("--reassign-orphans", action="store_true", help="Reassign orphaned objects to target user") @@ -346,14 +346,14 @@ def main(): # Extract database name from MONGODB_URI if present if "/" in mongo_url and mongo_url.count("/") >= 3: - friendlite_db = mongo_url.split("/")[-1].split("?")[0] or "friend-lite" + chronicle_db = mongo_url.split("/")[-1].split("?")[0] or "chronicle" else: - friendlite_db = "friend-lite" + chronicle_db = "chronicle" mycelia_db = os.getenv("MYCELIA_DB", os.getenv("DATABASE_NAME", "mycelia")) # Create sync service - sync = FriendLiteMyceliaSync(mongo_url, mycelia_db, friendlite_db) + sync = ChronicleMyceliaSync(mongo_url, mycelia_db, chronicle_db) # Execute requested action if args.status: @@ -361,7 +361,7 @@ def main(): elif args.sync_all: sync.sync_all_users(dry_run=args.dry_run) elif args.email: - fl_db = sync.client[friendlite_db] + fl_db = sync.client[chronicle_db] user = fl_db["users"].find_one({"email": args.email}) if user: sync.sync_user(user, dry_run=args.dry_run) diff --git a/backends/advanced/setup-https.sh b/backends/advanced/setup-https.sh index 51f98fe9..e0f733df 100755 --- a/backends/advanced/setup-https.sh +++ b/backends/advanced/setup-https.sh @@ -1,7 +1,7 @@ #!/bin/bash set -e 
-# Friend-Lite Advanced Backend Initialization Script +# Chronicle Advanced Backend Initialization Script # Comprehensive setup for all configuration files and optional services # Colors for output @@ -73,8 +73,8 @@ if [ ! -f "pyproject.toml" ] || [ ! -d "src" ]; then exit 1 fi -print_header "Friend-Lite Advanced Backend Initialization" -echo "This script will help you set up the Friend-Lite backend with all necessary configurations." +print_header "Chronicle Advanced Backend Initialization" +echo "This script will help you set up the Chronicle backend with all necessary configurations." echo "" # Function to prompt yes/no diff --git a/backends/advanced/src/advanced_omi_backend/app_config.py b/backends/advanced/src/advanced_omi_backend/app_config.py index 4bef6593..fcab2d12 100644 --- a/backends/advanced/src/advanced_omi_backend/app_config.py +++ b/backends/advanced/src/advanced_omi_backend/app_config.py @@ -1,5 +1,5 @@ """ -Application configuration for Friend-Lite backend. +Application configuration for Chronicle backend. Centralizes all application-level configuration including database connections, service configurations, and environment variables that were previously in main.py. 
@@ -67,7 +67,7 @@ def __init__(self): # External Services Configuration self.qdrant_base_url = os.getenv("QDRANT_BASE_URL", "qdrant") self.qdrant_port = os.getenv("QDRANT_PORT", "6333") - self.memory_provider = os.getenv("MEMORY_PROVIDER", "friend_lite").lower() + self.memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() # Redis Configuration self.redis_url = os.getenv("REDIS_URL", "redis://localhost:6379/0") @@ -84,7 +84,7 @@ def __init__(self): self.max_workers = os.cpu_count() or 4 # Memory service configuration - self.memory_service_supports_threshold = self.memory_provider == "friend_lite" + self.memory_service_supports_threshold = self.memory_provider == "chronicle" # Global configuration instance diff --git a/backends/advanced/src/advanced_omi_backend/app_factory.py b/backends/advanced/src/advanced_omi_backend/app_factory.py index 65b1adbf..7ccda184 100644 --- a/backends/advanced/src/advanced_omi_backend/app_factory.py +++ b/backends/advanced/src/advanced_omi_backend/app_factory.py @@ -1,5 +1,5 @@ """ -Application factory for Friend-Lite backend. +Application factory for Chronicle backend. Creates and configures the FastAPI application with all routers, middleware, and service initializations. 
diff --git a/backends/advanced/src/advanced_omi_backend/auth.py b/backends/advanced/src/advanced_omi_backend/auth.py index d06e9e68..b0b7ffc1 100644 --- a/backends/advanced/src/advanced_omi_backend/auth.py +++ b/backends/advanced/src/advanced_omi_backend/auth.py @@ -120,13 +120,15 @@ def generate_jwt_for_user(user_id: str, user_email: str) -> str: >>> token = generate_jwt_for_user("507f1f77bcf86cd799439011", "user@example.com") >>> # Use token to call Mycelia API """ - # Create JWT payload matching Friend-Lite's standard format + + + # Create JWT payload matching Chronicle's standard format payload = { "sub": user_id, # Subject = user ID "email": user_email, - "iss": "friend-lite", # Issuer - "aud": "friend-lite", # Audience - "exp": datetime.utcnow() + timedelta(seconds=JWT_LIFETIME_SECONDS), + "iss": "chronicle", # Issuer + "aud": "chronicle", # Audience + "exp": datetime.utcnow() + timedelta(hours=24), # 24 hour expiration "iat": datetime.utcnow(), # Issued at } diff --git a/backends/advanced/src/advanced_omi_backend/chat_service.py b/backends/advanced/src/advanced_omi_backend/chat_service.py index 4ec5ecff..1cd1a2e3 100644 --- a/backends/advanced/src/advanced_omi_backend/chat_service.py +++ b/backends/advanced/src/advanced_omi_backend/chat_service.py @@ -1,5 +1,5 @@ """ -Chat service implementation for Friend-Lite with memory integration. +Chat service implementation for Chronicle with memory integration. This module provides: - Chat session management with MongoDB persistence diff --git a/backends/advanced/src/advanced_omi_backend/clients/__init__.py b/backends/advanced/src/advanced_omi_backend/clients/__init__.py index 099f3c45..70c41823 100644 --- a/backends/advanced/src/advanced_omi_backend/clients/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/clients/__init__.py @@ -1,4 +1,4 @@ -"""Client implementations for Friend-Lite backend. +"""Client implementations for Chronicle backend. 
This module provides reusable client implementations that can be used for: - Integration testing diff --git a/backends/advanced/src/advanced_omi_backend/config.py b/backends/advanced/src/advanced_omi_backend/config.py index 4fe83d60..2b07a8d4 100644 --- a/backends/advanced/src/advanced_omi_backend/config.py +++ b/backends/advanced/src/advanced_omi_backend/config.py @@ -1,5 +1,5 @@ """ -Configuration management for Friend-Lite backend. +Configuration management for Chronicle backend. Currently contains diarization settings because they were used in multiple places causing circular imports. Other configurations can be moved here as needed. diff --git a/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py b/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py index 9341cc59..27b2810f 100644 --- a/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py +++ b/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py @@ -24,13 +24,13 @@ async def get_current_metrics(): """Get current system metrics.""" try: # Get memory provider configuration - memory_provider = os.getenv("MEMORY_PROVIDER", "friend_lite").lower() + memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() # Get basic system metrics metrics = { "timestamp": int(time.time()), "memory_provider": memory_provider, - "memory_provider_supports_threshold": memory_provider == "friend_lite", + "memory_provider_supports_threshold": memory_provider == "chronicle", } return metrics @@ -470,10 +470,10 @@ async def delete_all_user_memories(user: User): async def get_memory_provider(): """Get current memory provider configuration.""" try: - current_provider = os.getenv("MEMORY_PROVIDER", "friend_lite").lower() + current_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() # Get available providers - available_providers = ["friend_lite", "openmemory_mcp", "mycelia"] + available_providers = ["chronicle", "openmemory_mcp", 
"mycelia"] return { "current_provider": current_provider, @@ -493,7 +493,7 @@ async def set_memory_provider(provider: str): try: # Validate provider provider = provider.lower().strip() - valid_providers = ["friend_lite", "openmemory_mcp", "mycelia"] + valid_providers = ["chronicle", "openmemory_mcp", "mycelia"] if provider not in valid_providers: return JSONResponse( diff --git a/backends/advanced/src/advanced_omi_backend/controllers/websocket_controller.py b/backends/advanced/src/advanced_omi_backend/controllers/websocket_controller.py index a4338f2b..e138a6e5 100644 --- a/backends/advanced/src/advanced_omi_backend/controllers/websocket_controller.py +++ b/backends/advanced/src/advanced_omi_backend/controllers/websocket_controller.py @@ -1,6 +1,6 @@ """ -WebSocket controller for Friend-Lite backend. +WebSocket controller for Chronicle backend. This module handles WebSocket connections for audio streaming. """ diff --git a/backends/advanced/src/advanced_omi_backend/database.py b/backends/advanced/src/advanced_omi_backend/database.py index 822878e5..ae7650b0 100644 --- a/backends/advanced/src/advanced_omi_backend/database.py +++ b/backends/advanced/src/advanced_omi_backend/database.py @@ -1,5 +1,5 @@ """ -Database configuration and utilities for the Friend-Lite backend. +Database configuration and utilities for the Chronicle backend. This module provides centralized database access to avoid duplication across main.py and router modules. diff --git a/backends/advanced/src/advanced_omi_backend/middleware/app_middleware.py b/backends/advanced/src/advanced_omi_backend/middleware/app_middleware.py index be2f2705..eafeffec 100644 --- a/backends/advanced/src/advanced_omi_backend/middleware/app_middleware.py +++ b/backends/advanced/src/advanced_omi_backend/middleware/app_middleware.py @@ -1,5 +1,5 @@ """ -Middleware configuration for Friend-Lite backend. +Middleware configuration for Chronicle backend. Centralizes CORS configuration and global exception handlers. 
""" diff --git a/backends/advanced/src/advanced_omi_backend/models/__init__.py b/backends/advanced/src/advanced_omi_backend/models/__init__.py index 52c63c20..a19fa0db 100644 --- a/backends/advanced/src/advanced_omi_backend/models/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/models/__init__.py @@ -1,8 +1,8 @@ """ -Models package for Friend-Lite backend. +Models package for Chronicle backend. This package contains Pydantic models that define the structure and validation -for all data entities in the Friend-Lite system. +for all data entities in the Chronicle system. """ # Models can be imported directly from their files diff --git a/backends/advanced/src/advanced_omi_backend/models/audio_file.py b/backends/advanced/src/advanced_omi_backend/models/audio_file.py index de1c6f3f..00060037 100644 --- a/backends/advanced/src/advanced_omi_backend/models/audio_file.py +++ b/backends/advanced/src/advanced_omi_backend/models/audio_file.py @@ -1,5 +1,5 @@ """ -AudioFile models for Friend-Lite backend. +AudioFile models for Chronicle backend. This module contains the Beanie Document model for audio_chunks collection, which stores ALL audio files (both with and without speech). This is the diff --git a/backends/advanced/src/advanced_omi_backend/models/conversation.py b/backends/advanced/src/advanced_omi_backend/models/conversation.py index 55c31244..87dc731a 100644 --- a/backends/advanced/src/advanced_omi_backend/models/conversation.py +++ b/backends/advanced/src/advanced_omi_backend/models/conversation.py @@ -1,5 +1,5 @@ """ -Conversation models for Friend-Lite backend. +Conversation models for Chronicle backend. This module contains Beanie Document and Pydantic models for conversations, transcript versions, and memory versions. 
@@ -28,7 +28,7 @@ class TranscriptProvider(str, Enum): class MemoryProvider(str, Enum): """Supported memory providers.""" - FRIEND_LITE = "friend_lite" + CHRONICLE = "chronicle" OPENMEMORY_MCP = "openmemory_mcp" MYCELIA = "mycelia" diff --git a/backends/advanced/src/advanced_omi_backend/routers/api_router.py b/backends/advanced/src/advanced_omi_backend/routers/api_router.py index a510d396..528713c0 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/api_router.py +++ b/backends/advanced/src/advanced_omi_backend/routers/api_router.py @@ -1,5 +1,5 @@ """ -Main API router for Friend-Lite backend. +Main API router for Chronicle backend. This module aggregates all the functional router modules and provides a single entry point for the API endpoints. diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/__init__.py b/backends/advanced/src/advanced_omi_backend/routers/modules/__init__.py index 371fd38d..a5669b06 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/__init__.py @@ -1,5 +1,5 @@ """ -Router modules for Friend-Lite API. +Router modules for Chronicle API. This package contains organized router modules for different functional areas: - user_routes: User management and authentication diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/chat_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/chat_routes.py index a1fea4fc..d0c64904 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/chat_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/chat_routes.py @@ -1,5 +1,5 @@ """ -Chat API routes for Friend-Lite with streaming support and memory integration. +Chat API routes for Chronicle with streaming support and memory integration. 
This module provides: - RESTful chat session management endpoints diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/client_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/client_routes.py index 191ca39f..821ad52a 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/client_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/client_routes.py @@ -1,5 +1,5 @@ """ -Client management routes for Friend-Lite API. +Client management routes for Chronicle API. Handles active client monitoring and management. """ diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/conversation_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/conversation_routes.py index e2b76f7d..8da0f5b0 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/conversation_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/conversation_routes.py @@ -1,5 +1,5 @@ """ -Conversation management routes for Friend-Lite API. +Conversation management routes for Chronicle API. Handles conversation CRUD operations, audio processing, and transcript management. """ diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py index 06e0da1e..24865f90 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py @@ -1,5 +1,5 @@ """ -Health check routes for Friend-Lite backend. +Health check routes for Chronicle backend. This module provides health check endpoints for monitoring the application's status. 
""" @@ -118,7 +118,7 @@ async def health_check(): critical_services_healthy = True # Get configuration once at the start - memory_provider = os.getenv("MEMORY_PROVIDER", "friend_lite") + memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle") speaker_service_url = os.getenv("SPEAKER_SERVICE_URL") openmemory_mcp_url = os.getenv("OPENMEMORY_MCP_URL") @@ -230,38 +230,38 @@ async def health_check(): overall_healthy = False # Check memory service (provider-dependent) - if memory_provider == "friend_lite": + if memory_provider == "chronicle": try: - # Test Friend-Lite memory service connection with timeout + # Test Chronicle memory service connection with timeout test_success = await asyncio.wait_for(memory_service.test_connection(), timeout=8.0) if test_success: health_status["services"]["memory_service"] = { - "status": "โœ… Friend-Lite Memory Connected", + "status": "โœ… Chronicle Memory Connected", "healthy": True, - "provider": "friend_lite", + "provider": "chronicle", "critical": False, } else: health_status["services"]["memory_service"] = { - "status": "โš ๏ธ Friend-Lite Memory Test Failed", + "status": "โš ๏ธ Chronicle Memory Test Failed", "healthy": False, - "provider": "friend_lite", + "provider": "chronicle", "critical": False, } overall_healthy = False except asyncio.TimeoutError: health_status["services"]["memory_service"] = { - "status": "โš ๏ธ Friend-Lite Memory Timeout (8s) - Check Qdrant", + "status": "โš ๏ธ Chronicle Memory Timeout (8s) - Check Qdrant", "healthy": False, - "provider": "friend_lite", + "provider": "chronicle", "critical": False, } overall_healthy = False except Exception as e: health_status["services"]["memory_service"] = { - "status": f"โš ๏ธ Friend-Lite Memory Failed: {str(e)}", + "status": f"โš ๏ธ Chronicle Memory Failed: {str(e)}", "healthy": False, - "provider": "friend_lite", + "provider": "chronicle", "critical": False, } overall_healthy = False diff --git 
a/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py index 93ad0f6b..d0be9528 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py @@ -1,5 +1,5 @@ """ -Memory management routes for Friend-Lite API. +Memory management routes for Chronicle API. Handles memory CRUD operations, search, and debug functionality. """ diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/system_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/system_routes.py index 10587b5c..e51c036c 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/system_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/system_routes.py @@ -1,5 +1,5 @@ """ -System and utility routes for Friend-Lite API. +System and utility routes for Chronicle API. Handles metrics, auth config, and other system utilities. """ diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/user_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/user_routes.py index 808b8185..12ed5c63 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/user_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/user_routes.py @@ -1,5 +1,5 @@ """ -User management routes for Friend-Lite API. +User management routes for Chronicle API. Handles user CRUD operations and admin user management. 
""" diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/websocket_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/websocket_routes.py index 454cabb9..d9754a87 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/websocket_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/websocket_routes.py @@ -1,5 +1,5 @@ """ -WebSocket routes for Friend-Lite backend. +WebSocket routes for Chronicle backend. This module handles WebSocket connections for audio streaming. """ diff --git a/backends/advanced/src/advanced_omi_backend/services/__init__.py b/backends/advanced/src/advanced_omi_backend/services/__init__.py index 81d3c535..d656f34c 100644 --- a/backends/advanced/src/advanced_omi_backend/services/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/services/__init__.py @@ -1,5 +1,5 @@ """ -Services module for Friend-Lite backend. +Services module for Chronicle backend. This module contains business logic services and their provider implementations. """ diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py b/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py index c2413ff2..1b777028 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/__init__.py @@ -1,7 +1,7 @@ """Memory service package. This package provides memory management functionality with support for -multiple memory providers (Friend-Lite, Mycelia, OpenMemory MCP). +multiple memory providers (Chronicle, Mycelia, OpenMemory MCP). The memory service handles extraction, storage, and retrieval of memories from user conversations and interactions. 
@@ -10,7 +10,7 @@ - base.py: Abstract base classes and interfaces - config.py: Configuration management - service_factory.py: Provider selection and instantiation -- providers/friend_lite.py: Friend-Lite native provider (LLM + Qdrant) +- providers/chronicle.py: Chronicle native provider (LLM + Qdrant) - providers/mycelia.py: Mycelia backend provider - providers/openmemory_mcp.py: OpenMemory MCP provider - providers/llm_providers.py: LLM implementations (OpenAI, Ollama) diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/config.py b/backends/advanced/src/advanced_omi_backend/services/memory/config.py index 3946deae..7560d88f 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/config.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/config.py @@ -34,7 +34,7 @@ class VectorStoreProvider(Enum): class MemoryProvider(Enum): """Supported memory service providers.""" - FRIEND_LITE = "friend_lite" # Default sophisticated implementation + CHRONICLE = "chronicle" # Default sophisticated implementation OPENMEMORY_MCP = "openmemory_mcp" # OpenMemory MCP backend MYCELIA = "mycelia" # Mycelia memory backend @@ -42,7 +42,7 @@ class MemoryProvider(Enum): @dataclass class MemoryConfig: """Configuration for memory service.""" - memory_provider: MemoryProvider = MemoryProvider.FRIEND_LITE + memory_provider: MemoryProvider = MemoryProvider.CHRONICLE llm_provider: LLMProvider = LLMProvider.OPENAI vector_store_provider: VectorStoreProvider = VectorStoreProvider.QDRANT llm_config: Dict[str, Any] = None @@ -111,7 +111,7 @@ def create_qdrant_config( def create_openmemory_config( server_url: str = "http://localhost:8765", - client_name: str = "friend_lite", + client_name: str = "chronicle", user_id: str = "default", timeout: int = 30 ) -> Dict[str, Any]: @@ -145,7 +145,7 @@ def build_memory_config_from_env() -> MemoryConfig: """Build memory configuration from environment variables and YAML config.""" try: # Determine memory provider - 
memory_provider = os.getenv("MEMORY_PROVIDER", "friend_lite").lower() + memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() if memory_provider not in [p.value for p in MemoryProvider]: raise ValueError(f"Unsupported memory provider: {memory_provider}") @@ -155,7 +155,7 @@ def build_memory_config_from_env() -> MemoryConfig: if memory_provider_enum == MemoryProvider.OPENMEMORY_MCP: openmemory_config = create_openmemory_config( server_url=os.getenv("OPENMEMORY_MCP_URL", "http://localhost:8765"), - client_name=os.getenv("OPENMEMORY_CLIENT_NAME", "friend_lite"), + client_name=os.getenv("OPENMEMORY_CLIENT_NAME", "chronicle"), user_id=os.getenv("OPENMEMORY_USER_ID", "default"), timeout=int(os.getenv("OPENMEMORY_TIMEOUT", "30")) ) @@ -199,7 +199,7 @@ def build_memory_config_from_env() -> MemoryConfig: timeout_seconds=int(os.getenv("MYCELIA_TIMEOUT", "30")) ) - # For Friend-Lite provider, use existing complex configuration + # For Chronicle provider, use existing complex configuration # Import config loader from advanced_omi_backend.memory_config_loader import get_config_loader @@ -282,7 +282,7 @@ def build_memory_config_from_env() -> MemoryConfig: extraction_enabled = config_loader.is_memory_extraction_enabled() extraction_prompt = config_loader.get_memory_prompt() if extraction_enabled else None - memory_logger.info(f"๐Ÿ”ง Memory config: Provider=Friend-Lite, LLM={llm_provider}, VectorStore={vector_store_provider}, Extraction={extraction_enabled}") + memory_logger.info(f"๐Ÿ”ง Memory config: Provider=Chronicle, LLM={llm_provider}, VectorStore={vector_store_provider}, Extraction={extraction_enabled}") return MemoryConfig( memory_provider=memory_provider_enum, diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py index 43d438cf..3a71f7cf 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py +++ 
b/backends/advanced/src/advanced_omi_backend/services/memory/providers/__init__.py @@ -1,7 +1,7 @@ """Memory service provider implementations. This package contains all memory service provider implementations: -- friend_lite: Friend-Lite native implementation with LLM + vector store +- chronicle: Chronicle native implementation with LLM + vector store - openmemory_mcp: OpenMemory MCP backend integration - mycelia: Mycelia backend integration - llm_providers: LLM provider implementations (OpenAI, Ollama) @@ -9,7 +9,7 @@ - mcp_client: MCP client utilities """ -from .friend_lite import MemoryService as FriendLiteMemoryService +from .chronicle import MemoryService as ChronicleMemoryService from .openmemory_mcp import OpenMemoryMCPService from .mycelia import MyceliaMemoryService from .llm_providers import OpenAIProvider @@ -17,7 +17,7 @@ from .mcp_client import MCPClient, MCPError __all__ = [ - "FriendLiteMemoryService", + "ChronicleMemoryService", "OpenMemoryMCPService", "MyceliaMemoryService", "OpenAIProvider", diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/chronicle.py similarity index 100% rename from backends/advanced/src/advanced_omi_backend/services/memory/providers/friend_lite.py rename to backends/advanced/src/advanced_omi_backend/services/memory/providers/chronicle.py diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py index c0b9deaf..971c41f3 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py @@ -28,7 +28,7 @@ class MCPClient: client: HTTP client instance """ - def __init__(self, server_url: str, client_name: str = "friend_lite", user_id: str = "default", user_email: str = "", 
timeout: int = 30): + def __init__(self, server_url: str, client_name: str = "chronicle", user_id: str = "default", user_email: str = "", timeout: int = 30): """Initialize client for OpenMemory. Args: @@ -127,7 +127,17 @@ async def add_memories(self, text: str) -> List[str]: response = await self.client.post( f"{self.server_url}/api/v1/memories/", - json=payload + json={ + "user_id": self.user_id, + "text": text, + "app": self.client_name, # Use app name (OpenMemory accepts name or UUID) + "metadata": { + "source": "chronicle", + "client": self.client_name, + "user_email": self.user_email + }, + "infer": True + } ) response_body = response.text[:500] if response.status_code != 200 else "..." @@ -220,7 +230,7 @@ async def search_memory(self, query: str, limit: int = 10) -> List[Dict[str, Any else: memories = [] - # Format memories for Friend-Lite + # Format memories for Chronicle formatted_memories = [] for memory in memories: formatted_memories.append({ @@ -374,7 +384,7 @@ async def get_memory(self, memory_id: str) -> Optional[Dict[str, Any]]: response.raise_for_status() result = response.json() - # Format memory for Friend-Lite + # Format memory for Chronicle if isinstance(result, dict): return { "id": result.get("id", memory_id), diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py index a4d271bb..6f9df0ba 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py @@ -52,7 +52,7 @@ class MyceliaMemoryService(MemoryServiceBase): """Memory service implementation using Mycelia backend. This class implements the MemoryServiceBase interface by delegating memory - operations to a Mycelia server using JWT authentication from Friend-Lite. + operations to a Mycelia server using JWT authentication from Chronicle. 
Args: api_url: Mycelia API endpoint URL @@ -196,7 +196,7 @@ async def _call_resource(self, action: str, jwt_token: str, **params) -> Dict[st Args: action: Action to perform (create, list, get, delete, etc.) - jwt_token: User's JWT token from Friend-Lite + jwt_token: User's JWT token from Chronicle **params: Additional parameters for the action Returns: @@ -246,7 +246,7 @@ async def _extract_memories_via_llm( return [] try: - # Get OpenAI client using Friend-Lite's utility + # Get OpenAI client using Chronicle's utility client = _get_openai_client( api_key=self.llm_config.get("api_key"), base_url=self.llm_config.get("base_url", "https://api.openai.com/v1"), @@ -304,7 +304,7 @@ async def _extract_temporal_entity_via_llm( return None try: - # Get OpenAI client using Friend-Lite's utility + # Get OpenAI client using Chronicle's utility client = _get_openai_client( api_key=self.llm_config.get("api_key"), base_url=self.llm_config.get("base_url", "https://api.openai.com/v1"), diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py index 970806a9..2fe34164 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py @@ -2,7 +2,7 @@ This module provides a concrete implementation of the MemoryServiceBase interface that uses OpenMemory MCP as the backend for all memory operations. It maintains -compatibility with the existing Friend-Lite memory service API while leveraging +compatibility with the existing Chronicle memory service API while leveraging OpenMemory's standardized memory management capabilities. """ @@ -20,10 +20,10 @@ class OpenMemoryMCPService(MemoryServiceBase): """Memory service implementation using OpenMemory MCP as backend. 
- + This class implements the MemoryServiceBase interface by delegating memory operations to an OpenMemory MCP server. It handles the translation between - Friend-Lite's memory service API and the standardized MCP operations. + Chronicle's memory service API and the standardized MCP operations. Key features: - Maintains compatibility with existing MemoryServiceBase interface @@ -63,7 +63,7 @@ def __init__( """ super().__init__() self.server_url = server_url or os.getenv("OPENMEMORY_MCP_URL", "http://localhost:8765") - self.client_name = client_name or os.getenv("OPENMEMORY_CLIENT_NAME", "friend_lite") + self.client_name = client_name or os.getenv("OPENMEMORY_CLIENT_NAME", "chronicle") self.user_id = user_id or os.getenv("OPENMEMORY_USER_ID", "default") self.timeout = int(timeout or os.getenv("OPENMEMORY_TIMEOUT", "30")) self.mcp_client: Optional[MCPClient] = None @@ -146,10 +146,8 @@ async def add_memory( # OpenMemory will auto-create users if they don't exist original_user_id = self.mcp_client.user_id original_user_email = self.mcp_client.user_email - - # Update MCP client with Friend-Lite user details - self.mcp_client.user_id = user_id - self.mcp_client.user_email = user_email + self.mcp_client.user_id = user_id # Use the actual Chronicle user's ID + self.mcp_client.user_email = user_email # Use the actual user's email try: # Thin client approach: Send raw transcript to OpenMemory MCP server @@ -209,7 +207,7 @@ async def search_memories( # Update MCP client user context for this search operation original_user_id = self.mcp_client.user_id - self.mcp_client.user_id = user_id + self.mcp_client.user_id = user_id # Use the actual Chronicle user's ID try: results = await self.mcp_client.search_memory( @@ -259,7 +257,7 @@ async def get_all_memories( # Update MCP client user context for this operation original_user_id = self.mcp_client.user_id - self.mcp_client.user_id = user_id # Use the actual Friend-Lite user's ID + self.mcp_client.user_id = user_id # Use the actual 
Chronicle user's ID try: results = await self.mcp_client.list_memories(limit=limit) @@ -299,7 +297,7 @@ async def get_memory(self, memory_id: str, user_id: Optional[str] = None) -> Opt # Update MCP client user context for this operation original_user_id = self.mcp_client.user_id - self.mcp_client.user_id = user_id or self.user_id # Use the actual Friend-Lite user's ID + self.mcp_client.user_id = user_id or self.user_id # Use the actual Chronicle user's ID try: result = await self.mcp_client.get_memory(memory_id) @@ -346,7 +344,7 @@ async def update_memory( # Update MCP client user context for this operation original_user_id = self.mcp_client.user_id - self.mcp_client.user_id = user_id or self.user_id # Use the actual Friend-Lite user's ID + self.mcp_client.user_id = user_id or self.user_id # Use the actual Chronicle user's ID try: success = await self.mcp_client.update_memory( diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py b/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py index 37922186..dc57dbe9 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py @@ -1,7 +1,7 @@ """Memory service factory for creating appropriate memory service instances. This module provides a factory pattern for instantiating memory services -based on configuration. It supports both the sophisticated Friend-Lite +based on configuration. It supports both the sophisticated Chronicle implementation and the OpenMemory MCP backend. 
""" @@ -36,10 +36,10 @@ def create_memory_service(config: MemoryConfig) -> MemoryServiceBase: """ memory_logger.info(f"๐Ÿง  Creating memory service with provider: {config.memory_provider.value}") - if config.memory_provider == MemoryProvider.FRIEND_LITE: - # Use the sophisticated Friend-Lite implementation - from .providers.friend_lite import MemoryService as FriendLiteMemoryService - return FriendLiteMemoryService(config) + if config.memory_provider == MemoryProvider.CHRONICLE: + # Use the sophisticated Chronicle implementation + from .providers.chronicle import MemoryService as ChronicleMemoryService + return ChronicleMemoryService(config) elif config.memory_provider == MemoryProvider.OPENMEMORY_MCP: # Use OpenMemory MCP implementation @@ -156,7 +156,7 @@ def get_service_info() -> dict: # Try to determine provider from service type if "OpenMemoryMCP" in info["service_type"]: info["memory_provider"] = "openmemory_mcp" - elif "FriendLite" in info["service_type"] or "MemoryService" in info["service_type"]: - info["memory_provider"] = "friend_lite" + elif "Chronicle" in info["service_type"] or "MemoryService" in info["service_type"]: + info["memory_provider"] = "chronicle" return info \ No newline at end of file diff --git a/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py index 5b1a64b6..84011068 100644 --- a/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py +++ b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py @@ -1,9 +1,9 @@ """ Mycelia OAuth Synchronization Service. -This module synchronizes Friend-Lite users with Mycelia OAuth API keys, +This module synchronizes Chronicle users with Mycelia OAuth API keys, ensuring that when users access Mycelia directly, they use credentials -that map to their Friend-Lite user ID. +that map to their Chronicle user ID. 
""" import base64 @@ -21,7 +21,7 @@ class MyceliaSyncService: - """Synchronize Friend-Lite users with Mycelia OAuth API keys.""" + """Synchronize Chronicle users with Mycelia OAuth API keys.""" def __init__(self): """Initialize the sync service.""" @@ -35,17 +35,15 @@ def __init__(self): # Test environment uses mycelia_test, production uses mycelia self.mycelia_db = os.getenv("MYCELIA_DB", os.getenv("DATABASE_NAME", "mycelia")) - # Friend-Lite database - extract from MONGODB_URI or use default - # Test env: test_db, Production: friend-lite + # Chronicle database - extract from MONGODB_URI or use default + # Test env: test_db, Production: chronicle if "/" in self.mongo_url and self.mongo_url.count("/") >= 3: # Extract database name from mongodb://host:port/database - self.friendlite_db = self.mongo_url.split("/")[-1].split("?")[0] or "friend-lite" + self.chronicle_db = self.mongo_url.split("/")[-1].split("?")[0] or "chronicle" else: - self.friendlite_db = "friend-lite" + self.chronicle_db = "chronicle" - logger.info( - f"MyceliaSyncService initialized: {self.mongo_url}, Mycelia DB: {self.mycelia_db}, Friend-Lite DB: {self.friendlite_db}" - ) + logger.info(f"MyceliaSyncService initialized: {self.mongo_url}, Mycelia DB: {self.mycelia_db}, Chronicle DB: {self.chronicle_db}") def _hash_api_key_with_salt(self, api_key: str, salt: bytes) -> str: """Hash API key with salt (matches Mycelia's implementation).""" @@ -56,10 +54,10 @@ def _hash_api_key_with_salt(self, api_key: str, salt: bytes) -> str: def _create_mycelia_api_key(self, user_id: str, user_email: str) -> Tuple[str, str]: """ - Create a Mycelia API key for a Friend-Lite user. + Create a Mycelia API key for a Chronicle user. 
Args: - user_id: Friend-Lite user ID (MongoDB ObjectId as string) + user_id: Chronicle user ID (MongoDB ObjectId as string) user_email: User email address Returns: @@ -84,9 +82,11 @@ def _create_mycelia_api_key(self, user_id: str, user_email: str) -> Tuple[str, s api_keys_collection = db["api_keys"] # Check if user already has an active API key - existing = api_keys_collection.find_one( - {"owner": user_id, "isActive": True, "name": f"Friend-Lite Auto ({user_email})"} - ) + existing = api_keys_collection.find_one({ + "owner": user_id, + "isActive": True, + "name": f"Chronicle Auto ({user_email})" + }) if existing: logger.info(f"User {user_email} already has Mycelia API key: {existing['_id']}") @@ -97,10 +97,16 @@ def _create_mycelia_api_key(self, user_id: str, user_email: str) -> Tuple[str, s # Create new API key document api_key_doc = { "hashedKey": hashed_key, - "salt": base64.b64encode(salt).decode("utf-8"), - "owner": user_id, # CRITICAL: owner = Friend-Lite user ID - "name": f"Friend-Lite Auto ({user_email})", - "policies": [{"resource": "**", "action": "*", "effect": "allow"}], + "salt": base64.b64encode(salt).decode('utf-8'), + "owner": user_id, # CRITICAL: owner = Chronicle user ID + "name": f"Chronicle Auto ({user_email})", + "policies": [ + { + "resource": "**", + "action": "*", + "effect": "allow" + } + ], "openPrefix": open_prefix, "createdAt": datetime.utcnow(), "isActive": True, @@ -116,10 +122,10 @@ def _create_mycelia_api_key(self, user_id: str, user_email: str) -> Tuple[str, s def sync_user_to_mycelia(self, user_id: str, user_email: str) -> Optional[Tuple[str, str]]: """ - Sync a Friend-Lite user to Mycelia OAuth. + Sync a Chronicle user to Mycelia OAuth. 
Args: - user_id: Friend-Lite user ID + user_id: Chronicle user ID user_email: User email Returns: @@ -129,10 +135,10 @@ def sync_user_to_mycelia(self, user_id: str, user_email: str) -> Optional[Tuple[ # Create Mycelia API key client_id, api_key = self._create_mycelia_api_key(user_id, user_email) - # Store credentials in Friend-Lite user document (if new key was created) + # Store credentials in Chronicle user document (if new key was created) if api_key: client = MongoClient(self.mongo_url) - db = client[self.friendlite_db] + db = client[self.chronicle_db] users_collection = db["users"] users_collection.update_one( @@ -171,9 +177,9 @@ def sync_admin_user(self) -> Optional[Tuple[str, str]]: logger.warning("ADMIN_EMAIL not set, skipping Mycelia sync") return None - # Get admin user from Friend-Lite database + # Get admin user from Chronicle database client = MongoClient(self.mongo_url) - db = client[self.friendlite_db] + db = client[self.chronicle_db] users_collection = db["users"] admin_user = users_collection.find_one({"email": admin_email}) @@ -229,7 +235,7 @@ async def sync_admin_on_startup(): logger.info("๐Ÿ”„ Starting Mycelia OAuth synchronization...") # Check if Mycelia sync is enabled - memory_provider = os.getenv("MEMORY_PROVIDER", "friend_lite") + memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle") if memory_provider != "mycelia": logger.info("Mycelia sync skipped (MEMORY_PROVIDER != mycelia)") return diff --git a/backends/advanced/src/advanced_omi_backend/workers/conversation_jobs.py b/backends/advanced/src/advanced_omi_backend/workers/conversation_jobs.py index 1d4bd985..0059c816 100644 --- a/backends/advanced/src/advanced_omi_backend/workers/conversation_jobs.py +++ b/backends/advanced/src/advanced_omi_backend/workers/conversation_jobs.py @@ -405,10 +405,15 @@ async def open_conversation_job( ) # Determine end reason based on how we exited the loop - # Check session completion_reason from Redis + # Check session completion_reason from Redis (set 
by WebSocket controller on disconnect) completion_reason = await redis_client.hget(session_key, "completion_reason") completion_reason_str = completion_reason.decode() if completion_reason else None + # Determine end_reason with proper precedence: + # 1. websocket_disconnect (explicit disconnect from client) + # 2. inactivity_timeout (no speech for SPEECH_INACTIVITY_THRESHOLD_SECONDS) + # 3. max_duration (conversation exceeded max runtime) + # 4. user_stopped (user manually stopped recording) if completion_reason_str == "websocket_disconnect": end_reason = "websocket_disconnect" elif timeout_triggered: diff --git a/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py b/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py index 439cdf4c..31dba573 100644 --- a/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py +++ b/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py @@ -28,7 +28,7 @@ async def process_memory_job(conversation_id: str, *, redis_client=None) -> Dict V2 Architecture: 1. Extracts memories from conversation transcript 2. Checks primary speakers filter if configured - 3. Uses configured memory provider (friend_lite or openmemory_mcp) + 3. Uses configured memory provider (chronicle or openmemory_mcp) 4. Stores memory references in conversation document Note: Listening jobs are restarted by open_conversation_job (not here). 
@@ -145,7 +145,7 @@ async def process_memory_job(conversation_id: str, *, redis_client=None) -> Dict transcript_version_id = conversation_model.active_transcript_version or "unknown" # Determine memory provider from memory service - memory_provider = conversation_model.MemoryProvider.FRIEND_LITE # Default + memory_provider = conversation_model.MemoryProvider.CHRONICLE # Default try: memory_service_obj = get_memory_service() provider_name = memory_service_obj.__class__.__name__ diff --git a/backends/advanced/ssl/generate-ssl.sh b/backends/advanced/ssl/generate-ssl.sh index efc5d8c2..b0fd4b3d 100755 --- a/backends/advanced/ssl/generate-ssl.sh +++ b/backends/advanced/ssl/generate-ssl.sh @@ -1,7 +1,7 @@ #!/bin/bash set -e -# Generate self-signed SSL certificate for Friend-Lite Advanced Backend +# Generate self-signed SSL certificate for Chronicle Advanced Backend # Supports localhost, IP addresses, and domain names SERVER_ADDRESS="$1" diff --git a/backends/advanced/start-k8s.sh b/backends/advanced/start-k8s.sh index 487b82c8..a2f3d817 100755 --- a/backends/advanced/start-k8s.sh +++ b/backends/advanced/start-k8s.sh @@ -1,11 +1,11 @@ #!/bin/bash -# Friend-Lite Backend Kubernetes Startup Script +# Chronicle Backend Kubernetes Startup Script # Starts both the FastAPI backend and RQ workers for K8s deployment set -e -echo "๐Ÿš€ Starting Friend-Lite Backend (Kubernetes)..." +echo "๐Ÿš€ Starting Chronicle Backend (Kubernetes)..." # Debug environment variables echo "๐Ÿ” Environment check:" diff --git a/backends/advanced/start-workers.sh b/backends/advanced/start-workers.sh index 2e39848d..f62b5a42 100755 --- a/backends/advanced/start-workers.sh +++ b/backends/advanced/start-workers.sh @@ -4,7 +4,7 @@ set -e -echo "๐Ÿš€ Starting Friend-Lite Workers..." +echo "๐Ÿš€ Starting Chronicle Workers..." # Clean up any stale worker registrations from previous runs echo "๐Ÿงน Cleaning up stale worker registrations from Redis..." 
diff --git a/backends/advanced/start.sh b/backends/advanced/start.sh index 51946672..40fa4abf 100755 --- a/backends/advanced/start.sh +++ b/backends/advanced/start.sh @@ -1,11 +1,11 @@ #!/bin/bash -# Friend-Lite Backend Startup Script +# Chronicle Backend Startup Script # Starts both the FastAPI backend and RQ workers set -e -echo "๐Ÿš€ Starting Friend-Lite Backend..." +echo "๐Ÿš€ Starting Chronicle Backend..." # Function to handle shutdown shutdown() { diff --git a/backends/advanced/tests/test_conversation_models.py b/backends/advanced/tests/test_conversation_models.py index 197fddee..e4387c89 100644 --- a/backends/advanced/tests/test_conversation_models.py +++ b/backends/advanced/tests/test_conversation_models.py @@ -92,7 +92,7 @@ def test_memory_version_model(self): version_id="mem-v1", memory_count=5, transcript_version_id="trans-v1", - provider=MemoryProvider.FRIEND_LITE, + provider=MemoryProvider.CHRONICLE, model="gpt-4o-mini", created_at=datetime.now(), processing_time_seconds=45.2, @@ -102,7 +102,7 @@ def test_memory_version_model(self): assert version.version_id == "mem-v1" assert version.memory_count == 5 assert version.transcript_version_id == "trans-v1" - assert version.provider == MemoryProvider.FRIEND_LITE + assert version.provider == MemoryProvider.CHRONICLE assert version.model == "gpt-4o-mini" assert version.processing_time_seconds == 45.2 assert version.metadata["extraction_quality"] == "high" @@ -151,7 +151,7 @@ def test_add_memory_version(self): version_id="m1", memory_count=3, transcript_version_id="v1", - provider=MemoryProvider.FRIEND_LITE, + provider=MemoryProvider.CHRONICLE, model="gpt-4o-mini", processing_time_seconds=30.0 ) @@ -198,7 +198,7 @@ def test_active_version_properties(self): # Add versions segments = [SpeakerSegment(start=0.0, end=5.0, text="Test", speaker="Speaker A")] conversation.add_transcript_version("v1", "Test", segments, TranscriptProvider.DEEPGRAM) - conversation.add_memory_version("m1", 2, "v1", 
MemoryProvider.FRIEND_LITE) + conversation.add_memory_version("m1", 2, "v1", MemoryProvider.CHRONICLE) # Should return active versions active_transcript = conversation.active_transcript @@ -217,7 +217,7 @@ def test_provider_enums(self): assert TranscriptProvider.PARAKEET == "parakeet" # Test MemoryProvider enum - assert MemoryProvider.FRIEND_LITE == "friend_lite" + assert MemoryProvider.CHRONICLE == "chronicle" assert MemoryProvider.OPENMEMORY_MCP == "openmemory_mcp" def test_conversation_model_dump(self): @@ -227,7 +227,7 @@ def test_conversation_model_dump(self): # Add some versions segments = [SpeakerSegment(start=0.0, end=5.0, text="Test", speaker="Speaker A")] conversation.add_transcript_version("v1", "Test", segments, TranscriptProvider.DEEPGRAM) - conversation.add_memory_version("m1", 2, "v1", MemoryProvider.FRIEND_LITE) + conversation.add_memory_version("m1", 2, "v1", MemoryProvider.CHRONICLE) # Test model_dump() works conv_dict = conversation.model_dump() diff --git a/backends/advanced/tests/test_integration.py b/backends/advanced/tests/test_integration.py index a4422d4c..a8086d1b 100644 --- a/backends/advanced/tests/test_integration.py +++ b/backends/advanced/tests/test_integration.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -End-to-end integration test for Friend-Lite backend with unified transcription support. +End-to-end integration test for Chronicle backend with unified transcription support. This test validates the complete audio processing pipeline using isolated test environment: 1. 
Service startup with docker-compose-test.yml (isolated ports and databases) @@ -954,7 +954,7 @@ def validate_memory_extraction(self, upload_response: dict): client_memories = self.wait_for_memory_processing(memory_job_id, client_id) # Check if we're using OpenMemory MCP provider - memory_provider = os.environ.get("MEMORY_PROVIDER", "friend_lite") + memory_provider = os.environ.get("MEMORY_PROVIDER", "chronicle") if not client_memories: if memory_provider == "openmemory_mcp": diff --git a/backends/advanced/upload_files.py b/backends/advanced/upload_files.py index 44ca0e26..ead58e74 100755 --- a/backends/advanced/upload_files.py +++ b/backends/advanced/upload_files.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -Upload audio files to the Friend-Lite backend for processing. +Upload audio files to the Chronicle backend for processing. """ import argparse @@ -376,7 +376,7 @@ def poll_job_status(job_id: str, token: str, base_url: str, total_files: int) -> def parse_args(): """Parse command line arguments.""" - parser = argparse.ArgumentParser(description="Upload audio files to Friend-Lite backend") + parser = argparse.ArgumentParser(description="Upload audio files to Chronicle backend") parser.add_argument( "files", nargs="*", @@ -394,7 +394,7 @@ def main(): """Main function to orchestrate the upload process.""" args = parse_args() - logger.info("Friend-Lite Audio File Upload Tool") + logger.info("Chronicle Audio File Upload Tool") logger.info("=" * 40) # Load environment variables diff --git a/backends/advanced/webui/README.md b/backends/advanced/webui/README.md index f093f66b..303b2780 100644 --- a/backends/advanced/webui/README.md +++ b/backends/advanced/webui/README.md @@ -1,6 +1,6 @@ -# Friend-Lite Web Dashboard +# Chronicle Web Dashboard -A modern React-based web interface for the Friend-Lite AI-powered personal audio system. +A modern React-based web interface for the Chronicle AI-powered personal audio system. 
## Features diff --git a/backends/advanced/webui/package-lock.json b/backends/advanced/webui/package-lock.json index 39cfba6b..4582a222 100644 --- a/backends/advanced/webui/package-lock.json +++ b/backends/advanced/webui/package-lock.json @@ -1,11 +1,11 @@ { - "name": "friend-lite-webui", + "name": "chronicle-webui", "version": "0.1.0", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "friend-lite-webui", + "name": "chronicle-webui", "version": "0.1.0", "dependencies": { "axios": "^1.6.2", @@ -20,7 +20,6 @@ }, "devDependencies": { "@types/d3": "^7.4.3", - "@types/frappe-gantt": "^0.9.0", "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", "@types/react-vertical-timeline-component": "^3.3.6", @@ -32,7 +31,7 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "postcss": "^8.4.32", - "sass-embedded": "^1.83.0", + "sass-embedded": "^1.80.7", "tailwindcss": "^3.3.0", "typescript": "^5.2.2", "vite": "^5.0.8" @@ -1991,13 +1990,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@types/frappe-gantt": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/@types/frappe-gantt/-/frappe-gantt-0.9.0.tgz", - "integrity": "sha512-n00ElvRvJ1/+HkJwt57yjnTtAM7FcH/pEV9LbRCy3+hR39TY6l0mQuy4o909uxvw97aCNhQjNh8J8xACKJ2G3w==", - "dev": true, - "license": "MIT" - }, "node_modules/@types/geojson": { "version": "7946.0.16", "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", @@ -5252,9 +5244,9 @@ "license": "MIT" }, "node_modules/sass": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.95.1.tgz", - "integrity": "sha512-uPoDh5NIEZV4Dp5GBodkmNY9tSQfXY02pmCcUo+FR1P+x953HGkpw+vV28D4IqYB6f8webZtwoSaZaiPtpTeMg==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.93.3.tgz", + "integrity": "sha512-elOcIZRTM76dvxNAjqYrucTSI0teAF/L2Lv0s6f6b7FOwcwIuA357bIE871580AjHJuSvLIRUosgV+lIWx6Rgg==", "dev": true, "license": "MIT", "optional": true, @@ 
-5274,9 +5266,9 @@ } }, "node_modules/sass-embedded": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded/-/sass-embedded-1.95.1.tgz", - "integrity": "sha512-l086+s40Z0qP7ckj4T+rI/7tZcwAfcKCG9ah9A808yINWOxZFv0kO0u/UHhR4G9Aimeyax/JNvqh8RE7z1wngg==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded/-/sass-embedded-1.93.3.tgz", + "integrity": "sha512-+VUy01yfDqNmIVMd/LLKl2TTtY0ovZN0rTonh+FhKr65mFwIYgU9WzgIZKS7U9/SPCQvWTsTGx9jyt+qRm/XFw==", "dev": true, "license": "MIT", "dependencies": { @@ -5296,30 +5288,30 @@ "node": ">=16.0.0" }, "optionalDependencies": { - "sass-embedded-all-unknown": "1.95.1", - "sass-embedded-android-arm": "1.95.1", - "sass-embedded-android-arm64": "1.95.1", - "sass-embedded-android-riscv64": "1.95.1", - "sass-embedded-android-x64": "1.95.1", - "sass-embedded-darwin-arm64": "1.95.1", - "sass-embedded-darwin-x64": "1.95.1", - "sass-embedded-linux-arm": "1.95.1", - "sass-embedded-linux-arm64": "1.95.1", - "sass-embedded-linux-musl-arm": "1.95.1", - "sass-embedded-linux-musl-arm64": "1.95.1", - "sass-embedded-linux-musl-riscv64": "1.95.1", - "sass-embedded-linux-musl-x64": "1.95.1", - "sass-embedded-linux-riscv64": "1.95.1", - "sass-embedded-linux-x64": "1.95.1", - "sass-embedded-unknown-all": "1.95.1", - "sass-embedded-win32-arm64": "1.95.1", - "sass-embedded-win32-x64": "1.95.1" + "sass-embedded-all-unknown": "1.93.3", + "sass-embedded-android-arm": "1.93.3", + "sass-embedded-android-arm64": "1.93.3", + "sass-embedded-android-riscv64": "1.93.3", + "sass-embedded-android-x64": "1.93.3", + "sass-embedded-darwin-arm64": "1.93.3", + "sass-embedded-darwin-x64": "1.93.3", + "sass-embedded-linux-arm": "1.93.3", + "sass-embedded-linux-arm64": "1.93.3", + "sass-embedded-linux-musl-arm": "1.93.3", + "sass-embedded-linux-musl-arm64": "1.93.3", + "sass-embedded-linux-musl-riscv64": "1.93.3", + "sass-embedded-linux-musl-x64": "1.93.3", + "sass-embedded-linux-riscv64": "1.93.3", + 
"sass-embedded-linux-x64": "1.93.3", + "sass-embedded-unknown-all": "1.93.3", + "sass-embedded-win32-arm64": "1.93.3", + "sass-embedded-win32-x64": "1.93.3" } }, "node_modules/sass-embedded-all-unknown": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-all-unknown/-/sass-embedded-all-unknown-1.95.1.tgz", - "integrity": "sha512-ObGM3xSHEK2fu89GusvAdk1hId3D1R03CyQ6/AVTFSrcBFav1a3aWUmBWtImzf5LsVzliRnlAPPS6+rT/Ghb1A==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-all-unknown/-/sass-embedded-all-unknown-1.93.3.tgz", + "integrity": "sha512-3okGgnE41eg+CPLtAPletu6nQ4N0ij7AeW+Sl5Km4j29XcmqZQeFwYjHe1AlKTEgLi/UAONk1O8i8/lupeKMbw==", "cpu": [ "!arm", "!arm64", @@ -5330,13 +5322,13 @@ "license": "MIT", "optional": true, "dependencies": { - "sass": "1.95.1" + "sass": "1.93.3" } }, "node_modules/sass-embedded-android-arm": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-android-arm/-/sass-embedded-android-arm-1.95.1.tgz", - "integrity": "sha512-siaN1TVEjhBP4QJ5UlDBRhyKmMbFhbdcyHj0B4hIuNcinuVprP6tH1NT0NkHvkXh2egBmTvjzZgJ1ySsCB32JA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-android-arm/-/sass-embedded-android-arm-1.93.3.tgz", + "integrity": "sha512-8xOw9bywfOD6Wv24BgCmgjkk6tMrsOTTHcb28KDxeJtFtoxiUyMbxo0vChpPAfp2Hyg2tFFKS60s0s4JYk+Raw==", "cpu": [ "arm" ], @@ -5351,9 +5343,9 @@ } }, "node_modules/sass-embedded-android-arm64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-android-arm64/-/sass-embedded-android-arm64-1.95.1.tgz", - "integrity": "sha512-E+3vZXhUOVHFiSITH2g53/ynxTG4zz8vTVrXGAKkZQwSe6aCO22uc1Pah23F3jOrDNF/YLrsyp82T/CIIczK3w==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-android-arm64/-/sass-embedded-android-arm64-1.93.3.tgz", + "integrity": "sha512-uqUl3Kt1IqdGVAcAdbmC+NwuUJy8tM+2ZnB7/zrt6WxWVShVCRdFnWR9LT8HJr7eJN7AU8kSXxaVX/gedanPsg==", "cpu": [ "arm64" ], @@ 
-5368,9 +5360,9 @@ } }, "node_modules/sass-embedded-android-riscv64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-android-riscv64/-/sass-embedded-android-riscv64-1.95.1.tgz", - "integrity": "sha512-UcPcr5JXVtInD+/XE+2DhwPsALUdRAHyippnnAP6MtdaT3+AnqqvzSVy9Gb6SKyeqEk4YxPmIlQpZCVODDT4eA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-android-riscv64/-/sass-embedded-android-riscv64-1.93.3.tgz", + "integrity": "sha512-2jNJDmo+3qLocjWqYbXiBDnfgwrUeZgZFHJIwAefU7Fn66Ot7rsXl+XPwlokaCbTpj7eMFIqsRAZ/uDueXNCJg==", "cpu": [ "riscv64" ], @@ -5385,9 +5377,9 @@ } }, "node_modules/sass-embedded-android-x64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-android-x64/-/sass-embedded-android-x64-1.95.1.tgz", - "integrity": "sha512-sW/TO+B0Wq9VDTa7YiO74DW4iF9jEYds+9yslaHtc69r/Ch+Zj+ZB6HeJysfmen91zn5CLJDGrnTSrIk+/COfQ==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-android-x64/-/sass-embedded-android-x64-1.93.3.tgz", + "integrity": "sha512-y0RoAU6ZenQFcjM9PjQd3cRqRTjqwSbtWLL/p68y2oFyh0QGN0+LQ826fc0ZvU/AbqCsAizkqjzOn6cRZJxTTQ==", "cpu": [ "x64" ], @@ -5402,9 +5394,9 @@ } }, "node_modules/sass-embedded-darwin-arm64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-darwin-arm64/-/sass-embedded-darwin-arm64-1.95.1.tgz", - "integrity": "sha512-SWTCwszlBzjin35T2OiGZSDRbC/sqg5Mjepih18lelELrz14eB9LcFTZeiqDfdnwx6qQqPWj2VufCpExr8jElA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-darwin-arm64/-/sass-embedded-darwin-arm64-1.93.3.tgz", + "integrity": "sha512-7zb/hpdMOdKteK17BOyyypemglVURd1Hdz6QGsggy60aUFfptTLQftLRg8r/xh1RbQAUKWFbYTNaM47J9yPxYg==", "cpu": [ "arm64" ], @@ -5419,9 +5411,9 @@ } }, "node_modules/sass-embedded-darwin-x64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-darwin-x64/-/sass-embedded-darwin-x64-1.95.1.tgz", - "integrity": 
"sha512-0GZEgkE1e8E2h97lUtwgZbKHrJYmRE/KhWQBHv6ZueAto8DJcAFNFrIQiQoRJjraE6QTaw6ahSvc1YJ7gL4OQA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-darwin-x64/-/sass-embedded-darwin-x64-1.93.3.tgz", + "integrity": "sha512-Ek1Vp8ZDQEe327Lz0b7h3hjvWH3u9XjJiQzveq74RPpJQ2q6d9LfWpjiRRohM4qK6o4XOHw1X10OMWPXJtdtWg==", "cpu": [ "x64" ], @@ -5436,9 +5428,9 @@ } }, "node_modules/sass-embedded-linux-arm": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm/-/sass-embedded-linux-arm-1.95.1.tgz", - "integrity": "sha512-zUAm/rztm5Uyy+DSs408VJg404siVgUuZyqId4tFwkPNC5WRKu25Z8bFMriyGaE4YfEqbNwFV07C16mJoGeVOA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm/-/sass-embedded-linux-arm-1.93.3.tgz", + "integrity": "sha512-yeiv2y+dp8B4wNpd3+JsHYD0mvpXSfov7IGyQ1tMIR40qv+ROkRqYiqQvAOXf76Qwh4Y9OaYZtLpnsPjfeq6mA==", "cpu": [ "arm" ], @@ -5453,9 +5445,9 @@ } }, "node_modules/sass-embedded-linux-arm64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm64/-/sass-embedded-linux-arm64-1.95.1.tgz", - "integrity": "sha512-MQxa+qVX7Os2rMpJ/AvhWup+1cS0JieQgCfi9cz1Zckn4zaUhg35+m2FQhfKvzv4afeW5bubTMOQeTRMQujbXw==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm64/-/sass-embedded-linux-arm64-1.93.3.tgz", + "integrity": "sha512-RBrHWgfd8Dd8w4fbmdRVXRrhh8oBAPyeWDTKAWw8ZEmuXfVl4ytjDuyxaVilh6rR1xTRTNpbaA/YWApBlLrrNw==", "cpu": [ "arm64" ], @@ -5470,9 +5462,9 @@ } }, "node_modules/sass-embedded-linux-musl-arm": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-arm/-/sass-embedded-linux-musl-arm-1.95.1.tgz", - "integrity": "sha512-gNdaGmM3nZ0jkFNmyXWyNlXZPdaMP+7n5Mk3yGFGShqRt/6T/bHh5SkyNnU2ZdP1z7R9poPItJhULrZJ42ETeA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-arm/-/sass-embedded-linux-musl-arm-1.93.3.tgz", + "integrity": 
"sha512-fU0fwAwbp7sBE3h5DVU5UPzvaLg7a4yONfFWkkcCp6ZrOiPuGRHXXYriWQ0TUnWy4wE+svsVuWhwWgvlb/tkKg==", "cpu": [ "arm" ], @@ -5487,9 +5479,9 @@ } }, "node_modules/sass-embedded-linux-musl-arm64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-arm64/-/sass-embedded-linux-musl-arm64-1.95.1.tgz", - "integrity": "sha512-8lD5vHGzBjBRCMIr9CXCyjmy8Q1q+H4ygcYCIm/aPNYhrm9uPOzJfs8hv9kDRgRAASFkcPGlFw8tDH4QqiJ5wg==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-arm64/-/sass-embedded-linux-musl-arm64-1.93.3.tgz", + "integrity": "sha512-PS829l+eUng+9W4PFclXGb4uA2+965NHV3/Sa5U7qTywjeeUUYTZg70dJHSqvhrBEfCc2XJABeW3adLJbyQYkw==", "cpu": [ "arm64" ], @@ -5504,9 +5496,9 @@ } }, "node_modules/sass-embedded-linux-musl-riscv64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-riscv64/-/sass-embedded-linux-musl-riscv64-1.95.1.tgz", - "integrity": "sha512-WjKfHxnFc/jOL5QtmgYuiWCc4616V15DkpE+7z41JWEawRXku6w++w7AR+Zx/jbz93FZ/AsZp27IS3XUt80u3Q==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-riscv64/-/sass-embedded-linux-musl-riscv64-1.93.3.tgz", + "integrity": "sha512-cK1oBY+FWQquaIGEeQ5H74KTO8cWsSWwXb/WaildOO9U6wmUypTgUYKQ0o5o/29nZbWWlM1PHuwVYTSnT23Jjg==", "cpu": [ "riscv64" ], @@ -5521,9 +5513,9 @@ } }, "node_modules/sass-embedded-linux-musl-x64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-x64/-/sass-embedded-linux-musl-x64-1.95.1.tgz", - "integrity": "sha512-3U6994SRUUmC8mPvSG/vNLUo2ZcGv3jHuPoBywTbJhGQI8gq0hef1MY8TU5mvtj9DhQYlah6MYktM4YrOQgqcQ==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-x64/-/sass-embedded-linux-musl-x64-1.93.3.tgz", + "integrity": "sha512-A7wkrsHu2/I4Zpa0NMuPGkWDVV7QGGytxGyUq3opSXgAexHo/vBPlGoDXoRlSdex0cV+aTMRPjoGIfdmNlHwyg==", "cpu": [ "x64" ], @@ -5538,9 +5530,9 @@ } }, 
"node_modules/sass-embedded-linux-riscv64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-riscv64/-/sass-embedded-linux-riscv64-1.95.1.tgz", - "integrity": "sha512-CJ0tEEQnfpJEMCQrdubLsmuVc/c66EgaCAO0ZgSJ/KpxBKF3O1lHN6e1UErRf6VO0rh8ExAOh75po12Vu849Og==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-riscv64/-/sass-embedded-linux-riscv64-1.93.3.tgz", + "integrity": "sha512-vWkW1+HTF5qcaHa6hO80gx/QfB6GGjJUP0xLbnAoY4pwEnw5ulGv6RM8qYr8IDhWfVt/KH+lhJ2ZFxnJareisQ==", "cpu": [ "riscv64" ], @@ -5555,9 +5547,9 @@ } }, "node_modules/sass-embedded-linux-x64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-x64/-/sass-embedded-linux-x64-1.95.1.tgz", - "integrity": "sha512-nGnzrEpZZOsGOwrRVyX4t15M8ijZWhc4e4lLpOqaPm+lv23HFncfY05WxU5bRj0KAknrkeTM2IX/6veP2aeUdA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-x64/-/sass-embedded-linux-x64-1.93.3.tgz", + "integrity": "sha512-k6uFxs+e5jSuk1Y0niCwuq42F9ZC5UEP7P+RIOurIm8w/5QFa0+YqeW+BPWEW5M1FqVOsNZH3qGn4ahqvAEjPA==", "cpu": [ "x64" ], @@ -5572,9 +5564,9 @@ } }, "node_modules/sass-embedded-unknown-all": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-unknown-all/-/sass-embedded-unknown-all-1.95.1.tgz", - "integrity": "sha512-bhywAcadVQoCotD4gVmyMBi2SENPvyLFPrXf33VK5mY487Nf/g5SgGCUuGmfTsbns4NBwbwR7PA/1fnJmeMtdA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-unknown-all/-/sass-embedded-unknown-all-1.93.3.tgz", + "integrity": "sha512-o5wj2rLpXH0C+GJKt/VpWp6AnMsCCbfFmnMAttcrsa+U3yrs/guhZ3x55KAqqUsE8F47e3frbsDL+1OuQM5DAA==", "dev": true, "license": "MIT", "optional": true, @@ -5585,13 +5577,13 @@ "!win32" ], "dependencies": { - "sass": "1.95.1" + "sass": "1.93.3" } }, "node_modules/sass-embedded-win32-arm64": { - "version": "1.95.1", - "resolved": 
"https://registry.npmjs.org/sass-embedded-win32-arm64/-/sass-embedded-win32-arm64-1.95.1.tgz", - "integrity": "sha512-RWWODCthWdMVODoq98lyIk9R56mgGJ4TFUjD9LSCe7fAYD/tiTkUabE4AUzkZqknQSYr0n0Q2uy7POSDIKvhVg==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-win32-arm64/-/sass-embedded-win32-arm64-1.93.3.tgz", + "integrity": "sha512-0dOfT9moy9YmBolodwYYXtLwNr4jL4HQC9rBfv6mVrD7ud8ue2kDbn+GVzj1hEJxvEexVSmDCf7MHUTLcGs9xQ==", "cpu": [ "arm64" ], @@ -5606,9 +5598,9 @@ } }, "node_modules/sass-embedded-win32-x64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-win32-x64/-/sass-embedded-win32-x64-1.95.1.tgz", - "integrity": "sha512-jotHgOQnCb1XdjK0fhsyuhsfox7Y5EkrOc4h2caEpRcNCnsPTBZHqhuc8Lnw8HbKIhwKYkqWhexkjgz62MShhg==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-win32-x64/-/sass-embedded-win32-x64-1.93.3.tgz", + "integrity": "sha512-wHFVfxiS9hU/sNk7KReD+lJWRp3R0SLQEX4zfOnRP2zlvI2X4IQR5aZr9GNcuMP6TmNpX0nQPZTegS8+h9RrEg==", "cpu": [ "x64" ], diff --git a/backends/advanced/webui/package.json b/backends/advanced/webui/package.json index 120bbe9c..aa33d177 100644 --- a/backends/advanced/webui/package.json +++ b/backends/advanced/webui/package.json @@ -1,5 +1,5 @@ { - "name": "friend-lite-webui", + "name": "chronicle-webui", "private": true, "version": "0.1.0", "type": "module", @@ -34,7 +34,7 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "postcss": "^8.4.32", - "sass-embedded": "^1.83.0", + "sass-embedded": "^1.80.7", "tailwindcss": "^3.3.0", "typescript": "^5.2.2", "vite": "^5.0.8" diff --git a/backends/advanced/webui/src/components/layout/Layout.tsx b/backends/advanced/webui/src/components/layout/Layout.tsx index f4caf629..5995f823 100644 --- a/backends/advanced/webui/src/components/layout/Layout.tsx +++ b/backends/advanced/webui/src/components/layout/Layout.tsx @@ -31,7 +31,7 @@ export default function Layout() {

- Friend-Lite Dashboard + Chronicle Dashboard

@@ -102,7 +102,7 @@ export default function Layout() {
- ๐ŸŽต Friend-Lite Dashboard v1.0 | AI-powered personal audio system + ๐ŸŽต Chronicle Dashboard v1.0 | AI-powered personal audio system
diff --git a/backends/advanced/webui/src/pages/LoginPage.tsx b/backends/advanced/webui/src/pages/LoginPage.tsx index 717bb61d..7093e73a 100644 --- a/backends/advanced/webui/src/pages/LoginPage.tsx +++ b/backends/advanced/webui/src/pages/LoginPage.tsx @@ -58,7 +58,7 @@ export default function LoginPage() {

- Friend-Lite Dashboard + Chronicle Dashboard

Sign in to your account diff --git a/backends/advanced/webui/src/pages/Memories.tsx b/backends/advanced/webui/src/pages/Memories.tsx index 0c4973b6..732d1683 100644 --- a/backends/advanced/webui/src/pages/Memories.tsx +++ b/backends/advanced/webui/src/pages/Memories.tsx @@ -258,7 +258,7 @@ export default function Memories() { {memoryProvider && (

- Provider: {memoryProvider === 'friend_lite' ? 'Friend-Lite' : memoryProvider === 'openmemory_mcp' ? 'OpenMemory MCP' : memoryProvider} + Provider: {memoryProvider === 'chronicle' ? 'Chronicle' : memoryProvider === 'openmemory_mcp' ? 'OpenMemory MCP' : memoryProvider}

)}
@@ -313,7 +313,7 @@ export default function Memories() { - {/* Initial Search Threshold Slider - Show for Friend-Lite provider */} + {/* Initial Search Threshold Slider - Show for Chronicle provider */} {memoryProviderSupportsThreshold && (
diff --git a/backends/advanced/webui/src/pages/MemoriesRouter.tsx b/backends/advanced/webui/src/pages/MemoriesRouter.tsx index b39663f9..fe6285e9 100644 --- a/backends/advanced/webui/src/pages/MemoriesRouter.tsx +++ b/backends/advanced/webui/src/pages/MemoriesRouter.tsx @@ -4,7 +4,7 @@ import Memories from './Memories' /** * Memories page wrapper that stores JWT for cross-origin Mycelia access. - * Always displays Friend-Lite native Memories component (backend proxies to provider). + * Always displays Chronicle native Memories component (backend proxies to provider). */ export default function MemoriesRouter() { const { token } = useAuth() @@ -17,6 +17,6 @@ export default function MemoriesRouter() { }, [token]) // Always show the native Memories page (works for all providers) - // Friend-Lite backend will proxy to Mycelia when needed + // Chronicle backend will proxy to Mycelia when needed return } diff --git a/backends/advanced/webui/src/pages/System.tsx b/backends/advanced/webui/src/pages/System.tsx index c722ada9..5c52e057 100644 --- a/backends/advanced/webui/src/pages/System.tsx +++ b/backends/advanced/webui/src/pages/System.tsx @@ -359,7 +359,7 @@ export default function System() { > {availableProviders.map((provider) => ( diff --git a/backends/advanced/webui/tsconfig.json b/backends/advanced/webui/tsconfig.json index 7a7611e4..7355a7c8 100644 --- a/backends/advanced/webui/tsconfig.json +++ b/backends/advanced/webui/tsconfig.json @@ -16,6 +16,7 @@ /* Linting */ "strict": true, + "noImplicitAny": false, "noUnusedLocals": true, "noUnusedParameters": true, "noFallthroughCasesInSwitch": true diff --git a/backends/charts/advanced-backend/Chart.yaml b/backends/charts/advanced-backend/Chart.yaml index 01aad364..c70e0509 100644 --- a/backends/charts/advanced-backend/Chart.yaml +++ b/backends/charts/advanced-backend/Chart.yaml @@ -1,10 +1,10 @@ apiVersion: v2 name: advanced-backend -description: Friend-lite Advanced Backend Service +description: Chronicle Advanced 
Backend Service version: 0.1.0 appVersion: "1.0" keywords: - - friend-lite + - chronicle - backend - ai sources: diff --git a/backends/charts/advanced-backend/templates/deployment.yaml b/backends/charts/advanced-backend/templates/deployment.yaml index 4082bd65..0e40a7fb 100644 --- a/backends/charts/advanced-backend/templates/deployment.yaml +++ b/backends/charts/advanced-backend/templates/deployment.yaml @@ -25,9 +25,9 @@ spec: protocol: TCP envFrom: - configMapRef: - name: friend-lite-config + name: chronicle-config - secretRef: - name: friend-lite-secrets + name: chronicle-secrets env: # Override specific values from Kubernetes/Helm if needed {{- range $key, $value := .Values.env }} @@ -70,9 +70,9 @@ spec: command: ["./start-workers.sh"] envFrom: - configMapRef: - name: friend-lite-config + name: chronicle-config - secretRef: - name: friend-lite-secrets + name: chronicle-secrets env: {{- range $key, $value := .Values.env }} - name: {{ $key }} diff --git a/backends/charts/advanced-backend/templates/workers-deployment.yaml b/backends/charts/advanced-backend/templates/workers-deployment.yaml index effcc10d..22751d31 100644 --- a/backends/charts/advanced-backend/templates/workers-deployment.yaml +++ b/backends/charts/advanced-backend/templates/workers-deployment.yaml @@ -24,9 +24,9 @@ spec: command: ["./start-workers.sh"] envFrom: - configMapRef: - name: friend-lite-config + name: chronicle-config - secretRef: - name: friend-lite-secrets + name: chronicle-secrets env: # Override specific values from Kubernetes/Helm if needed {{- range $key, $value := .Values.env }} diff --git a/backends/charts/advanced-backend/values.yaml b/backends/charts/advanced-backend/values.yaml index 8827a62d..a3a2812f 100644 --- a/backends/charts/advanced-backend/values.yaml +++ b/backends/charts/advanced-backend/values.yaml @@ -55,9 +55,9 @@ ingress: # hosts: defined in ingress-values.yaml and overridden by Skaffold # tls: - # - secretName: friend-lite-tls + # - secretName: chronicle-tls # 
hosts: - # - friend-lite.192-168-1-42.nip.io + # - chronicle.192-168-1-42.nip.io resources: limits: diff --git a/backends/charts/webui/Chart.yaml b/backends/charts/webui/Chart.yaml index cc3b2d32..869eba76 100644 --- a/backends/charts/webui/Chart.yaml +++ b/backends/charts/webui/Chart.yaml @@ -1,10 +1,10 @@ apiVersion: v2 name: webui -description: Friend-lite WebUI +description: Chronicle WebUI version: 0.1.0 appVersion: "1.0" keywords: - - friend-lite + - chronicle - webui - frontend sources: diff --git a/backends/charts/webui/templates/deployment.yaml b/backends/charts/webui/templates/deployment.yaml index a1358e2e..6d57204c 100644 --- a/backends/charts/webui/templates/deployment.yaml +++ b/backends/charts/webui/templates/deployment.yaml @@ -24,9 +24,9 @@ spec: protocol: TCP envFrom: - configMapRef: - name: friend-lite-config + name: chronicle-config - secretRef: - name: friend-lite-secrets + name: chronicle-secrets env: {{- range $key, $value := .Values.env }} - name: {{ $key }} diff --git a/backends/charts/webui/values.yaml b/backends/charts/webui/values.yaml index c56229e4..1dce6ab4 100644 --- a/backends/charts/webui/values.yaml +++ b/backends/charts/webui/values.yaml @@ -38,7 +38,7 @@ ingress: - host: external.example.com # Overridden by Skaffold setValueTemplates paths: *commonPaths # tls: - # - secretName: friend-lite-tls + # - secretName: chronicle-tls # hosts: # - webui.example.com diff --git a/extras/asr-services/README.md b/extras/asr-services/README.md index b235b659..670d33b5 100644 --- a/extras/asr-services/README.md +++ b/extras/asr-services/README.md @@ -1,6 +1,6 @@ # ASR Services -Offline Automatic Speech Recognition (ASR) services for Friend-Lite using the Wyoming protocol. +Offline Automatic Speech Recognition (ASR) services for Chronicle using the Wyoming protocol. 
## Overview @@ -100,7 +100,7 @@ services: ## Integration -### With Friend-Lite Backend +### With Chronicle Backend The ASR services integrate as fallback transcription when Deepgram is unavailable: ```bash # Backend configuration diff --git a/extras/asr-services/quickstart.md b/extras/asr-services/quickstart.md index 1fed4c12..994d17bc 100644 --- a/extras/asr-services/quickstart.md +++ b/extras/asr-services/quickstart.md @@ -40,8 +40,8 @@ python client.py --host localhost --port 8765 --audio test.wav python client.py --host localhost --port 8765 --microphone ``` -### Integration with Friend-Lite -Set the offline ASR URI in your Friend-Lite backend: +### Integration with Chronicle +Set the offline ASR URI in your Chronicle backend: ```bash # In your .env file OFFLINE_ASR_TCP_URI=tcp://localhost:8765 @@ -55,7 +55,7 @@ OFFLINE_ASR_TCP_URI=tcp://localhost:8765 ## Next Steps -1. **Configure Backend**: Update Friend-Lite to use offline ASR as fallback +1. **Configure Backend**: Update Chronicle to use offline ASR as fallback 2. **Test Integration**: Verify transcription works when Deepgram is unavailable 3. **Performance Tuning**: Monitor CPU/memory usage and adjust as needed 4. 
**Production Deploy**: Scale services based on load requirements diff --git a/extras/asr-services/tests/test_parakeet_service.py b/extras/asr-services/tests/test_parakeet_service.py index 0fd3462d..4c94af12 100644 --- a/extras/asr-services/tests/test_parakeet_service.py +++ b/extras/asr-services/tests/test_parakeet_service.py @@ -13,7 +13,7 @@ Run with: # Run the test (service management is automatic) - cd /home/ankush/workspaces/friend-lite/extras/asr-services + cd /home/ankush/workspaces/chronicle/extras/asr-services uv run pytest tests/test_parakeet_service.py -v -s """ diff --git a/extras/havpe-relay/README.md b/extras/havpe-relay/README.md index 5ab061e3..2793b36d 100644 --- a/extras/havpe-relay/README.md +++ b/extras/havpe-relay/README.md @@ -161,4 +161,4 @@ You can test the relay using the provided test listener (if needed): ## License -This project is part of the friend-lite ecosystem. +This project is part of the chronicle ecosystem. diff --git a/extras/havpe-relay/main.py b/extras/havpe-relay/main.py index 4494bb00..eac6d58b 100644 --- a/extras/havpe-relay/main.py +++ b/extras/havpe-relay/main.py @@ -526,7 +526,7 @@ async def main(): # Print startup banner with authentication info logger.info("๐ŸŽต ========================================") - logger.info("๐ŸŽต Friend-Lite HAVPE Relay with Authentication") + logger.info("๐ŸŽต Chronicle HAVPE Relay with Authentication") logger.info("๐ŸŽต ========================================") logger.info(f"๐ŸŽง ESP32 Server: {args.host}:{args.port}") logger.info(f"๐Ÿ“ก Backend API: {BACKEND_URL}") diff --git a/extras/local-omi-bt/connect-omi.py b/extras/local-omi-bt/connect-omi.py index a689bb4b..302a17d7 100644 --- a/extras/local-omi-bt/connect-omi.py +++ b/extras/local-omi-bt/connect-omi.py @@ -10,8 +10,8 @@ from bleak.backends.device import BLEDevice from dotenv import load_dotenv, set_key from easy_audio_interfaces.filesystem import RollingFileSink -from friend_lite.bluetooth import listen_to_omi, print_devices 
-from friend_lite.decoder import OmiOpusDecoder +from chronicle.bluetooth import listen_to_omi, print_devices +from chronicle.decoder import OmiOpusDecoder from wyoming.audio import AudioChunk # Setup logging @@ -49,7 +49,7 @@ async def as_audio_chunks(it) -> AsyncGenerator[AudioChunk, None]: async for data in it: yield AudioChunk(audio=data, rate=16000, width=2, channels=1) -# Add this to friend-lite sdk +# Add this to chronicle sdk async def list_devices(prefix: str = "OMI") -> list[BLEDevice]: devices = await BleakScanner.discover() filtered_devices = [] diff --git a/extras/openmemory-mcp/README.md b/extras/openmemory-mcp/README.md index 82d033e0..940a33e5 100644 --- a/extras/openmemory-mcp/README.md +++ b/extras/openmemory-mcp/README.md @@ -1,6 +1,6 @@ # OpenMemory MCP Service -This directory contains a local deployment of the OpenMemory MCP (Model Context Protocol) server, which can be used as an alternative memory provider for Friend-Lite. +This directory contains a local deployment of the OpenMemory MCP (Model Context Protocol) server, which can be used as an alternative memory provider for Chronicle. ## What is OpenMemory MCP? @@ -30,9 +30,9 @@ cp .env.template .env ./run.sh --with-ui ``` -### 3. Configure Friend-Lite +### 3. Configure Chronicle -In your Friend-Lite backend `.env` file: +In your Chronicle backend `.env` file: ```bash # Use OpenMemory MCP instead of built-in memory processing @@ -52,7 +52,7 @@ The deployment includes: 2. **Qdrant Vector Database** (port 6334) - Stores memory embeddings - Enables semantic search - - Isolated from main Friend-Lite Qdrant + - Isolated from main Chronicle Qdrant 3. 
**OpenMemory UI** (port 3001, optional) - Web interface for memory management @@ -69,16 +69,16 @@ The deployment includes: - **UI** (if enabled): http://localhost:3001 -## How It Works with Friend-Lite +## How It Works with Chronicle -When configured with `MEMORY_PROVIDER=openmemory_mcp`, Friend-Lite will: +When configured with `MEMORY_PROVIDER=openmemory_mcp`, Chronicle will: 1. Send raw conversation transcripts to OpenMemory MCP 2. OpenMemory extracts memories using OpenAI 3. Memories are stored in the dedicated Qdrant instance -4. Friend-Lite can search memories via the MCP protocol +4. Chronicle can search memories via the MCP protocol -This replaces Friend-Lite's built-in memory processing with OpenMemory's implementation. +This replaces Chronicle's built-in memory processing with OpenMemory's implementation. ## Managing Services @@ -98,7 +98,7 @@ docker compose restart ## Testing -### Standalone Test (No Friend-Lite Dependencies) +### Standalone Test (No Chronicle Dependencies) Test the OpenMemory MCP server directly: @@ -117,9 +117,9 @@ This test verifies: - Memory deletion - MCP protocol endpoints -### Integration Test (With Friend-Lite) +### Integration Test (With Chronicle) -Test the integration between Friend-Lite and OpenMemory MCP: +Test the integration between Chronicle and OpenMemory MCP: ```bash # From backends/advanced directory @@ -134,7 +134,7 @@ This test verifies: - MCP client functionality - OpenMemoryMCPService implementation - Service factory integration -- Memory operations through Friend-Lite interface +- Memory operations through Chronicle interface ## Troubleshooting @@ -143,35 +143,35 @@ This test verifies: If ports are already in use, edit `docker-compose.yml`: - Change `8765:8765` to another port for MCP server - Change `6334:6333` to another port for Qdrant -- Update Friend-Lite's `OPENMEMORY_MCP_URL` accordingly +- Update Chronicle's `OPENMEMORY_MCP_URL` accordingly ### Memory Not Working 1. 
Check OpenMemory logs: `docker compose logs openmemory-mcp` 2. Verify OPENAI_API_KEY is set correctly -3. Ensure Friend-Lite backend is configured with correct URL +3. Ensure Chronicle backend is configured with correct URL 4. Test MCP endpoint: `curl http://localhost:8765/api/v1/memories?user_id=test` ### Connection Issues -- Ensure containers are on same network if running Friend-Lite in Docker +- Ensure containers are on same network if running Chronicle in Docker - Use `host.docker.internal` instead of `localhost` when connecting from Docker containers ## Advanced Configuration ### Using with Docker Network -If Friend-Lite backend is also running in Docker: +If Chronicle backend is also running in Docker: ```yaml -# In Friend-Lite docker-compose.yml +# In Chronicle docker-compose.yml networks: default: external: name: openmemory-mcp_openmemory-network ``` -Then use container names in Friend-Lite .env: +Then use container names in Chronicle .env: ```bash OPENMEMORY_MCP_URL=http://openmemory-mcp:8765 ``` @@ -184,4 +184,4 @@ OpenMemory uses OpenAI by default. To use different models, you would need to mo - [OpenMemory Documentation](https://docs.mem0.ai/open-memory/introduction) - [MCP Protocol Spec](https://github.com/mem0ai/mem0/tree/main/openmemory) -- [Friend-Lite Memory Docs](../../backends/advanced/MEMORY_PROVIDERS.md) \ No newline at end of file +- [Chronicle Memory Docs](../../backends/advanced/MEMORY_PROVIDERS.md) \ No newline at end of file diff --git a/extras/openmemory-mcp/run.sh b/extras/openmemory-mcp/run.sh index 1cc0bf21..1092207a 100755 --- a/extras/openmemory-mcp/run.sh +++ b/extras/openmemory-mcp/run.sh @@ -2,7 +2,7 @@ set -e -echo "๐Ÿš€ Starting OpenMemory MCP installation for Friend-Lite..." +echo "๐Ÿš€ Starting OpenMemory MCP installation for Chronicle..." 
# Set environment variables OPENAI_API_KEY="${OPENAI_API_KEY:-}" @@ -64,9 +64,9 @@ if docker ps | grep -q openmemory-mcp; then curl -s http://localhost:8765/openapi.json | jq '.paths | keys[]' fi echo "" - echo "๐Ÿ“š Integration with Friend-Lite:" - echo " Set MEMORY_PROVIDER=openmemory_mcp in your Friend-Lite .env" - echo " Set OPENMEMORY_MCP_URL=http://localhost:8765 in your Friend-Lite .env" + echo "๐Ÿ“š Integration with Chronicle:" + echo " Set MEMORY_PROVIDER=openmemory_mcp in your Chronicle .env" + echo " Set OPENMEMORY_MCP_URL=http://localhost:8765 in your Chronicle .env" echo "" echo "๐Ÿ” Check logs: docker compose logs -f" echo "๐Ÿ›‘ Stop services: docker compose down" diff --git a/extras/openmemory-mcp/test_standalone.py b/extras/openmemory-mcp/test_standalone.py index 58f011a4..08720f4e 100755 --- a/extras/openmemory-mcp/test_standalone.py +++ b/extras/openmemory-mcp/test_standalone.py @@ -2,7 +2,7 @@ """Standalone test script for OpenMemory MCP server. This script tests the OpenMemory MCP server directly using its REST API, -without any dependencies on Friend-Lite backend code. +without any dependencies on Chronicle backend code. """ import asyncio diff --git a/extras/speaker-omni-experimental/README.md b/extras/speaker-omni-experimental/README.md index 0f0d34c7..5c7e3a30 100644 --- a/extras/speaker-omni-experimental/README.md +++ b/extras/speaker-omni-experimental/README.md @@ -355,7 +355,7 @@ Approximate processing times (7B model on RTX 4090): ## ๐Ÿ”ฎ Integration Path -This experimental system can be integrated with the existing Friend-Lite backend: +This experimental system can be integrated with the existing Chronicle backend: 1. **Standalone Testing**: Use this directory for initial family testing 2. 
**API Wrapper**: Create FastAPI endpoint similar to traditional speaker service @@ -372,7 +372,7 @@ This experimental system can be integrated with the existing Friend-Lite backend ## ๐Ÿ”— Related Files - `../speaker-recognition/`: Traditional PyAnnote-based system -- `../../backends/advanced-backend/`: Main Friend-Lite backend +- `../../backends/advanced-backend/`: Main Chronicle backend - `../../extras/test-audios/`: Sample audio files for testing ## ๐Ÿ“ Development Notes @@ -394,4 +394,4 @@ This is an experimental system. Feedback and improvements welcome: ## ๐Ÿ“„ License -Part of the Friend-Lite project. See main repository license. \ No newline at end of file +Part of the Chronicle project. See main repository license. \ No newline at end of file diff --git a/extras/speaker-recognition/charts/templates/speaker-deployment.yaml b/extras/speaker-recognition/charts/templates/speaker-deployment.yaml index 94417297..d77f6204 100644 --- a/extras/speaker-recognition/charts/templates/speaker-deployment.yaml +++ b/extras/speaker-recognition/charts/templates/speaker-deployment.yaml @@ -34,7 +34,7 @@ spec: protocol: TCP envFrom: - configMapRef: - name: friend-lite-config + name: chronicle-config - secretRef: name: {{ .Values.secrets.name | default (printf "%s-secrets" .Release.Name) }} env: diff --git a/extras/speaker-recognition/charts/templates/webui-deployment.yaml b/extras/speaker-recognition/charts/templates/webui-deployment.yaml index aca7f872..ab8ba3e5 100644 --- a/extras/speaker-recognition/charts/templates/webui-deployment.yaml +++ b/extras/speaker-recognition/charts/templates/webui-deployment.yaml @@ -28,9 +28,9 @@ spec: protocol: TCP envFrom: - configMapRef: - name: friend-lite-config + name: chronicle-config - secretRef: - name: friend-lite-secrets + name: chronicle-secrets env: {{- range $key, $value := .Values.webui.env }} - name: {{ $key }} diff --git a/extras/speaker-recognition/charts/values.yaml b/extras/speaker-recognition/charts/values.yaml index 
a1ea8f34..afccf104 100644 --- a/extras/speaker-recognition/charts/values.yaml +++ b/extras/speaker-recognition/charts/values.yaml @@ -94,6 +94,6 @@ secrets: hfToken: "" deepgramApiKey: "" # Fixed secret name to prevent regeneration - uses existing secret from Makefile - name: "friend-lite-secrets" + name: "chronicle-secrets" # Don't create the secret, use existing one from Makefile create: false \ No newline at end of file diff --git a/extras/speaker-recognition/init.py b/extras/speaker-recognition/init.py index 8d1dd547..8267e35b 100755 --- a/extras/speaker-recognition/init.py +++ b/extras/speaker-recognition/init.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -Friend-Lite Speaker Recognition Setup Script +Chronicle Speaker Recognition Setup Script Interactive configuration for speaker recognition service """ diff --git a/extras/speaker-recognition/quickstart.md b/extras/speaker-recognition/quickstart.md index c594069a..47a99f56 100644 --- a/extras/speaker-recognition/quickstart.md +++ b/extras/speaker-recognition/quickstart.md @@ -47,7 +47,7 @@ docker compose up --build -d **Important**: Accept the SSL certificate warning when prompted. Self-signed certificates are used for local development. -**Note**: Speaker Recognition runs on port 8444 (HTTPS) and 8081 (HTTP) to avoid conflicts with the main Friend-Lite backend which uses the standard ports 443/80. +**Note**: Speaker Recognition runs on port 8444 (HTTPS) and 8081 (HTTP) to avoid conflicts with the main Chronicle backend which uses the standard ports 443/80. 
**Need to customize other settings?** Copy `.env.template` to `.env` and modify: ```bash diff --git a/extras/speaker-recognition/src/simple_speaker_recognition/__init__.py b/extras/speaker-recognition/src/simple_speaker_recognition/__init__.py index bb656b8f..c2893e0f 100644 --- a/extras/speaker-recognition/src/simple_speaker_recognition/__init__.py +++ b/extras/speaker-recognition/src/simple_speaker_recognition/__init__.py @@ -18,7 +18,7 @@ """ __version__ = "0.1.0" -__author__ = "Friend-Lite Team" +__author__ = "Chronicle Team" # Import core classes for convenience from .core.audio_backend import AudioBackend diff --git a/extras/speaker-recognition/ssl/generate-ssl.sh b/extras/speaker-recognition/ssl/generate-ssl.sh index c1e832c5..6ef71bd8 100755 --- a/extras/speaker-recognition/ssl/generate-ssl.sh +++ b/extras/speaker-recognition/ssl/generate-ssl.sh @@ -1,7 +1,7 @@ #!/bin/bash set -e -# Generate self-signed SSL certificate for Friend-Lite Advanced Backend +# Generate self-signed SSL certificate for Chronicle Advanced Backend # Supports localhost, IP addresses, and domain names SERVER_ADDRESS="$1" diff --git a/extras/speaker-recognition/tests/test_speaker_service_integration.py b/extras/speaker-recognition/tests/test_speaker_service_integration.py index 5d8872da..58e55b61 100644 --- a/extras/speaker-recognition/tests/test_speaker_service_integration.py +++ b/extras/speaker-recognition/tests/test_speaker_service_integration.py @@ -34,7 +34,7 @@ CLEANUP_CONTAINERS = os.environ.get("CLEANUP_CONTAINERS", "true").lower() == "true" REBUILD = os.environ.get("REBUILD", "false").lower() == "true" -REPO_ROOT = Path(__file__).resolve().parents[3] # Go up to friend-lite root +REPO_ROOT = Path(__file__).resolve().parents[3] # Go up to chronicle root SPEAKER_DIR = REPO_ROOT / "extras" / "speaker-recognition" TEST_ASSETS_DIR = SPEAKER_DIR / "tests" / "assets" diff --git a/k8s-manifests/cross-namespace-rbac.yaml b/k8s-manifests/cross-namespace-rbac.yaml index 6beb54c6..632cbf2f 
100644 --- a/k8s-manifests/cross-namespace-rbac.yaml +++ b/k8s-manifests/cross-namespace-rbac.yaml @@ -6,16 +6,16 @@ metadata: name: speech-config-reader namespace: speech --- -# Role in friend-lite namespace to read ConfigMap/Secret +# Role in chronicle namespace to read ConfigMap/Secret apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: name: config-reader - namespace: friend-lite + namespace: chronicle rules: - apiGroups: [""] resources: ["configmaps", "secrets"] - resourceNames: ["friend-lite-config", "friend-lite-secrets"] + resourceNames: ["chronicle-config", "chronicle-secrets"] verbs: ["get", "list"] --- # RoleBinding to allow speech service account to read config @@ -23,7 +23,7 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: name: speech-config-access - namespace: friend-lite + namespace: chronicle subjects: - kind: ServiceAccount name: speech-config-reader diff --git a/quickstart.md b/quickstart.md index 8d7897f2..2f77e3fc 100644 --- a/quickstart.md +++ b/quickstart.md @@ -1,10 +1,10 @@ -# Friend-Lite Quick Start +# Chronicle Quick Start ## What You're Building (Complete Beginners Start Here!) You're setting up your own personal AI that: - **Runs on your home computer** - processes audio, stores memories, runs AI models -- **Connects to your phone** - where you use the Friend-Lite app and OMI device +- **Connects to your phone** - where you use the Chronicle app and OMI device - **Works everywhere** - your phone can access your home AI from anywhere Think of it like having Siri/Alexa, but it's **your own AI** running on **your hardware** with **your data**. 
@@ -13,12 +13,12 @@ Think of it like having Siri/Alexa, but it's **your own AI** running on **your h ### On Your Home Computer - **Docker** - Runs all the AI services (like having multiple apps in containers) -- **Friend-Lite Backend** - The main AI brain (transcription, memory, processing) +- **Chronicle Backend** - The main AI brain (transcription, memory, processing) - **Tailscale** - Creates secure tunnel so your phone can reach home ### On Your Phone - **Tailscale** - Connects securely to your home computer -- **Friend-Lite Mobile App** - Interface for your OMI device and conversations +- **Chronicle Mobile App** - Interface for your OMI device and conversations ### AI Services (Choose Your Path) @@ -101,14 +101,14 @@ The setup wizard will automatically download and configure: *Note: First-time setup will download AI models (this can take time and storage space)* -## Step 3: Download and Setup Friend-Lite +## Step 3: Download and Setup Chronicle ### On Your Home Computer **Download the code:** ```bash -git clone --recursive https://github.com/AnkushMalaker/friend-lite.git -cd friend-lite +git clone --recursive https://github.com/chronicle-ai/chronicle.git +cd chronicle ``` *The `--recursive` flag downloads the Mycelia submodule (see note below)* @@ -207,26 +207,26 @@ Before connecting your phone, make sure everything works: *Your browser will warn about "unsafe certificate" - click "Advanced" โ†’ "Proceed anyway"* -2. You should see the Friend-Lite dashboard +2. You should see the Chronicle dashboard 3. Click "Live Recording" in the sidebar 4. Test your microphone - record a short clip 5. Check that it gets transcribed and appears in "Conversations" 6. **Only proceed to phone setup when this works perfectly!** -## Step 5: Install Friend-Lite on Your Phone +## Step 5: Install Chronicle on Your Phone **No development setup needed - just download and install!** ### Android Users -1. Go to [GitHub Releases](https://github.com/AnkushMalaker/friend-lite/releases) -2.
Find the latest release and download `friend-lite-android.apk` +1. Go to [GitHub Releases](https://github.com/AnkushMalaker/chronicle/releases) +2. Find the latest release and download `chronicle-android.apk` 3. Install APK on your phone: - Enable "Install from unknown sources" in Android settings - Tap the downloaded APK file to install ### iPhone Users -1. Go to [GitHub Releases](https://github.com/AnkushMalaker/friend-lite/releases) -2. Find the latest release and download `friend-lite-ios.ipa` +1. Go to [GitHub Releases](https://github.com/AnkushMalaker/chronicle/releases) +2. Find the latest release and download `chronicle-ios.ipa` 3. Install using sideloading tool: - **AltStore** (recommended): [altstore.io](https://altstore.io) - **Sideloadly**: [sideloadly.io](https://sideloadly.io) @@ -235,7 +235,7 @@ Before connecting your phone, make sure everything works: ### Configure the App 1. **First**: Make sure Tailscale is running on your phone -2. Open Friend-Lite app +2. Open Chronicle app 3. Go to Settings โ†’ Backend Configuration 4. Enter Backend URL: `https://[your-tailscale-ip]` @@ -250,7 +250,7 @@ Before connecting your phone, make sure everything works: ## Step 6: Connect Your OMI Device 1. Turn on your OMI/Friend device (make sure it's charged) -2. Open Friend-Lite app on your phone +2. Open Chronicle app on your phone 3. Go to "Devices" tab โ†’ "Add New Device" 4. Follow Bluetooth pairing instructions 5. Once connected, start a conversation! 
diff --git a/run-test.sh b/run-test.sh index fce082e5..ebc39a07 100755 --- a/run-test.sh +++ b/run-test.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Friend-Lite Local Test Runner +# Chronicle Local Test Runner # Runs the same tests as GitHub CI but configured for local development # Usage: ./run-test.sh [advanced-backend|speaker-recognition|all] @@ -63,12 +63,12 @@ run_speaker_recognition_tests() { } # Main execution -print_info "Friend-Lite Local Test Runner" +print_info "Chronicle Local Test Runner" print_info "==============================" # Check if we're in the right directory if [ ! -f "CLAUDE.md" ]; then - print_error "Please run this script from the friend-lite root directory" + print_error "Please run this script from the chronicle root directory" exit 1 fi diff --git a/scripts/generate-k8s-configs.py b/scripts/generate-k8s-configs.py index 9b800fff..2eea45aa 100755 --- a/scripts/generate-k8s-configs.py +++ b/scripts/generate-k8s-configs.py @@ -12,7 +12,7 @@ from env_utils import get_resolved_env_vars, classify_secrets -def generate_k8s_manifests(namespace: str = "friend-lite"): +def generate_k8s_manifests(namespace: str = "chronicle"): """Generate Kubernetes ConfigMap and Secret manifests""" print(f"Generating Kubernetes ConfigMap and Secret for namespace {namespace}...") @@ -30,10 +30,10 @@ def generate_k8s_manifests(namespace: str = "friend-lite"): f.write("apiVersion: v1\n") f.write("kind: ConfigMap\n") f.write("metadata:\n") - f.write(f" name: friend-lite-config\n") + f.write(f" name: chronicle-config\n") f.write(f" namespace: {namespace}\n") f.write(" labels:\n") - f.write(" app.kubernetes.io/name: friend-lite\n") + f.write(" app.kubernetes.io/name: chronicle\n") f.write(" app.kubernetes.io/component: config\n") f.write("data:\n") @@ -50,10 +50,10 @@ def generate_k8s_manifests(namespace: str = "friend-lite"): f.write("kind: Secret\n") f.write("type: Opaque\n") f.write("metadata:\n") - f.write(f" name: friend-lite-secrets\n") + f.write(f" name: 
chronicle-secrets\n") f.write(f" namespace: {namespace}\n") f.write(" labels:\n") - f.write(" app.kubernetes.io/name: friend-lite\n") + f.write(" app.kubernetes.io/name: chronicle\n") f.write(" app.kubernetes.io/component: secrets\n") f.write("data:\n") @@ -74,7 +74,7 @@ def generate_k8s_manifests(namespace: str = "friend-lite"): def main(): """Main entry point""" - namespace = sys.argv[1] if len(sys.argv) > 1 else "friend-lite" + namespace = sys.argv[1] if len(sys.argv) > 1 else "chronicle" generate_k8s_manifests(namespace) if __name__ == "__main__": diff --git a/scripts/k8s/cluster-status.sh b/scripts/k8s/cluster-status.sh index 9733066f..8f3cb644 100644 --- a/scripts/k8s/cluster-status.sh +++ b/scripts/k8s/cluster-status.sh @@ -5,7 +5,7 @@ # # Usage: ./scripts/cluster-status.sh [namespace] # Example: ./scripts/cluster-status.sh -# Example: ./scripts/cluster-status.sh friend-lite +# Example: ./scripts/cluster-status.sh chronicle set -e diff --git a/scripts/k8s/load-env.sh b/scripts/k8s/load-env.sh index 97f873eb..a9ab113f 100644 --- a/scripts/k8s/load-env.sh +++ b/scripts/k8s/load-env.sh @@ -31,7 +31,7 @@ load_config_env() { export SPEAKER_NODE="${SPEAKER_NODE:-}" export CONTAINER_REGISTRY="${CONTAINER_REGISTRY:-localhost:32000}" export INFRASTRUCTURE_NAMESPACE="${INFRASTRUCTURE_NAMESPACE:-root}" - export APPLICATION_NAMESPACE="${APPLICATION_NAMESPACE:-friend-lite}" + export APPLICATION_NAMESPACE="${APPLICATION_NAMESPACE:-chronicle}" export STORAGE_CLASS="${STORAGE_CLASS:-openebs-hostpath}" } diff --git a/scripts/manage-audio-files.sh b/scripts/manage-audio-files.sh index 981d38cf..f02547f1 100755 --- a/scripts/manage-audio-files.sh +++ b/scripts/manage-audio-files.sh @@ -5,7 +5,7 @@ set -e -NAMESPACE="friend-lite" +NAMESPACE="chronicle" POD_NAME="" AUDIO_CHUNKS_DIR="/app/data/audio_chunks" DATA_DIR="/app/data" diff --git a/services.py b/services.py index a3d734d4..ba5fed2f 100755 --- a/services.py +++ b/services.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ 
-Friend-Lite Service Management +Chronicle Service Management Start, stop, and manage configured services """ @@ -265,7 +265,7 @@ def show_status(): console.print("\n๐Ÿ’ก [dim]Use 'python services.py start --all' to start all configured services[/dim]") def main(): - parser = argparse.ArgumentParser(description="Friend-Lite Service Management") + parser = argparse.ArgumentParser(description="Chronicle Service Management") subparsers = parser.add_subparsers(dest='command', help='Available commands') # Start command diff --git a/skaffold.yaml b/skaffold.yaml index f40d4407..279566ce 100644 --- a/skaffold.yaml +++ b/skaffold.yaml @@ -1,7 +1,7 @@ apiVersion: skaffold/v4beta13 kind: Config metadata: - name: friend-lite + name: chronicle build: tagPolicy: dateTime: @@ -149,7 +149,7 @@ profiles: image.repository: "{{.IMAGE_REPO_advanced_backend}}" image.tag: "{{.IMAGE_TAG_advanced_backend}}" # Override specific Kubernetes-specific values (not in env file) - env.MONGODB_URI: "mongodb://mongodb.{{.INFRASTRUCTURE_NAMESPACE}}.svc.cluster.local:27017/friend-lite" + env.MONGODB_URI: "mongodb://mongodb.{{.INFRASTRUCTURE_NAMESPACE}}.svc.cluster.local:27017/chronicle" env.QDRANT_BASE_URL: "qdrant.{{.INFRASTRUCTURE_NAMESPACE}}.svc.cluster.local" env.REDIS_URL: "redis://redis-master.{{.INFRASTRUCTURE_NAMESPACE}}.svc.cluster.local:6379/0" persistence.storageClass: "openebs-hostpath" diff --git a/status.py b/status.py index 1ae9a353..babf6cb7 100644 --- a/status.py +++ b/status.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -Friend-Lite Health Status Checker +Chronicle Health Status Checker Show runtime health status of all services """ @@ -162,7 +162,7 @@ def get_service_health(service_name: str) -> Dict[str, Any]: def show_quick_status(): """Show quick status overview""" - console.print("\n๐Ÿฅ [bold]Friend-Lite Health Status[/bold]\n") + console.print("\n๐Ÿฅ [bold]Chronicle Health Status[/bold]\n") table = Table(title="Service Status Overview") table.add_column("Service", 
style="cyan", no_wrap=True) @@ -215,7 +215,7 @@ def show_quick_status(): def show_detailed_status(): """Show detailed status with backend health breakdown""" - console.print("\n๐Ÿฅ [bold]Friend-Lite Detailed Health Status[/bold]\n") + console.print("\n๐Ÿฅ [bold]Chronicle Detailed Health Status[/bold]\n") # Get all service statuses for service_name, service_info in SERVICES.items(): @@ -320,7 +320,7 @@ def show_json_status(): def main(): parser = argparse.ArgumentParser( - description="Friend-Lite Health Status Checker", + description="Chronicle Health Status Checker", formatter_class=argparse.RawDescriptionHelpFormatter, epilog=""" Examples: diff --git a/tests/.env.test b/tests/.env.test index 4317e347..974dcee2 100644 --- a/tests/.env.test +++ b/tests/.env.test @@ -11,4 +11,14 @@ ADMIN_PASSWORD=test-admin-password-123 # Test configuration TEST_TIMEOUT=120 -TEST_DEVICE_NAME=robot-test \ No newline at end of file +TEST_DEVICE_NAME=robot-test + +MEMORY_PROVIDER=chronicle + +# Docker container names (test environment) +BACKEND_CONTAINER=advanced-chronicle-backend-test-1 +WORKERS_CONTAINER=advanced-workers-test-1 +MONGO_CONTAINER=advanced-mongo-test-1 +REDIS_CONTAINER=advanced-redis-test-1 +QDRANT_CONTAINER=advanced-qdrant-test-1 +WEBUI_CONTAINER=advanced-webui-test-1 \ No newline at end of file diff --git a/tests/Makefile b/tests/Makefile index ac370508..707743e4 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -1,4 +1,4 @@ -# Friend-Lite Test Makefile +# Chronicle Test Makefile # Shortcuts for running tests .PHONY: help all clean @@ -8,7 +8,7 @@ OUTPUTDIR ?= results TEST_DIR = endpoints integration infrastructure help: - @echo "Friend-Lite Test Targets:" + @echo "Chronicle Test Targets:" @echo "" @echo "Running Tests:" @echo " make all - Run all tests" diff --git a/tests/README.md b/tests/README.md index 1a6c7480..a16a0281 100644 --- a/tests/README.md +++ b/tests/README.md @@ -1,6 +1,6 @@ -# Friend-Lite API Tests +# Chronicle API Tests -Comprehensive Robot 
Framework test suite for the Friend-Lite advanced backend API endpoints. +Comprehensive Robot Framework test suite for the Chronicle advanced backend API endpoints. ## Quick Start @@ -87,7 +87,7 @@ If you already have the backend running, you can use the Makefile directly: ## Running Tests ### Prerequisites -1. Friend-Lite backend running at `http://localhost:8001` (or set `API_URL` in `.env`) +1. Chronicle backend running at `http://localhost:8001` (or set `API_URL` in `.env`) 2. Admin user credentials configured in `.env` 3. Robot Framework and RequestsLibrary installed diff --git a/tests/TESTING_USER_GUIDE.md b/tests/TESTING_USER_GUIDE.md index e2b3b8ad..d1ebbd9d 100644 --- a/tests/TESTING_USER_GUIDE.md +++ b/tests/TESTING_USER_GUIDE.md @@ -1,6 +1,6 @@ # Robot Framework Testing User Guide -A beginner-friendly guide to setting up VSCode for Robot Framework testing, running tests, and creating new tests for the Friend-Lite project. +A beginner-friendly guide to setting up VSCode for Robot Framework testing, running tests, and creating new tests for the Chronicle project. 
## Table of Contents - [VSCode Setup](#vscode-setup) diff --git a/tests/browser/browser_auth.robot b/tests/browser/browser_auth.robot index 430f9fcc..90820c71 100644 --- a/tests/browser/browser_auth.robot +++ b/tests/browser/browser_auth.robot @@ -25,7 +25,7 @@ Test Browser Can Access Login Page Fill Text id=password ${ADMIN_PASSWORD} Click button[type="submit"] # Verify that we are logged in by checking for the presence of the dashboard - Get Element text=Friend-Lite Dashboard + Get Element text=Chronicle Dashboard Log Successfully accessed login page and logged in INFO diff --git a/tests/infrastructure/infra_tests.robot b/tests/infrastructure/infra_tests.robot index f5429988..48b1a057 100644 --- a/tests/infrastructure/infra_tests.robot +++ b/tests/infrastructure/infra_tests.robot @@ -262,7 +262,8 @@ WebSocket Disconnect Conversation End Reason Test Send Audio Chunks To Stream ${stream_id} ${TEST_AUDIO_FILE} num_chunks=200 # Wait for conversation job to be created and conversation_id to be populated - ${conv_jobs}= Wait Until Keyword Succeeds 30s 2s + # Transcription + speech analysis takes time (30-60s with queue) + ${conv_jobs}= Wait Until Keyword Succeeds 60s 3s ... Job Type Exists For Client open_conversation ${device_name} # Wait for conversation_id in job meta (created asynchronously) @@ -287,3 +288,4 @@ WebSocket Disconnect Conversation End Reason Test Should Not Be Equal ${conversation}[completed_at] ${None} [Teardown] Run Keyword And Ignore Error Close Audio Stream ${stream_id} + diff --git a/tests/libs/audio_stream_library.py b/tests/libs/audio_stream_library.py index 7c2ddcee..25399175 100644 --- a/tests/libs/audio_stream_library.py +++ b/tests/libs/audio_stream_library.py @@ -103,6 +103,34 @@ def send_audio_chunks( ) +def send_audio_stop_event(stream_id: str) -> None: + """Send audio-stop event without closing the WebSocket connection. 
+ + This is used to test the user_stopped end_reason scenario where + the user manually stops recording but the connection remains open. + """ + session = _manager._sessions.get(stream_id) + if not session: + raise ValueError(f"Stream {stream_id} not found") + + import asyncio + + async def _send_stop(): + try: + await session.client.send_audio_stop() + session.audio_stopped = True + except Exception as e: + session.error = str(e) + raise + + # Run in the stream's event loop + future = asyncio.run_coroutine_threadsafe(_send_stop(), session.loop) + future.result(timeout=5) # Wait for audio-stop to be sent + + if session.error: + raise RuntimeError(f"Failed to send audio-stop: {session.error}") + + def stop_audio_stream(stream_id: str) -> int: """Stop an audio stream and close the connection.""" return _manager.stop_stream(stream_id) diff --git a/tests/resources/websocket_keywords.robot b/tests/resources/websocket_keywords.robot index 25b8499c..f1ee54b4 100644 --- a/tests/resources/websocket_keywords.robot +++ b/tests/resources/websocket_keywords.robot @@ -89,6 +89,15 @@ Send Audio Chunks To Stream Log Sent ${chunks_sent} chunks to stream ${stream_id} RETURN ${chunks_sent} +Send Audio Stop Event + [Documentation] Send audio-stop event without closing the WebSocket + ... 
This simulates a user manually stopping recording + [Arguments] ${stream_id} + + # Call the Python library method directly + Send Audio Stop Event ${stream_id} + Log Sent audio-stop event to stream ${stream_id} + Close Audio Stream [Documentation] Stop an audio stream and close the connection [Arguments] ${stream_id} diff --git a/tests/setup/setup_keywords.robot b/tests/setup/setup_keywords.robot index e3809c1c..3fe7bd17 100644 --- a/tests/setup/setup_keywords.robot +++ b/tests/setup/setup_keywords.robot @@ -106,7 +106,7 @@ Start Docker Services # Clean up any stopped/stuck containers first Run Process docker compose -f ${compose_file} down -v cwd=${working_dir} shell=True - Run Process docker rm -f advanced-mongo-test-1 advanced-redis-test-1 advanced-qdrant-test-1 advanced-friend-backend-test-1 advanced-workers-test-1 shell=True + Run Process docker rm -f ${MONGO_CONTAINER} ${REDIS_CONTAINER} ${QDRANT_CONTAINER} ${BACKEND_CONTAINER} ${WORKERS_CONTAINER} ${WEBUI_CONTAINER} shell=True # Start containers IF ${build} diff --git a/tests/setup/test_env.py b/tests/setup/test_env.py index a333f476..51589fd2 100644 --- a/tests/setup/test_env.py +++ b/tests/setup/test_env.py @@ -69,4 +69,4 @@ REDIS_CONTAINER = f"{COMPOSE_PROJECT_NAME}-redis-test-1" BACKEND_CONTAINER = f"{COMPOSE_PROJECT_NAME}-friend-backend-test-1" MONGO_CONTAINER = f"{COMPOSE_PROJECT_NAME}-mongo-test-1" -QDRANT_CONTAINER = f"{COMPOSE_PROJECT_NAME}-qdrant-test-1" \ No newline at end of file +QDRANT_CONTAINER = f"{COMPOSE_PROJECT_NAME}-qdrant-test-1" diff --git a/tests/setup/test_manager_keywords.robot b/tests/setup/test_manager_keywords.robot index a7ad5783..65506551 100644 --- a/tests/setup/test_manager_keywords.robot +++ b/tests/setup/test_manager_keywords.robot @@ -34,14 +34,16 @@ Clear Test Databases Log To Console Clearing test databases and audio files... 
# Clear MongoDB collections but preserve admin user and fixtures - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.users.deleteMany({'email': {\\$ne:'${ADMIN_EMAIL}'}})" shell=True + Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.users.deleteMany({'email': {\\$ne:'${ADMIN_EMAIL}'}})" shell=True - # Clear conversations and audio_chunks except those tagged as fixtures - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.conversations.deleteMany({\\$or: [{'is_fixture': {\\$exists: false}}, {'is_fixture': false}]})" shell=True - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.audio_chunks.deleteMany({\\$or: [{'is_fixture': {\\$exists: false}}, {'is_fixture': false}]})" shell=True + # Clear conversations except those tagged as fixtures + Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.conversations.deleteMany({\\$or: [{'is_fixture': {\\$exists: false}}, {'is_fixture': false}]})" shell=True + + # Clear job references from remaining conversations to prevent "No such job" errors + Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.conversations.updateMany({}, {\\$unset: {'transcription_job_id': '', 'speaker_job_id': '', 'memory_job_id': ''}})" shell=True # Count fixtures for logging - ${result}= Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.conversations.countDocuments({'is_fixture': true})" --quiet shell=True + ${result}= Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.conversations.countDocuments({'is_fixture': true})" --quiet shell=True ${fixture_count}= Strip String ${result.stdout} IF '${fixture_count}' != '0' @@ -51,7 +53,7 @@ Clear Test Databases END # Clear admin user's registered_clients dict to prevent client_id counter increments - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.users.updateOne({'email':'${ADMIN_EMAIL}'}, {\\$set: {'registered_clients': 
{}}})" shell=True + Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.users.updateOne({'email':'${ADMIN_EMAIL}'}, {\\$set: {'registered_clients': {}}})" shell=True # Clear Qdrant collections # Note: Fixture memories will be lost here unless we implement Qdrant metadata filtering @@ -65,13 +67,13 @@ Clear Test Databases Log To Console Audio files cleared (fixtures/ subfolder preserved) # Clear container audio files (except fixtures subfolder) - Run Process bash -c docker exec advanced-friend-backend-test-1 find /app/audio_chunks -maxdepth 1 -name "*.wav" -delete || true shell=True - Run Process bash -c docker exec advanced-friend-backend-test-1 find /app/debug_dir -name "*" -type f -delete || true shell=True + Run Process bash -c docker exec ${BACKEND_CONTAINER} find /app/audio_chunks -maxdepth 1 -name "*.wav" -delete || true shell=True + Run Process bash -c docker exec ${BACKEND_CONTAINER} find /app/debug_dir -name "*" -type f -delete || true shell=True # Clear Redis queues and job registries (preserve worker registrations, failed and completed jobs) # Delete all rq:* keys except worker registrations (rq:worker:*), failed jobs (rq:failed:*), and completed jobs (rq:finished:*) ${redis_clear_script}= Set Variable redis-cli --scan --pattern "rq:*" | grep -Ev "^rq:(worker|failed|finished)" | xargs -r redis-cli DEL; redis-cli --scan --pattern "audio:*" | xargs -r redis-cli DEL; redis-cli --scan --pattern "consumer:*" | xargs -r redis-cli DEL - Run Process docker exec advanced-redis-test-1 sh -c ${redis_clear_script} shell=True + Run Process docker exec ${REDIS_CONTAINER} sh -c ${redis_clear_script} shell=True Log To Console Redis queues and job registries cleared (worker registrations preserved) Clear All Test Data @@ -79,9 +81,8 @@ Clear All Test Data Log To Console Clearing ALL test data including admin user and fixtures... 
# Wipe all MongoDB collections - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.users.deleteMany({})" shell=True - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.conversations.deleteMany({})" shell=True - Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.audio_chunks.deleteMany({})" shell=True + Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.users.deleteMany({})" shell=True + Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.conversations.deleteMany({})" shell=True Log To Console MongoDB completely cleared # Clear Qdrant @@ -93,7 +94,7 @@ Clear All Test Data Run Process bash -c rm -rf ${EXECDIR}/backends/advanced/data/test_debug_dir/* || true shell=True # Clear all Redis data - Run Process docker exec advanced-redis-test-1 redis-cli FLUSHALL shell=True + Run Process docker exec ${REDIS_CONTAINER} redis-cli FLUSHALL shell=True Log To Console All test data cleared @@ -125,13 +126,9 @@ Create Fixture Conversation Should Not Be Empty ${transcript} Fixture conversation has no transcript # Tag this conversation as a fixture in MongoDB so cleanup preserves it - ${result}= Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.conversations.updateOne({'conversation_id': '${conversation_id}'}, {\\$set: {'is_fixture': true}})" shell=True + ${result}= Run Process docker exec ${MONGO_CONTAINER} mongosh test_db --eval "db.conversations.updateOne({'conversation_id': '${conversation_id}'}, {\\$set: {'is_fixture': true}})" shell=True Should Be Equal As Integers ${result.rc} 0 Failed to tag conversation as fixture: ${result.stderr} - # Also tag audio_chunks - ${result2}= Run Process docker exec advanced-mongo-test-1 mongosh test_db --eval "db.audio_chunks.updateMany({'conversation_id': '${conversation_id}'}, {\\$set: {'is_fixture': true}})" shell=True - Should Be Equal As Integers ${result2.rc} 0 Failed to tag audio chunks as fixture: 
${result2.stderr} - Log To Console โœ“ Audio files stored in fixtures/ subfolder ${transcript_len}= Get Length ${transcript} diff --git a/tests/tags.md b/tests/tags.md index e41874f5..6ddb6fba 100644 --- a/tests/tags.md +++ b/tests/tags.md @@ -1,10 +1,10 @@ # Robot Framework Test Tags Reference -This document defines the standard tags used across the Friend-Lite test suite. +This document defines the standard tags used across the Chronicle test suite. ## Simplified Tag Set -Friend-Lite uses a **minimal, focused tag set** for test organization. Only 11 tags are permitted. +Chronicle uses a **minimal, focused tag set** for test organization. Only 11 tags are permitted. ## Tag Format diff --git a/wizard.py b/wizard.py index 25ef890f..6e6ad6cb 100755 --- a/wizard.py +++ b/wizard.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -Friend-Lite Root Setup Orchestrator +Chronicle Root Setup Orchestrator Handles service selection and delegation only - no configuration duplication """ @@ -100,7 +100,7 @@ def check_service_exists(service_name, service_config): def select_services(): """Let user select which services to setup""" - console.print("๐Ÿš€ [bold cyan]Friend-Lite Service Setup[/bold cyan]") + console.print("๐Ÿš€ [bold cyan]Chronicle Service Setup[/bold cyan]") console.print("Select which services to configure:\n") selected = [] @@ -302,7 +302,7 @@ def setup_git_hooks(): def main(): """Main orchestration logic""" - console.print("๐ŸŽ‰ [bold green]Welcome to Friend-Lite![/bold green]\n") + console.print("๐ŸŽ‰ [bold green]Welcome to Chronicle![/bold green]\n") # Setup git hooks first setup_git_hooks() @@ -411,7 +411,7 @@ def main(): console.print("4. 
Stop services when done:") console.print(" [cyan]uv run --with-requirements setup-requirements.txt python services.py stop --all[/cyan]") - console.print(f"\n๐Ÿš€ [bold]Enjoy Friend-Lite![/bold]") + console.print(f"\n๐Ÿš€ [bold]Enjoy Chronicle![/bold]") # Show individual service usage console.print(f"\n๐Ÿ’ก [dim]Tip: You can also setup services individually:[/dim]") From 9439656430861ffcc2e8d9f5a280144e7871000e Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Thu, 11 Dec 2025 14:18:28 +0000 Subject: [PATCH 13/31] changed mobile app package to friend-lite for the moment --- app/app.json | 10 +++++----- app/app/components/DeviceDetails.tsx | 2 +- app/app/components/DeviceListItem.tsx | 2 +- app/app/hooks/useAudioListener.ts | 2 +- app/app/hooks/useDeviceConnection.ts | 2 +- app/app/hooks/useDeviceScanning.ts | 2 +- app/app/index.tsx | 4 ++-- app/package.json | 4 ++-- 8 files changed, 14 insertions(+), 14 deletions(-) diff --git a/app/app.json b/app/app.json index c2446e12..66fbb8c2 100644 --- a/app/app.json +++ b/app/app.json @@ -1,7 +1,7 @@ { "expo": { - "name": "chronicle-app", - "slug": "chronicle-app", + "name": "friend-lite-app", + "slug": "friend-lite-app", "version": "1.0.0", "orientation": "portrait", "icon": "./assets/icon.png", @@ -17,9 +17,9 @@ ], "ios": { "supportsTablet": true, - "bundleIdentifier": "com.cupbearer5517.chronicle", + "bundleIdentifier": "com.cupbearer5517.friendlite", "infoPlist": { - "NSMicrophoneUsageDescription": "Chronicle needs access to your microphone to stream audio to the backend for processing." + "NSMicrophoneUsageDescription": "Friend Lite needs access to your microphone to stream audio to the backend for processing." 
} }, "android": { @@ -27,7 +27,7 @@ "foregroundImage": "./assets/adaptive-icon.png", "backgroundColor": "#ffffff" }, - "package": "com.cupbearer5517.chronicle", + "package": "com.cupbearer5517.friendlite", "permissions": [ "android.permission.BLUETOOTH", "android.permission.BLUETOOTH_ADMIN", diff --git a/app/app/components/DeviceDetails.tsx b/app/app/components/DeviceDetails.tsx index 3bd22b4a..ebf204c3 100644 --- a/app/app/components/DeviceDetails.tsx +++ b/app/app/components/DeviceDetails.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { View, Text, TouchableOpacity, StyleSheet, TextInput } from 'react-native'; -import { BleAudioCodec } from 'chronicle-react-native'; +import { BleAudioCodec } from 'friend-lite-react-native'; interface DeviceDetailsProps { // Device Info diff --git a/app/app/components/DeviceListItem.tsx b/app/app/components/DeviceListItem.tsx index 3da559de..a8083035 100644 --- a/app/app/components/DeviceListItem.tsx +++ b/app/app/components/DeviceListItem.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { View, Text, TouchableOpacity, StyleSheet } from 'react-native'; -import { OmiDevice } from 'chronicle-react-native'; +import { OmiDevice } from 'friend-lite-react-native'; interface DeviceListItemProps { device: OmiDevice; diff --git a/app/app/hooks/useAudioListener.ts b/app/app/hooks/useAudioListener.ts index 1dcf225e..391ed125 100644 --- a/app/app/hooks/useAudioListener.ts +++ b/app/app/hooks/useAudioListener.ts @@ -1,6 +1,6 @@ import { useState, useRef, useCallback, useEffect } from 'react'; import { Alert } from 'react-native'; -import { OmiConnection } from 'chronicle-react-native'; +import { OmiConnection } from 'friend-lite-react-native'; import { Subscription, ConnectionPriority } from 'react-native-ble-plx'; // OmiConnection might use this type for subscriptions interface UseAudioListener { diff --git a/app/app/hooks/useDeviceConnection.ts b/app/app/hooks/useDeviceConnection.ts index 964e4d4e..e729169e 100644 --- 
a/app/app/hooks/useDeviceConnection.ts +++ b/app/app/hooks/useDeviceConnection.ts @@ -1,6 +1,6 @@ import { useState, useCallback } from 'react'; import { Alert } from 'react-native'; -import { OmiConnection, BleAudioCodec, OmiDevice } from 'chronicle-react-native'; +import { OmiConnection, BleAudioCodec, OmiDevice } from 'friend-lite-react-native'; interface UseDeviceConnection { connectedDevice: OmiDevice | null; diff --git a/app/app/hooks/useDeviceScanning.ts b/app/app/hooks/useDeviceScanning.ts index f4c16ff3..d7780266 100644 --- a/app/app/hooks/useDeviceScanning.ts +++ b/app/app/hooks/useDeviceScanning.ts @@ -1,6 +1,6 @@ import { useState, useEffect, useCallback, useRef } from 'react'; import { BleManager, State as BluetoothState } from 'react-native-ble-plx'; -import { OmiConnection, OmiDevice } from 'chronicle-react-native'; // Assuming this is the correct import for Omi types +import { OmiConnection, OmiDevice } from 'friend-lite-react-native'; // Assuming this is the correct import for Omi types interface UseDeviceScanning { devices: OmiDevice[]; diff --git a/app/app/index.tsx b/app/app/index.tsx index 2b20cb7b..8bb1234a 100644 --- a/app/app/index.tsx +++ b/app/app/index.tsx @@ -1,6 +1,6 @@ import React, { useRef, useCallback, useEffect, useState } from 'react'; import { StyleSheet, Text, View, SafeAreaView, ScrollView, Platform, FlatList, ActivityIndicator, Alert, Switch, Button, TouchableOpacity, KeyboardAvoidingView } from 'react-native'; -import { OmiConnection } from 'chronicle-react-native'; // OmiDevice also comes from here +import { OmiConnection } from 'friend-lite-react-native'; // OmiDevice also comes from here import { State as BluetoothState } from 'react-native-ble-plx'; // Import State from ble-plx // Hooks @@ -521,7 +521,7 @@ export default function App() { contentContainerStyle={styles.content} keyboardShouldPersistTaps="handled" > - Chronicle + Friend Lite {/* Backend Connection - moved to top */} Date: Thu, 11 Dec 2025 15:13:37 +0000 
Subject: [PATCH 14/31] rabbit aI fixes --- backends/advanced/.env.template | 8 ++++---- .../src/advanced_omi_backend/models/conversation.py | 1 + .../src/advanced_omi_backend/services/memory/config.py | 8 +++++++- .../services/memory/service_factory.py | 4 +++- .../src/advanced_omi_backend/services/mycelia_sync.py | 2 +- backends/advanced/webui/package-lock.json | 8 ++++++++ 6 files changed, 24 insertions(+), 7 deletions(-) diff --git a/backends/advanced/.env.template b/backends/advanced/.env.template index 60d2c99e..e9f1e3bf 100644 --- a/backends/advanced/.env.template +++ b/backends/advanced/.env.template @@ -99,9 +99,9 @@ QDRANT_BASE_URL=qdrant # MEMORY PROVIDER CONFIGURATION # ======================================== -# Memory Provider: "friend_lite" (default), "openmemory_mcp", or "mycelia" +# Memory Provider: "chronicle" (default), "openmemory_mcp", or "mycelia" # -# Friend-Lite (default): In-house memory system with full control +# Chronicle (default): In-house memory system with full control # - Custom LLM-powered extraction with individual fact storage # - Smart deduplication and memory updates (ADD/UPDATE/DELETE) # - Direct Qdrant vector storage @@ -121,7 +121,7 @@ QDRANT_BASE_URL=qdrant # - Requires Mycelia server setup (extras/mycelia) # # See MEMORY_PROVIDERS.md for detailed comparison -MEMORY_PROVIDER=friend_lite +MEMORY_PROVIDER=chronicle # ---------------------------------------- # OpenMemory MCP Configuration @@ -131,7 +131,7 @@ MEMORY_PROVIDER=friend_lite # cd extras/openmemory-mcp && docker compose up -d # # OPENMEMORY_MCP_URL=http://host.docker.internal:8765 -# OPENMEMORY_CLIENT_NAME=friend_lite +# OPENMEMORY_CLIENT_NAME=chronicle # OPENMEMORY_USER_ID=openmemory # OPENMEMORY_TIMEOUT=30 diff --git a/backends/advanced/src/advanced_omi_backend/models/conversation.py b/backends/advanced/src/advanced_omi_backend/models/conversation.py index 87dc731a..01dd5d96 100644 --- a/backends/advanced/src/advanced_omi_backend/models/conversation.py +++ 
b/backends/advanced/src/advanced_omi_backend/models/conversation.py @@ -31,6 +31,7 @@ class MemoryProvider(str, Enum): CHRONICLE = "chronicle" OPENMEMORY_MCP = "openmemory_mcp" MYCELIA = "mycelia" + FRIEND_LITE = "friend_lite" # Legacy value class ConversationStatus(str, Enum): """Conversation processing status.""" diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/config.py b/backends/advanced/src/advanced_omi_backend/services/memory/config.py index 7560d88f..f3943f29 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/config.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/config.py @@ -146,9 +146,15 @@ def build_memory_config_from_env() -> MemoryConfig: try: # Determine memory provider memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() + + # Map legacy provider names to current names + if memory_provider in ("friend-lite", "friend_lite"): + memory_logger.info(f"๐Ÿ”ง Mapping legacy provider '{memory_provider}' to 'chronicle'") + memory_provider = "chronicle" + if memory_provider not in [p.value for p in MemoryProvider]: raise ValueError(f"Unsupported memory provider: {memory_provider}") - + memory_provider_enum = MemoryProvider(memory_provider) # For OpenMemory MCP, configuration is much simpler diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py b/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py index dc57dbe9..5607d8ff 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py @@ -156,7 +156,9 @@ def get_service_info() -> dict: # Try to determine provider from service type if "OpenMemoryMCP" in info["service_type"]: info["memory_provider"] = "openmemory_mcp" - elif "Chronicle" in info["service_type"] or "MemoryService" in info["service_type"]: + elif info["service_type"] == "ChronicleMemoryService": 
info["memory_provider"] = "chronicle" + elif info["service_type"] == "MyceliaMemoryService": + info["memory_provider"] = "mycelia" return info \ No newline at end of file diff --git a/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py index 84011068..87f3b944 100644 --- a/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py +++ b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py @@ -235,7 +235,7 @@ async def sync_admin_on_startup(): logger.info("๐Ÿ”„ Starting Mycelia OAuth synchronization...") # Check if Mycelia sync is enabled - memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle") + memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() if memory_provider != "mycelia": logger.info("Mycelia sync skipped (MEMORY_PROVIDER != mycelia)") return diff --git a/backends/advanced/webui/package-lock.json b/backends/advanced/webui/package-lock.json index 4582a222..7cf02b1a 100644 --- a/backends/advanced/webui/package-lock.json +++ b/backends/advanced/webui/package-lock.json @@ -20,6 +20,7 @@ }, "devDependencies": { "@types/d3": "^7.4.3", + "@types/frappe-gantt": "^0.9.0", "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", "@types/react-vertical-timeline-component": "^3.3.6", @@ -1990,6 +1991,13 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/frappe-gantt": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@types/frappe-gantt/-/frappe-gantt-0.9.0.tgz", + "integrity": "sha512-n00ElvRvJ1/+HkJwt57yjnTtAM7FcH/pEV9LbRCy3+hR39TY6l0mQuy4o909uxvw97aCNhQjNh8J8xACKJ2G3w==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/geojson": { "version": "7946.0.16", "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", From 144c58407776ec5319404d76196248217276748a Mon Sep 17 00:00:00 2001 From: 01PrathamS Date: Mon, 8 Dec 2025 11:55:47 +0530 Subject: [PATCH 15/31] UPDATE: init with 
default model gpt-5-mini --- backends/advanced/init.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backends/advanced/init.py b/backends/advanced/init.py index 756ca371..773ea11e 100644 --- a/backends/advanced/init.py +++ b/backends/advanced/init.py @@ -227,7 +227,7 @@ def setup_llm(self): else: api_key = self.prompt_value("OpenAI API key (leave empty to skip)", "") - model = self.prompt_value("OpenAI model", "gpt-4o-mini") + model = self.prompt_value("OpenAI model", "gpt-5-mini") base_url = self.prompt_value("OpenAI base URL (for proxies/compatible APIs)", "https://api.openai.com/v1") if api_key: From b132561b90a7ce0e68a3fd2ae3f6c4a71db1062b Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Tue, 9 Dec 2025 11:03:16 +0000 Subject: [PATCH 16/31] Added changes from rabbitAI # Conflicts: # README-K8S.md # quickstart.md --- README-K8S.md | 2 ++ .../scripts/create_mycelia_api_key.py | 3 +- .../src/advanced_omi_backend/config.py | 1 + .../controllers/memory_controller.py | 32 +++++++++++++++-- .../routers/modules/memory_routes.py | 10 ++++++ .../services/mycelia_sync.py | 12 ++++--- .../workers/memory_jobs.py | 1 - .../advanced/webui/src/hooks/useD3Zoom.ts | 14 ++++++-- .../advanced/webui/src/pages/MemoryDetail.tsx | 20 +++++------ backends/advanced/webui/src/services/api.ts | 13 +++---- quickstart.md | 34 +++++++++++++++++++ 11 files changed, 115 insertions(+), 27 deletions(-) diff --git a/README-K8S.md b/README-K8S.md index 9d83350f..328d7fe5 100644 --- a/README-K8S.md +++ b/README-K8S.md @@ -275,6 +275,8 @@ chronicle/ ls -la backends/advanced/.env.template ``` + > **Note:** The `--recursive` flag downloads the optional Mycelia submodule (an alternative memory backend with timeline visualization). Most deployments use the default Friend-Lite memory system and don't need Mycelia. + 2. 
**Install Required Tools** **kubectl** (required for Skaffold and Helm): diff --git a/backends/advanced/scripts/create_mycelia_api_key.py b/backends/advanced/scripts/create_mycelia_api_key.py index a517af7b..1e4bcb90 100755 --- a/backends/advanced/scripts/create_mycelia_api_key.py +++ b/backends/advanced/scripts/create_mycelia_api_key.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 """Create a proper Mycelia API key (not OAuth client) for Chronicle user.""" +import base64 import os import sys import secrets @@ -20,7 +21,6 @@ def hash_api_key_with_salt(api_key: str, salt: bytes) -> str: """Hash API key with salt (matches Mycelia's hashApiKey function).""" # SHA256(salt + apiKey) in base64 - import base64 h = hashlib.sha256() h.update(salt) h.update(api_key.encode('utf-8')) @@ -68,7 +68,6 @@ def main(): ) # Create API key document (matches Mycelia's format) - import base64 api_key_doc = { "hashedKey": hashed_key, # Note: hashedKey, not hash! "salt": base64.b64encode(salt).decode('utf-8'), # Store as base64 like Mycelia diff --git a/backends/advanced/src/advanced_omi_backend/config.py b/backends/advanced/src/advanced_omi_backend/config.py index ce018d6e..2b07a8d4 100644 --- a/backends/advanced/src/advanced_omi_backend/config.py +++ b/backends/advanced/src/advanced_omi_backend/config.py @@ -137,6 +137,7 @@ def get_speech_detection_settings(): return { "min_words": int(os.getenv("SPEECH_DETECTION_MIN_WORDS", DEFAULT_SPEECH_DETECTION_SETTINGS["min_words"])), "min_confidence": float(os.getenv("SPEECH_DETECTION_MIN_CONFIDENCE", DEFAULT_SPEECH_DETECTION_SETTINGS["min_confidence"])), + "min_duration": float(os.getenv("SPEECH_DETECTION_MIN_DURATION", DEFAULT_SPEECH_DETECTION_SETTINGS["min_duration"])), } diff --git a/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py b/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py index 220ba815..f52167de 100644 --- a/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py 
+++ b/backends/advanced/src/advanced_omi_backend/controllers/memory_controller.py @@ -9,6 +9,7 @@ from fastapi.responses import JSONResponse from advanced_omi_backend.services.memory import get_memory_service +from advanced_omi_backend.services.memory.base import MemoryEntry from advanced_omi_backend.users import User logger = logging.getLogger(__name__) @@ -32,7 +33,7 @@ async def get_memories(user: User, limit: int, user_id: Optional[str] = None): total_count = await memory_service.count_memories(target_user_id) # Convert MemoryEntry objects to dicts for JSON serialization - memories_dicts = [mem.to_dict() if hasattr(mem, 'to_dict') else mem for mem in memories] + memories_dicts = [mem.to_dict() for mem in memories] return { "memories": memories_dicts, @@ -91,7 +92,7 @@ async def search_memories(query: str, user: User, limit: int, score_threshold: f search_results = await memory_service.search_memories(query, target_user_id, limit, score_threshold) # Convert MemoryEntry objects to dicts for JSON serialization - results_dicts = [result.to_dict() if hasattr(result, 'to_dict') else result for result in search_results] + results_dicts = [result.to_dict() for result in search_results] return { "query": query, @@ -252,3 +253,30 @@ async def get_all_memories_admin(user: User, limit: int): return JSONResponse( status_code=500, content={"message": f"Error fetching admin memories: {str(e)}"} ) + + +async def get_memory_by_id(memory_id: str, user: User, user_id: Optional[str] = None): + """Get a single memory by ID. 
Users can only access their own memories, admins can access any.""" + try: + memory_service = get_memory_service() + + # Determine which user's memory to fetch + target_user_id = user.user_id + if user.is_superuser and user_id: + target_user_id = user_id + + # Get the specific memory + memory = await memory_service.get_memory(memory_id, target_user_id) + + if memory: + # Convert MemoryEntry to dict for JSON serialization + memory_dict = memory.to_dict() + return {"memory": memory_dict} + else: + return JSONResponse(status_code=404, content={"message": "Memory not found"}) + + except Exception as e: + audio_logger.error(f"Error fetching memory {memory_id}: {e}", exc_info=True) + return JSONResponse( + status_code=500, content={"message": f"Error fetching memory: {str(e)}"} + ) diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py index 1d28a674..a0bdd27d 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py @@ -57,6 +57,16 @@ async def search_memories( return await memory_controller.search_memories(query, current_user, limit, score_threshold, user_id) +@router.get("/{memory_id}") +async def get_memory_by_id( + memory_id: str, + current_user: User = Depends(current_active_user), + user_id: Optional[str] = Query(default=None, description="User ID filter (admin only)"), +): + """Get a single memory by ID. 
Users can only access their own memories, admins can access any.""" + return await memory_controller.get_memory_by_id(memory_id, current_user, user_id) + + @router.post("") async def add_memory( request: AddMemoryRequest, diff --git a/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py index 93f4e342..319cfed3 100644 --- a/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py +++ b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py @@ -200,14 +200,18 @@ def sync_admin_user(self) -> Optional[Tuple[str, str]]: if result: client_id, api_key = result if api_key: + # Credentials created successfully - don't log them logger.info("="*70) - logger.info("๐Ÿ”‘ MYCELIA OAUTH CREDENTIALS (Save these!)") + logger.info("๐Ÿ”‘ MYCELIA OAUTH CREDENTIALS CREATED") logger.info("="*70) logger.info(f"User: {admin_email}") logger.info(f"Client ID: {client_id}") - logger.info(f"Client Secret: {api_key}") - logger.info("="*70) - logger.info("Configure Mycelia frontend at http://localhost:3002/settings") + logger.info("") + logger.info("๐Ÿ” To retrieve credentials for Mycelia configuration:") + logger.info(" cd backends/advanced/scripts") + logger.info(" python create_mycelia_api_key.py") + logger.info("") + logger.info("๐Ÿ“ This will display the API key needed for Mycelia frontend setup") logger.info("="*70) return result diff --git a/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py b/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py index 22ffaaf2..3bfb84ff 100644 --- a/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py +++ b/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py @@ -142,7 +142,6 @@ async def process_memory_job( # Determine memory provider from memory service memory_provider = conversation_model.MemoryProvider.CHRONICLE # Default try: - from advanced_omi_backend.services.memory import 
get_memory_service memory_service_obj = get_memory_service() provider_name = memory_service_obj.__class__.__name__ if "OpenMemory" in provider_name: diff --git a/backends/advanced/webui/src/hooks/useD3Zoom.ts b/backends/advanced/webui/src/hooks/useD3Zoom.ts index 8f60b204..c33db069 100644 --- a/backends/advanced/webui/src/hooks/useD3Zoom.ts +++ b/backends/advanced/webui/src/hooks/useD3Zoom.ts @@ -16,6 +16,7 @@ export function useD3Zoom(options: UseD3ZoomOptions = {}) { const svgRef = useRef(null) const [transform, setTransform] = useState(d3.zoomIdentity) + const initializedRef = useRef(false) const handleZoom = useCallback( (event: d3.D3ZoomEvent) => { @@ -54,6 +55,16 @@ export function useD3Zoom(options: UseD3ZoomOptions = {}) { [handleZoom, scaleExtent, wheelDelta] ) + // Set initial transform once on mount + useEffect(() => { + if (!svgRef.current || initializedRef.current) return + + const svg = d3.select(svgRef.current) + svg.property('__zoom', d3.zoomIdentity) + initializedRef.current = true + }, []) + + // Setup zoom behavior (only when zoomBehavior changes) useEffect(() => { if (!svgRef.current) return @@ -67,12 +78,11 @@ export function useD3Zoom(options: UseD3ZoomOptions = {}) { } svg.call(zoomBehavior as any) - svg.property('__zoom', transform) return () => { svg.on('.zoom', null) } - }, [zoomBehavior, transform]) + }, [zoomBehavior]) return { svgRef, diff --git a/backends/advanced/webui/src/pages/MemoryDetail.tsx b/backends/advanced/webui/src/pages/MemoryDetail.tsx index 73750958..ed65cf8a 100644 --- a/backends/advanced/webui/src/pages/MemoryDetail.tsx +++ b/backends/advanced/webui/src/pages/MemoryDetail.tsx @@ -50,22 +50,22 @@ export default function MemoryDetail() { console.log('๐Ÿ” MemoryDetail: Loading memory', id) setLoading(true) setError(null) - const response = await memoriesApi.getAll(user.id) - const memoriesData = response.data.memories || response.data || [] - console.log('๐Ÿ“ฆ MemoryDetail: Loaded memories', memoriesData.length) + const 
response = await memoriesApi.getById(id, user.id) + const memoryData = response.data.memory + console.log('๐Ÿ“ฆ MemoryDetail: Loaded memory', memoryData?.id) - // Find the specific memory by ID - const foundMemory = memoriesData.find((m: Memory) => m.id === id) - console.log('๐ŸŽฏ MemoryDetail: Found memory?', !!foundMemory, foundMemory?.id) - - if (foundMemory) { - setMemory(foundMemory) + if (memoryData) { + setMemory(memoryData) } else { setError('Memory not found') } } catch (err: any) { console.error('โŒ Failed to load memory:', err) - setError(err.message || 'Failed to load memory') + if (err.response?.status === 404) { + setError('Memory not found') + } else { + setError(err.message || 'Failed to load memory') + } } finally { setLoading(false) } diff --git a/backends/advanced/webui/src/services/api.ts b/backends/advanced/webui/src/services/api.ts index 2617cdaa..e43902ad 100644 --- a/backends/advanced/webui/src/services/api.ts +++ b/backends/advanced/webui/src/services/api.ts @@ -105,15 +105,16 @@ export const conversationsApi = { export const memoriesApi = { getAll: (userId?: string) => api.get('/api/memories', { params: userId ? { user_id: userId } : {} }), + getById: (id: string, userId?: string) => api.get(`/api/memories/${id}`, { params: userId ? { user_id: userId } : {} }), getUnfiltered: (userId?: string) => api.get('/api/memories/unfiltered', { params: userId ? 
{ user_id: userId } : {} }), - search: (query: string, userId?: string, limit: number = 20, scoreThreshold?: number) => - api.get('/api/memories/search', { - params: { - query, - ...(userId && { user_id: userId }), + search: (query: string, userId?: string, limit: number = 20, scoreThreshold?: number) => + api.get('/api/memories/search', { + params: { + query, + ...(userId && { user_id: userId }), limit, ...(scoreThreshold !== undefined && { score_threshold: scoreThreshold / 100 }) // Convert percentage to decimal - } + } }), delete: (id: string) => api.delete(`/api/memories/${id}`), deleteAll: () => api.delete('/api/admin/memory/delete-all'), diff --git a/quickstart.md b/quickstart.md index 3c6f2bcb..09cc6c26 100644 --- a/quickstart.md +++ b/quickstart.md @@ -111,6 +111,40 @@ git clone https://github.com/AnkushMalaker/chronicle.git cd chronicle ``` +*The `--recursive` flag downloads the Mycelia submodule (see note below)* + +**If you already cloned without `--recursive`:** +```bash +git submodule update --init --recursive +``` + +
+๐Ÿ“ฆ About the Mycelia Submodule (Optional - Click to expand) + +**What is Mycelia?** +Mycelia is an optional self-hosted AI memory system that Friend-Lite can use as an alternative memory backend. It provides: +- Timeline-based memory visualization +- Advanced audio processing with speaker diarization +- MongoDB-based full-text search +- MCP (Model Context Protocol) integration + +**Do I need it?** +**Most users don't need Mycelia!** The default Friend-Lite memory system works great for most use cases. Only consider Mycelia if you: +- Want timeline-based memory visualization +- Need advanced speaker diarization features +- Want to use MongoDB for memory storage instead of Qdrant + +**Runtime Dependencies (if using Mycelia):** +- **Deno** - JavaScript/TypeScript runtime (install: `curl -fsSL https://deno.land/install.sh | sh`) +- **MongoDB** - Database for memory storage +- **Redis** - Caching layer +- **FFmpeg** - Audio processing (usually pre-installed) + +**Build/Setup:** +If you choose Mycelia as your memory provider during setup wizard, the wizard will configure the necessary services automatically. No manual build steps required! + +
+ **Run the setup wizard:** ```bash uv run --with-requirements setup-requirements.txt python wizard.py From 9dfc9cbb529a7a67e958d4442e10d4d562a640f9 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Tue, 9 Dec 2025 12:25:20 +0000 Subject: [PATCH 17/31] applied changes from rabbitAI --- .../advanced/src/advanced_omi_backend/auth.py | 14 +- .../routers/modules/memory_routes.py | 20 +- .../services/memory/prompts.py | 6 +- .../services/memory/providers/mycelia.py | 191 +++++++----- .../services/mycelia_sync.py | 45 ++- .../workers/memory_jobs.py | 108 ++++--- .../workers/transcription_jobs.py | 274 +++++++++--------- .../webui/src/pages/FrappeGanttTimeline.tsx | 22 +- .../webui/src/pages/MyceliaTimeline.tsx | 46 ++- .../webui/src/pages/ReactGanttTimeline.tsx | 9 +- 10 files changed, 438 insertions(+), 297 deletions(-) diff --git a/backends/advanced/src/advanced_omi_backend/auth.py b/backends/advanced/src/advanced_omi_backend/auth.py index 4648e276..3a383db2 100644 --- a/backends/advanced/src/advanced_omi_backend/auth.py +++ b/backends/advanced/src/advanced_omi_backend/auth.py @@ -3,8 +3,10 @@ import logging import os import re +from datetime import datetime, timedelta from typing import Literal, Optional, overload +import jwt from beanie import PydanticObjectId from dotenv import load_dotenv from fastapi import Depends, Request @@ -21,6 +23,7 @@ logger = logging.getLogger(__name__) load_dotenv() +JWT_LIFETIME_SECONDS = int(os.getenv("JWT_LIFETIME_SECONDS", "86400")) @overload @@ -82,7 +85,7 @@ async def get_user_manager(user_db=Depends(get_user_db)): # Transport configurations cookie_transport = CookieTransport( - cookie_max_age=86400, # 24 hours (matches JWT lifetime) + cookie_max_age=JWT_LIFETIME_SECONDS, # Matches JWT lifetime cookie_secure=COOKIE_SECURE, # Set to False in development if not using HTTPS cookie_httponly=True, cookie_samesite="lax", @@ -94,8 +97,8 @@ async def get_user_manager(user_db=Depends(get_user_db)): def get_jwt_strategy() -> JWTStrategy: 
"""Get JWT strategy for token generation and validation.""" return JWTStrategy( - secret=SECRET_KEY, lifetime_seconds=86400 - ) # 24 hours for device compatibility + secret=SECRET_KEY, lifetime_seconds=JWT_LIFETIME_SECONDS + ) def generate_jwt_for_user(user_id: str, user_email: str) -> str: @@ -115,16 +118,13 @@ def generate_jwt_for_user(user_id: str, user_email: str) -> str: >>> token = generate_jwt_for_user("507f1f77bcf86cd799439011", "user@example.com") >>> # Use token to call Mycelia API """ - from datetime import datetime, timedelta - import jwt - # Create JWT payload matching Chronicle's standard format payload = { "sub": user_id, # Subject = user ID "email": user_email, "iss": "chronicle", # Issuer "aud": "chronicle", # Audience - "exp": datetime.utcnow() + timedelta(hours=24), # 24 hour expiration + "exp": datetime.utcnow() + timedelta(seconds=JWT_LIFETIME_SECONDS), "iat": datetime.utcnow(), # Issued at } diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py index a0bdd27d..d0be9528 100644 --- a/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/memory_routes.py @@ -57,16 +57,6 @@ async def search_memories( return await memory_controller.search_memories(query, current_user, limit, score_threshold, user_id) -@router.get("/{memory_id}") -async def get_memory_by_id( - memory_id: str, - current_user: User = Depends(current_active_user), - user_id: Optional[str] = Query(default=None, description="User ID filter (admin only)"), -): - """Get a single memory by ID. 
Users can only access their own memories, admins can access any.""" - return await memory_controller.get_memory_by_id(memory_id, current_user, user_id) - - @router.post("") async def add_memory( request: AddMemoryRequest, @@ -96,3 +86,13 @@ async def get_memories_unfiltered( async def get_all_memories_admin(current_user: User = Depends(current_superuser), limit: int = 200): """Get all memories across all users for admin review. Admin only.""" return await memory_controller.get_all_memories_admin(current_user, limit) + + +@router.get("/{memory_id}") +async def get_memory_by_id( + memory_id: str, + current_user: User = Depends(current_active_user), + user_id: Optional[str] = Query(default=None, description="User ID filter (admin only)"), +): + """Get a single memory by ID. Users can only access their own memories, admins can access any.""" + return await memory_controller.get_memory_by_id(memory_id, current_user, user_id) diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/prompts.py b/backends/advanced/src/advanced_omi_backend/services/memory/prompts.py index b022e39c..4b41a51a 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/prompts.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/prompts.py @@ -5,7 +5,7 @@ 2. Updating memory with new facts (DEFAULT_UPDATE_MEMORY_PROMPT) 3. Answering questions from memory (MEMORY_ANSWER_PROMPT) 4. Procedural memory for task tracking (PROCEDURAL_MEMORY_SYSTEM_PROMPT) -5. Temporal and entity extraction (TEMPORAL_ENTITY_EXTRACTION_PROMPT) +5. 
Temporal and entity extraction (get_temporal_entity_extraction_prompt()) """ from datetime import datetime, timedelta @@ -551,4 +551,6 @@ def build_temporal_extraction_prompt(current_date: datetime) -> str: """ -TEMPORAL_ENTITY_EXTRACTION_PROMPT = build_temporal_extraction_prompt(datetime.now()) +def get_temporal_entity_extraction_prompt() -> str: + """Get the temporal entity extraction prompt with current date/time.""" + return build_temporal_extraction_prompt(datetime.now()) diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py index 87fbe690..6f9df0ba 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mycelia.py @@ -4,15 +4,24 @@ that uses Mycelia as the backend for all memory operations. """ +import asyncio import json import logging from datetime import datetime from typing import Any, Dict, List, Optional, Tuple + import httpx +from advanced_omi_backend.auth import generate_jwt_for_user +from advanced_omi_backend.users import User + from ..base import MemoryEntry, MemoryServiceBase -from ..prompts import FACT_RETRIEVAL_PROMPT, TEMPORAL_ENTITY_EXTRACTION_PROMPT, TemporalEntity from ..config import MemoryConfig +from ..prompts import ( + FACT_RETRIEVAL_PROMPT, + TemporalEntity, + get_temporal_entity_extraction_prompt, +) from .llm_providers import _get_openai_client memory_logger = logging.getLogger("memory_service") @@ -31,7 +40,7 @@ def strip_markdown_json(content: str) -> str: # Remove opening ```json or ``` first_newline = content.find("\n") if first_newline != -1: - content = content[first_newline + 1:] + content = content[first_newline + 1 :] # Remove closing ``` if content.endswith("```"): content = content[:-3] @@ -68,6 +77,7 @@ def __init__(self, config: MemoryConfig): self.llm_config = config.llm_config or {} 
memory_logger.info(f"๐Ÿ„ Initializing Mycelia memory service at {self.api_url}") + async def initialize(self) -> None: """Initialize Mycelia client and verify connection.""" try: @@ -75,7 +85,7 @@ async def initialize(self) -> None: self._client = httpx.AsyncClient( base_url=self.api_url, timeout=self.timeout, - headers={"Content-Type": "application/json"} + headers={"Content-Type": "application/json"}, ) # Test connection directly (without calling test_connection to avoid recursion) @@ -106,11 +116,8 @@ async def _get_user_jwt(self, user_id: str, user_email: Optional[str] = None) -> Raises: ValueError: If user not found """ - from advanced_omi_backend.auth import generate_jwt_for_user - # If email not provided, lookup user if not user_email: - from advanced_omi_backend.users import User user = await User.get(user_id) if not user: raise ValueError(f"User {user_id} not found") @@ -181,15 +188,10 @@ def _mycelia_object_to_memory_entry(self, obj: Dict, user_id: str) -> MemoryEntr id=memory_id, content=memory_content, metadata=metadata, - created_at=self._extract_bson_date(obj.get("createdAt")) + created_at=self._extract_bson_date(obj.get("createdAt")), ) - async def _call_resource( - self, - action: str, - jwt_token: str, - **params - ) -> Dict[str, Any]: + async def _call_resource(self, action: str, jwt_token: str, **params) -> Dict[str, Any]: """Call Mycelia objects resource with JWT authentication. 
Args: @@ -210,17 +212,19 @@ async def _call_resource( response = await self._client.post( "/api/resource/tech.mycelia.objects", json={"action": action, **params}, - headers={"Authorization": f"Bearer {jwt_token}"} + headers={"Authorization": f"Bearer {jwt_token}"}, ) response.raise_for_status() return response.json() except httpx.HTTPStatusError as e: - memory_logger.error(f"Mycelia API error: {e.response.status_code} - {e.response.text}") - raise RuntimeError(f"Mycelia API error: {e.response.status_code}") + memory_logger.exception( + f"Mycelia API error: {e.response.status_code} - {e.response.text}" + ) + raise RuntimeError(f"Mycelia API error: {e.response.status_code}") from e except Exception as e: - memory_logger.error(f"Failed to call Mycelia resource: {e}") - raise RuntimeError(f"Mycelia API call failed: {e}") + memory_logger.exception(f"Failed to call Mycelia resource: {e}") + raise RuntimeError(f"Mycelia API call failed: {e}") from e async def _extract_memories_via_llm( self, @@ -246,7 +250,7 @@ async def _extract_memories_via_llm( client = _get_openai_client( api_key=self.llm_config.get("api_key"), base_url=self.llm_config.get("base_url", "https://api.openai.com/v1"), - is_async=True + is_async=True, ) # Call OpenAI for memory extraction @@ -254,10 +258,10 @@ async def _extract_memories_via_llm( model=self.llm_config.get("model", "gpt-4o-mini"), messages=[ {"role": "system", "content": FACT_RETRIEVAL_PROMPT}, - {"role": "user", "content": transcript} + {"role": "user", "content": transcript}, ], response_format={"type": "json_object"}, - temperature=0.1 + temperature=0.1, ) content = response.choices[0].message.content @@ -281,7 +285,7 @@ async def _extract_memories_via_llm( except Exception as e: memory_logger.error(f"Failed to extract memories via OpenAI: {e}") - raise RuntimeError(f"OpenAI memory extraction failed: {e}") + raise RuntimeError(f"OpenAI memory extraction failed: {e}") from e async def _extract_temporal_entity_via_llm( self, @@ -304,18 
+308,21 @@ async def _extract_temporal_entity_via_llm( client = _get_openai_client( api_key=self.llm_config.get("api_key"), base_url=self.llm_config.get("base_url", "https://api.openai.com/v1"), - is_async=True + is_async=True, ) # Call OpenAI with structured output request response = await client.chat.completions.create( model=self.llm_config.get("model", "gpt-4o-mini"), messages=[ - {"role": "system", "content": TEMPORAL_ENTITY_EXTRACTION_PROMPT}, - {"role": "user", "content": f"Extract temporal and entity information from this memory fact:\n\n{fact}"} + {"role": "system", "content": get_temporal_entity_extraction_prompt()}, + { + "role": "user", + "content": f"Extract temporal and entity information from this memory fact:\n\n{fact}", + }, ], response_format={"type": "json_object"}, - temperature=0.1 + temperature=0.1, ) content = response.choices[0].message.content @@ -334,12 +341,18 @@ async def _extract_temporal_entity_via_llm( if "timeRanges" in temporal_data: for time_range in temporal_data["timeRanges"]: if isinstance(time_range["start"], str): - time_range["start"] = datetime.fromisoformat(time_range["start"].replace("Z", "+00:00")) + time_range["start"] = datetime.fromisoformat( + time_range["start"].replace("Z", "+00:00") + ) if isinstance(time_range["end"], str): - time_range["end"] = datetime.fromisoformat(time_range["end"].replace("Z", "+00:00")) + time_range["end"] = datetime.fromisoformat( + time_range["end"].replace("Z", "+00:00") + ) temporal_entity = TemporalEntity(**temporal_data) - memory_logger.info(f"โœ… Temporal extraction: isEvent={temporal_entity.isEvent}, timeRanges={len(temporal_entity.timeRanges)}, entities={temporal_entity.entities}") + memory_logger.info( + f"โœ… Temporal extraction: isEvent={temporal_entity.isEvent}, timeRanges={len(temporal_entity.timeRanges)}, entities={temporal_entity.entities}" + ) return temporal_entity except json.JSONDecodeError as e: @@ -382,7 +395,7 @@ async def add_memory( """ # Ensure service is 
initialized (lazy initialization for RQ workers) await self._ensure_initialized() - + try: # Generate JWT token for this user jwt_token = await self._get_user_jwt(user_id, user_email) @@ -409,7 +422,9 @@ async def add_memory( time_ranges = [] for tr in temporal_entity.timeRanges: time_range_dict = { - "start": tr.start.isoformat() if isinstance(tr.start, datetime) else tr.start, + "start": ( + tr.start.isoformat() if isinstance(tr.start, datetime) else tr.start + ), "end": tr.end.isoformat() if isinstance(tr.end, datetime) else tr.end, } if tr.name: @@ -422,7 +437,8 @@ async def add_memory( object_data = { "name": f"{name_prefix} {fact_preview}", "details": fact, - "aliases": [source_id, client_id] + temporal_entity.entities, # Include extracted entities + "aliases": [source_id, client_id] + + temporal_entity.entities, # Include extracted entities "isPerson": temporal_entity.isPerson, "isPromise": temporal_entity.isPromise, "isEvent": temporal_entity.isEvent, @@ -438,7 +454,9 @@ async def add_memory( if temporal_entity.emoji: object_data["icon"] = {"text": temporal_entity.emoji} - memory_logger.info(f"๐Ÿ“… Temporal extraction: isEvent={temporal_entity.isEvent}, timeRanges={len(time_ranges)}, entities={len(temporal_entity.entities)}") + memory_logger.info( + f"๐Ÿ“… Temporal extraction: isEvent={temporal_entity.isEvent}, timeRanges={len(time_ranges)}, entities={len(temporal_entity.entities)}" + ) else: # Fallback to basic object without temporal data object_data = { @@ -453,20 +471,22 @@ async def add_memory( memory_logger.warning(f"โš ๏ธ No temporal data extracted for fact: {fact_preview}") result = await self._call_resource( - action="create", - jwt_token=jwt_token, - object=object_data + action="create", jwt_token=jwt_token, object=object_data ) memory_id = result.get("insertedId") if memory_id: - memory_logger.info(f"โœ… Created Mycelia memory object: {memory_id} - {fact_preview}") + memory_logger.info( + f"โœ… Created Mycelia memory object: {memory_id} - 
{fact_preview}" + ) memory_ids.append(memory_id) else: memory_logger.error(f"Failed to create memory fact: {fact}") if memory_ids: - memory_logger.info(f"โœ… Created {len(memory_ids)} Mycelia memory objects from {len(extracted_facts)} facts") + memory_logger.info( + f"โœ… Created {len(memory_ids)} Mycelia memory objects from {len(extracted_facts)} facts" + ) return (True, memory_ids) else: memory_logger.error("No Mycelia memory objects were created") @@ -505,8 +525,8 @@ async def search_memories( options={ "searchTerm": query, "limit": limit, - "sort": {"updatedAt": -1} # Most recent first - } + "sort": {"updatedAt": -1}, # Most recent first + }, ) # Convert Mycelia objects to MemoryEntry objects @@ -528,9 +548,7 @@ async def search_memories( memory_logger.error(f"Failed to search memories via Mycelia: {e}") return [] - async def get_all_memories( - self, user_id: str, limit: int = 100 - ) -> List[MemoryEntry]: + async def get_all_memories(self, user_id: str, limit: int = 100) -> List[MemoryEntry]: """Get all memories for a user from Mycelia. 
Args: @@ -552,10 +570,7 @@ async def get_all_memories( action="list", jwt_token=jwt_token, filters={}, # Auto-scoped by userId - options={ - "limit": limit, - "sort": {"updatedAt": -1} # Most recent first - } + options={"limit": limit, "sort": {"updatedAt": -1}}, # Most recent first ) # Convert Mycelia objects to MemoryEntry objects @@ -588,12 +603,8 @@ async def count_memories(self, user_id: str) -> Optional[int]: response = await self._client.post( "/api/resource/tech.mycelia.mongo", - json={ - "action": "count", - "collection": "objects", - "query": {"userId": user_id} - }, - headers={"Authorization": f"Bearer {jwt_token}"} + json={"action": "count", "collection": "objects", "query": {"userId": user_id}}, + headers={"Authorization": f"Bearer {jwt_token}"}, ) response.raise_for_status() return response.json() @@ -602,7 +613,9 @@ async def count_memories(self, user_id: str) -> Optional[int]: memory_logger.error(f"Failed to count memories via Mycelia: {e}") return None - async def get_memory(self, memory_id: str, user_id: Optional[str] = None) -> Optional[MemoryEntry]: + async def get_memory( + self, memory_id: str, user_id: Optional[str] = None + ) -> Optional[MemoryEntry]: """Get a specific memory by ID from Mycelia. 
Args: @@ -625,11 +638,7 @@ async def get_memory(self, memory_id: str, user_id: Optional[str] = None) -> Opt jwt_token = await self._get_user_jwt(user_id) # Get the object by ID (auto-scoped by userId in Mycelia) - result = await self._call_resource( - action="get", - jwt_token=jwt_token, - id=memory_id - ) + result = await self._call_resource(action="get", jwt_token=jwt_token, id=memory_id) if result: return self._mycelia_object_to_memory_entry(result, user_id) @@ -647,7 +656,7 @@ async def update_memory( content: Optional[str] = None, metadata: Optional[Dict[str, Any]] = None, user_id: Optional[str] = None, - user_email: Optional[str] = None + user_email: Optional[str] = None, ) -> bool: """Update a specific memory's content and/or metadata in Mycelia. @@ -704,10 +713,7 @@ async def update_memory( # Update the object (auto-scoped by userId in Mycelia) result = await self._call_resource( - action="update", - jwt_token=jwt_token, - id=memory_id, - object=update_data + action="update", jwt_token=jwt_token, id=memory_id, object=update_data ) updated_count = result.get("modifiedCount", 0) @@ -722,7 +728,9 @@ async def update_memory( memory_logger.error(f"Failed to update memory via Mycelia: {e}") return False - async def delete_memory(self, memory_id: str, user_id: Optional[str] = None, user_email: Optional[str] = None) -> bool: + async def delete_memory( + self, memory_id: str, user_id: Optional[str] = None, user_email: Optional[str] = None + ) -> bool: """Delete a specific memory from Mycelia. 
Args: @@ -743,11 +751,7 @@ async def delete_memory(self, memory_id: str, user_id: Optional[str] = None, use jwt_token = await self._get_user_jwt(user_id, user_email) # Delete the object (auto-scoped by userId in Mycelia) - result = await self._call_resource( - action="delete", - jwt_token=jwt_token, - id=memory_id - ) + result = await self._call_resource(action="delete", jwt_token=jwt_token, id=memory_id) deleted_count = result.get("deletedCount", 0) if deleted_count > 0: @@ -779,7 +783,7 @@ async def delete_all_user_memories(self, user_id: str) -> int: action="list", jwt_token=jwt_token, filters={}, # Auto-scoped by userId - options={"limit": 10000} # Large limit to get all + options={"limit": 10000}, # Large limit to get all ) # Delete each memory individually @@ -817,11 +821,48 @@ async def test_connection(self) -> bool: memory_logger.error(f"Mycelia connection test failed: {e}") return False + async def aclose(self) -> None: + """Asynchronously close Mycelia client and cleanup resources.""" + memory_logger.info("Closing Mycelia memory service") + if self._client: + try: + await self._client.aclose() + memory_logger.info("โœ… Mycelia HTTP client closed successfully") + except Exception as e: + memory_logger.error(f"Error closing Mycelia HTTP client: {e}") + self._initialized = False + def shutdown(self) -> None: - """Shutdown Mycelia client and cleanup resources.""" + """Shutdown Mycelia client and cleanup resources (sync wrapper).""" memory_logger.info("Shutting down Mycelia memory service") + if self._client: - # Note: httpx AsyncClient should be closed in an async context - # In practice, this will be called during shutdown so we log a warning - memory_logger.warning("HTTP client should be closed with await client.aclose()") + try: + # Try to get the current event loop + try: + loop = asyncio.get_running_loop() + except RuntimeError: + # No running loop + loop = None + + if loop and loop.is_running(): + # If we're in an async context, schedule the close 
operation on the running loop + memory_logger.info( + "Running event loop detected. Scheduling aclose() on the current loop." + ) + try: + # Schedule the coroutine to run on the existing loop + asyncio.ensure_future(self.aclose(), loop=loop) + memory_logger.info("โœ… Close operation scheduled on running event loop") + except Exception as e: + memory_logger.error(f"Error scheduling close on running loop: {e}") + else: + # No running loop, safe to use run_until_complete + try: + asyncio.get_event_loop().run_until_complete(self.aclose()) + except Exception as e: + memory_logger.error(f"Error during shutdown: {e}") + except Exception as e: + memory_logger.error(f"Unexpected error during shutdown: {e}") + self._initialized = False diff --git a/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py index 319cfed3..84011068 100644 --- a/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py +++ b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py @@ -6,14 +6,16 @@ that map to their Chronicle user ID. 
""" +import base64 +import hashlib import logging import os import secrets -import hashlib -import base64 +from datetime import datetime from typing import Optional, Tuple + +from bson import ObjectId from pymongo import MongoClient -from datetime import datetime logger = logging.getLogger(__name__) @@ -25,7 +27,9 @@ def __init__(self): """Initialize the sync service.""" # MongoDB configuration # MONGODB_URI format: mongodb://host:port/database_name - self.mongo_url = os.getenv("MONGODB_URI", os.getenv("MONGO_URL", "mongodb://localhost:27017")) + self.mongo_url = os.getenv( + "MONGODB_URI", os.getenv("MONGO_URL", "mongodb://localhost:27017") + ) # Determine Mycelia database from environment # Test environment uses mycelia_test, production uses mycelia @@ -45,14 +49,10 @@ def _hash_api_key_with_salt(self, api_key: str, salt: bytes) -> str: """Hash API key with salt (matches Mycelia's implementation).""" h = hashlib.sha256() h.update(salt) - h.update(api_key.encode('utf-8')) - return base64.b64encode(h.digest()).decode('utf-8') - - def _create_mycelia_api_key( - self, - user_id: str, - user_email: str - ) -> Tuple[str, str]: + h.update(api_key.encode("utf-8")) + return base64.b64encode(h.digest()).decode("utf-8") + + def _create_mycelia_api_key(self, user_id: str, user_email: str) -> Tuple[str, str]: """ Create a Mycelia API key for a Chronicle user. @@ -120,11 +120,7 @@ def _create_mycelia_api_key( return client_id, api_key - def sync_user_to_mycelia( - self, - user_id: str, - user_email: str - ) -> Optional[Tuple[str, str]]: + def sync_user_to_mycelia(self, user_id: str, user_email: str) -> Optional[Tuple[str, str]]: """ Sync a Chronicle user to Mycelia OAuth. 
@@ -145,7 +141,6 @@ def sync_user_to_mycelia( db = client[self.chronicle_db] users_collection = db["users"] - from bson import ObjectId users_collection.update_one( {"_id": ObjectId(user_id)}, { @@ -153,10 +148,10 @@ def sync_user_to_mycelia( "mycelia_oauth": { "client_id": client_id, "created_at": datetime.utcnow(), - "synced": True + "synced": True, } } - } + }, ) logger.info(f"โœ… Synced {user_email} with Mycelia OAuth") @@ -201,9 +196,9 @@ def sync_admin_user(self) -> Optional[Tuple[str, str]]: client_id, api_key = result if api_key: # Credentials created successfully - don't log them - logger.info("="*70) + logger.info("=" * 70) logger.info("๐Ÿ”‘ MYCELIA OAUTH CREDENTIALS CREATED") - logger.info("="*70) + logger.info("=" * 70) logger.info(f"User: {admin_email}") logger.info(f"Client ID: {client_id}") logger.info("") @@ -211,8 +206,10 @@ def sync_admin_user(self) -> Optional[Tuple[str, str]]: logger.info(" cd backends/advanced/scripts") logger.info(" python create_mycelia_api_key.py") logger.info("") - logger.info("๐Ÿ“ This will display the API key needed for Mycelia frontend setup") - logger.info("="*70) + logger.info( + "๐Ÿ“ This will display the API key needed for Mycelia frontend setup" + ) + logger.info("=" * 70) return result diff --git a/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py b/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py index 3bfb84ff..31dba573 100644 --- a/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py +++ b/backends/advanced/src/advanced_omi_backend/workers/memory_jobs.py @@ -8,23 +8,20 @@ import time import uuid from datetime import UTC, datetime -from typing import Dict, Any +from typing import Any, Dict -from advanced_omi_backend.models.job import JobPriority, BaseRQJob, async_job from advanced_omi_backend.controllers.queue_controller import ( - memory_queue, JOB_RESULT_TTL, + memory_queue, ) +from advanced_omi_backend.models.job import BaseRQJob, JobPriority, async_job +from 
advanced_omi_backend.services.memory.base import MemoryEntry logger = logging.getLogger(__name__) @async_job(redis=True, beanie=True) -async def process_memory_job( - conversation_id: str, - *, - redis_client=None -) -> Dict[str, Any]: +async def process_memory_job(conversation_id: str, *, redis_client=None) -> Dict[str, Any]: """ RQ job function for memory extraction and processing from conversations. @@ -53,7 +50,9 @@ async def process_memory_job( logger.info(f"๐Ÿ”„ Starting memory processing for conversation {conversation_id}") # Get conversation data - conversation_model = await Conversation.find_one(Conversation.conversation_id == conversation_id) + conversation_model = await Conversation.find_one( + Conversation.conversation_id == conversation_id + ) if not conversation_model: logger.warning(f"No conversation found for {conversation_id}") return {"success": False, "error": "Conversation not found"} @@ -69,7 +68,9 @@ async def process_memory_job( logger.warning(f"Could not find user {user_id}") user_email = "" - logger.info(f"๐Ÿ”„ Processing memory for conversation {conversation_id}, client={client_id}, user={user_id}") + logger.info( + f"๐Ÿ”„ Processing memory for conversation {conversation_id}, client={client_id}, user={user_id}" + ) # Extract conversation text from transcript segments full_conversation = "" @@ -103,17 +104,19 @@ async def process_memory_job( for segment in conversation_model.segments: # Handle both dict and object segments if isinstance(segment, dict): - identified_as = segment.get('identified_as') + identified_as = segment.get("identified_as") else: - identified_as = getattr(segment, 'identified_as', None) + identified_as = getattr(segment, "identified_as", None) - if identified_as and identified_as != 'Unknown': + if identified_as and identified_as != "Unknown": transcript_speakers.add(identified_as.strip().lower()) - primary_speaker_names = {ps['name'].strip().lower() for ps in user.primary_speakers} + primary_speaker_names = 
{ps["name"].strip().lower() for ps in user.primary_speakers} if transcript_speakers and not transcript_speakers.intersection(primary_speaker_names): - logger.info(f"Skipping memory - no primary speakers found in conversation {conversation_id}") + logger.info( + f"Skipping memory - no primary speakers found in conversation {conversation_id}" + ) return {"success": True, "skipped": True, "reason": "No primary speakers"} # Process memory @@ -132,7 +135,9 @@ async def process_memory_job( if success and created_memory_ids: # Add memory version to conversation - conversation_model = await Conversation.find_one(Conversation.conversation_id == conversation_id) + conversation_model = await Conversation.find_one( + Conversation.conversation_id == conversation_id + ) if conversation_model: processing_time = time.time() - start_time @@ -160,14 +165,17 @@ async def process_memory_job( provider=memory_provider, processing_time_seconds=processing_time, metadata={"memory_ids": created_memory_ids}, - set_as_active=True + set_as_active=True, ) await conversation_model.save() - logger.info(f"โœ… Completed memory processing for conversation {conversation_id} - created {len(created_memory_ids)} memories in {processing_time:.2f}s") + logger.info( + f"โœ… Completed memory processing for conversation {conversation_id} - created {len(created_memory_ids)} memories in {processing_time:.2f}s" + ) # Update job metadata with memory information from rq import get_current_job + current_job = get_current_job() if current_job: if not current_job.meta: @@ -179,22 +187,50 @@ async def process_memory_job( for memory_id in created_memory_ids[:5]: # Limit to first 5 for display memory_entry = await memory_service.get_memory(memory_id, user_id) if memory_entry: - # memory_entry is a MemoryEntry object, not a dict - memory_text = memory_entry.content if hasattr(memory_entry, 'content') else str(memory_entry) - memory_details.append({ - "memory_id": memory_id, - "text": memory_text[:200] # First 200 chars 
- }) + # Handle different return types from memory service + memory_text: str + if isinstance(memory_entry, MemoryEntry): + # MemoryEntry object with content attribute + memory_text = memory_entry.content + elif isinstance(memory_entry, dict): + # Dictionary with "content" key + if "content" in memory_entry: + memory_text = memory_entry["content"] + else: + logger.error( + f"Dict memory entry missing 'content' key for {memory_id}: {list(memory_entry.keys())}" + ) + raise ValueError( + f"Dict memory entry missing 'content' key for memory {memory_id}" + ) + elif isinstance(memory_entry, str): + # String content directly + memory_text = memory_entry + else: + # Unexpected type + logger.error( + f"Unexpected memory entry type for {memory_id}: {type(memory_entry).__name__}" + ) + raise TypeError( + f"Unexpected memory entry type: {type(memory_entry).__name__}" + ) + + # Truncate to 200 chars + memory_details.append( + {"memory_id": memory_id, "text": memory_text[:200]} + ) except Exception as e: logger.warning(f"Failed to fetch memory details for UI: {e}") - current_job.meta.update({ - "conversation_id": conversation_id, - "memories_created": len(created_memory_ids), - "memory_ids": created_memory_ids[:5], # Store first 5 IDs - "memory_details": memory_details, - "processing_time": processing_time - }) + current_job.meta.update( + { + "conversation_id": conversation_id, + "memories_created": len(created_memory_ids), + "memory_ids": created_memory_ids[:5], # Store first 5 IDs + "memory_details": memory_details, + "processing_time": processing_time, + } + ) current_job.save_meta() # NOTE: Listening jobs are restarted by open_conversation_job (not here) @@ -204,7 +240,7 @@ async def process_memory_job( return { "success": True, "memories_created": len(created_memory_ids), - "processing_time": processing_time + "processing_time": processing_time, } else: # No memories created - still successful @@ -218,7 +254,7 @@ def enqueue_memory_processing( user_id: str, user_email: 
str, conversation_id: str, - priority: JobPriority = JobPriority.NORMAL + priority: JobPriority = JobPriority.NORMAL, ): """ Enqueue a memory processing job. @@ -227,9 +263,9 @@ def enqueue_memory_processing( """ timeout_mapping = { JobPriority.URGENT: 3600, # 60 minutes - JobPriority.HIGH: 2400, # 40 minutes + JobPriority.HIGH: 2400, # 40 minutes JobPriority.NORMAL: 1800, # 30 minutes - JobPriority.LOW: 900 # 15 minutes + JobPriority.LOW: 900, # 15 minutes } job = memory_queue.enqueue( @@ -238,7 +274,7 @@ def enqueue_memory_processing( job_timeout=timeout_mapping.get(priority, 1800), result_ttl=JOB_RESULT_TTL, job_id=f"memory_{conversation_id[:8]}", - description=f"Process memory for conversation {conversation_id[:8]}" + description=f"Process memory for conversation {conversation_id[:8]}", ) logger.info(f"๐Ÿ“ฅ RQ: Enqueued memory job {job.id} for conversation {conversation_id}") diff --git a/backends/advanced/src/advanced_omi_backend/workers/transcription_jobs.py b/backends/advanced/src/advanced_omi_backend/workers/transcription_jobs.py index 4e340319..c423fb0f 100644 --- a/backends/advanced/src/advanced_omi_backend/workers/transcription_jobs.py +++ b/backends/advanced/src/advanced_omi_backend/workers/transcription_jobs.py @@ -18,6 +18,7 @@ JOB_RESULT_TTL, REDIS_URL, ) +from advanced_omi_backend.utils.conversation_utils import analyze_speech, mark_conversation_deleted logger = logging.getLogger(__name__) @@ -28,7 +29,7 @@ async def apply_speaker_recognition( words: list, segments: list, user_id: str, - conversation_id: str = None + conversation_id: str = None, ) -> list: """ Apply speaker recognition to segments using the speaker recognition service. 
@@ -54,27 +55,28 @@ async def apply_speaker_recognition( logger.info(f"๐ŸŽค Speaker recognition disabled, using original speaker labels") return segments - logger.info(f"๐ŸŽค Speaker recognition enabled, identifying speakers{f' for {conversation_id}' if conversation_id else ''}...") + logger.info( + f"๐ŸŽค Speaker recognition enabled, identifying speakers{f' for {conversation_id}' if conversation_id else ''}..." + ) # Prepare transcript data with word-level timings - transcript_data = { - "text": transcript_text, - "words": words - } + transcript_data = {"text": transcript_text, "words": words} # Call speaker recognition service to match and identify speakers speaker_result = await speaker_client.diarize_identify_match( - audio_path=audio_path, - transcript_data=transcript_data, - user_id=user_id + audio_path=audio_path, transcript_data=transcript_data, user_id=user_id ) if not speaker_result or "segments" not in speaker_result: - logger.info(f"๐ŸŽค Speaker recognition returned no segments, keeping original transcription segments") + logger.info( + f"๐ŸŽค Speaker recognition returned no segments, keeping original transcription segments" + ) return segments speaker_identified_segments = speaker_result["segments"] - logger.info(f"๐ŸŽค Speaker recognition returned {len(speaker_identified_segments)} identified segments") + logger.info( + f"๐ŸŽค Speaker recognition returned {len(speaker_identified_segments)} identified segments" + ) logger.info(f"๐ŸŽค Original segments: {len(segments)}") # Create time-based speaker mapping @@ -97,11 +99,15 @@ def get_speaker_at_time(timestamp: float, speaker_segments: list) -> str: original_speaker = seg.speaker seg.speaker = identified_speaker updated_count += 1 - logger.debug(f"๐ŸŽค Segment [{seg.start:.1f}-{seg.end:.1f}] '{original_speaker}' -> '{identified_speaker}'") + logger.debug( + f"๐ŸŽค Segment [{seg.start:.1f}-{seg.end:.1f}] '{original_speaker}' -> '{identified_speaker}'" + ) # Ensure segments remain sorted by start time 
segments.sort(key=lambda s: s.start) - logger.info(f"๐ŸŽค Updated {updated_count}/{len(segments)} segments with speaker identifications") + logger.info( + f"๐ŸŽค Updated {updated_count}/{len(segments)} segments with speaker identifications" + ) return segments @@ -109,6 +115,7 @@ def get_speaker_at_time(timestamp: float, speaker_segments: list) -> str: logger.warning(f"โš ๏ธ Speaker recognition failed: {speaker_error}") logger.warning(f"Continuing with original transcription speaker labels") import traceback + logger.debug(traceback.format_exc()) return segments @@ -121,7 +128,7 @@ async def transcribe_full_audio_job( version_id: str, trigger: str = "reprocess", *, - redis_client=None + redis_client=None, ) -> Dict[str, Any]: """ RQ job function for transcribing full audio to text (transcription only, no speaker recognition). @@ -149,7 +156,9 @@ async def transcribe_full_audio_job( from advanced_omi_backend.services.transcription import get_transcription_provider from advanced_omi_backend.models.conversation import Conversation - logger.info(f"๐Ÿ”„ RQ: Starting transcript processing for conversation {conversation_id} (trigger: {trigger})") + logger.info( + f"๐Ÿ”„ RQ: Starting transcript processing for conversation {conversation_id} (trigger: {trigger})" + ) start_time = time.time() @@ -176,14 +185,12 @@ async def transcribe_full_audio_job( raise FileNotFoundError(f"Audio file not found: {actual_audio_path}") # Load audio data - with open(audio_file_path, 'rb') as f: + with open(audio_file_path, "rb") as f: audio_data = f.read() # Transcribe the audio (assume 16kHz sample rate) transcription_result = await provider.transcribe( - audio_data=audio_data, - sample_rate=16000, - diarize=True + audio_data=audio_data, sample_rate=16000, diarize=True ) # Extract results @@ -191,11 +198,11 @@ async def transcribe_full_audio_job( segments = transcription_result.get("segments", []) words = transcription_result.get("words", []) - logger.info(f"๐Ÿ“Š Transcription complete: 
{len(transcript_text)} chars, {len(segments)} segments, {len(words)} words") + logger.info( + f"๐Ÿ“Š Transcription complete: {len(transcript_text)} chars, {len(segments)} segments, {len(words)} words" + ) # Validate meaningful speech BEFORE any further processing - from advanced_omi_backend.utils.conversation_utils import analyze_speech, mark_conversation_deleted - transcript_data = {"text": transcript_text, "words": words} speech_analysis = analyze_speech(transcript_data) @@ -208,7 +215,7 @@ async def transcribe_full_audio_job( # Mark conversation as deleted await mark_conversation_deleted( conversation_id=conversation_id, - deletion_reason="no_meaningful_speech_batch_transcription" + deletion_reason="no_meaningful_speech_batch_transcription", ) # Cancel all dependent jobs (cropping, speaker recognition, memory, title/summary) @@ -227,14 +234,18 @@ async def transcribe_full_audio_job( f"crop_{conversation_id[:12]}", f"speaker_{conversation_id[:12]}", f"memory_{conversation_id[:12]}", - f"title_summary_{conversation_id[:12]}" + f"title_summary_{conversation_id[:12]}", ] cancelled_jobs = [] for job_id in job_patterns: try: dependent_job = Job.fetch(job_id, connection=redis_conn) - if dependent_job and dependent_job.get_status() in ['queued', 'deferred', 'scheduled']: + if dependent_job and dependent_job.get_status() in [ + "queued", + "deferred", + "scheduled", + ]: dependent_job.cancel() cancelled_jobs.append(job_id) logger.info(f"โœ… Cancelled dependent job: {job_id}") @@ -242,7 +253,9 @@ async def transcribe_full_audio_job( logger.debug(f"Job {job_id} not found or already completed: {e}") if cancelled_jobs: - logger.info(f"๐Ÿšซ Cancelled {len(cancelled_jobs)} dependent jobs due to no meaningful speech") + logger.info( + f"๐Ÿšซ Cancelled {len(cancelled_jobs)} dependent jobs due to no meaningful speech" + ) except Exception as cancel_error: logger.warning(f"Failed to cancel some dependent jobs: {cancel_error}") @@ -254,7 +267,7 @@ async def 
transcribe_full_audio_job( "reason": speech_analysis.get("reason"), "word_count": speech_analysis.get("word_count", 0), "duration": speech_analysis.get("duration", 0.0), - "deleted": True + "deleted": True, } logger.info( @@ -267,7 +280,7 @@ async def transcribe_full_audio_job( # Convert segments to SpeakerSegment objects speaker_segments = [] - + if segments: # Use provided segments for seg in segments: @@ -280,16 +293,16 @@ async def transcribe_full_audio_job( end=seg.get("end", 0), text=seg.get("text", ""), speaker=speaker_name, - confidence=seg.get("confidence") + confidence=seg.get("confidence"), ) ) elif transcript_text: - # NOTE: Parakeet falls here. + # NOTE: Parakeet falls here. # If no segments but we have text, create a single segment from the full transcript # Calculate duration from words if available, otherwise estimate from audio start_time_seg = 0.0 end_time_seg = 0.0 - + if words: # Use word timestamps if available start_times = [w.get("start", 0) for w in words if "start" in w] @@ -302,17 +315,19 @@ async def transcribe_full_audio_job( # Estimate duration: assume ~150 words per minute, or use audio file duration # For now, use a default duration if we can't calculate it end_time_seg = len(transcript_text.split()) * 0.4 # Rough estimate: 0.4s per word - + speaker_segments.append( Conversation.SpeakerSegment( start=start_time_seg, end=end_time_seg if end_time_seg > start_time_seg else start_time_seg + 1.0, text=transcript_text, speaker="Unknown", - confidence=None + confidence=None, ) ) - logger.info(f"๐Ÿ“Š Created single segment from transcript text (no segments returned by provider)") + logger.info( + f"๐Ÿ“Š Created single segment from transcript text (no segments returned by provider)" + ) logger.info(f"๐Ÿ“Š Created {len(speaker_segments)} speaker segments") @@ -326,10 +341,7 @@ async def transcribe_full_audio_job( "segment_count": len(segments), "word_count": len(words), "words": words, # Store words for speaker recognition job to read - 
"speaker_recognition": { - "enabled": False, - "reason": "handled_by_separate_job" - } + "speaker_recognition": {"enabled": False, "reason": "handled_by_separate_job"}, } conversation.add_transcript_version( @@ -337,10 +349,10 @@ async def transcribe_full_audio_job( transcript=transcript_text, segments=speaker_segments, provider=Conversation.TranscriptProvider(provider_normalized), - model=getattr(provider, 'model', 'unknown'), + model=getattr(provider, "model", "unknown"), processing_time_seconds=processing_time, metadata=metadata, - set_as_active=True + set_as_active=True, ) # Generate title and summary from transcript using LLM @@ -362,38 +374,48 @@ async def transcribe_full_audio_job( llm_response = await async_generate(prompt, temperature=0.7) # Parse LLM response - lines = llm_response.strip().split('\n') + lines = llm_response.strip().split("\n") title = None summary = None for line in lines: - if line.startswith('Title:'): - title = line.replace('Title:', '').strip() - elif line.startswith('Summary:'): - summary = line.replace('Summary:', '').strip() + if line.startswith("Title:"): + title = line.replace("Title:", "").strip() + elif line.startswith("Summary:"): + summary = line.replace("Summary:", "").strip() # Use LLM-generated title/summary if valid, otherwise fallback if title and len(title) > 0: conversation.title = title[:50] + "..." if len(title) > 50 else title else: # Fallback to first sentence if LLM didn't provide title - first_sentence = transcript_text.split('.')[0].strip() - conversation.title = first_sentence[:50] + "..." if len(first_sentence) > 50 else first_sentence + first_sentence = transcript_text.split(".")[0].strip() + conversation.title = ( + first_sentence[:50] + "..." if len(first_sentence) > 50 else first_sentence + ) if summary and len(summary) > 0: conversation.summary = summary[:150] + "..." 
if len(summary) > 150 else summary else: # Fallback to truncated transcript if LLM didn't provide summary - conversation.summary = transcript_text[:150] + "..." if len(transcript_text) > 150 else transcript_text + conversation.summary = ( + transcript_text[:150] + "..." if len(transcript_text) > 150 else transcript_text + ) - logger.info(f"โœ… Generated title: '{conversation.title}', summary: '{conversation.summary}'") + logger.info( + f"โœ… Generated title: '{conversation.title}', summary: '{conversation.summary}'" + ) except Exception as llm_error: logger.warning(f"โš ๏ธ LLM title/summary generation failed: {llm_error}") # Fallback to simple truncation - first_sentence = transcript_text.split('.')[0].strip() - conversation.title = first_sentence[:50] + "..." if len(first_sentence) > 50 else first_sentence - conversation.summary = transcript_text[:150] + "..." if len(transcript_text) > 150 else transcript_text + first_sentence = transcript_text.split(".")[0].strip() + conversation.title = ( + first_sentence[:50] + "..." if len(first_sentence) > 50 else first_sentence + ) + conversation.summary = ( + transcript_text[:150] + "..." 
if len(transcript_text) > 150 else transcript_text + ) else: conversation.title = "Empty Conversation" conversation.summary = "No speech detected" @@ -401,22 +423,27 @@ async def transcribe_full_audio_job( # Save the updated conversation await conversation.save() - logger.info(f"โœ… Transcript processing completed for {conversation_id} in {processing_time:.2f}s") + logger.info( + f"โœ… Transcript processing completed for {conversation_id} in {processing_time:.2f}s" + ) # Update job metadata with title and summary for UI display from rq import get_current_job + current_job = get_current_job() if current_job: if not current_job.meta: current_job.meta = {} - current_job.meta.update({ - "conversation_id": conversation_id, - "title": conversation.title, - "summary": conversation.summary, - "transcript_length": len(transcript_text), - "word_count": len(words), - "processing_time": processing_time - }) + current_job.meta.update( + { + "conversation_id": conversation_id, + "title": conversation.title, + "summary": conversation.summary, + "transcript_length": len(transcript_text), + "word_count": len(words), + "processing_time": processing_time, + } + ) current_job.save_meta() return { @@ -429,17 +456,13 @@ async def transcribe_full_audio_job( "words": words, # Needed by speaker recognition "provider": provider_name, "processing_time_seconds": processing_time, - "trigger": trigger + "trigger": trigger, } @async_job(redis=True, beanie=True) async def stream_speech_detection_job( - session_id: str, - user_id: str, - client_id: str, - *, - redis_client=None + session_id: str, user_id: str, client_id: str, *, redis_client=None ) -> Dict[str, Any]: """ Listen for meaningful speech, optionally check for enrolled speakers, then start conversation. 
@@ -482,25 +505,30 @@ async def stream_speech_detection_job( # Check if speaker filtering is enabled speaker_filter_enabled = os.getenv("RECORD_ONLY_ENROLLED_SPEAKERS", "false").lower() == "true" - logger.info(f"๐Ÿ“Š Conversation #{conversation_count + 1}, Speaker filter: {'enabled' if speaker_filter_enabled else 'disabled'}") + logger.info( + f"๐Ÿ“Š Conversation #{conversation_count + 1}, Speaker filter: {'enabled' if speaker_filter_enabled else 'disabled'}" + ) # Update job metadata to show status if current_job: if not current_job.meta: current_job.meta = {} - current_job.meta.update({ - "status": "listening_for_speech", - "session_id": session_id, - "audio_uuid": session_id, - "client_id": client_id, - "session_level": True # Mark as session-level job - }) + current_job.meta.update( + { + "status": "listening_for_speech", + "session_id": session_id, + "audio_uuid": session_id, + "client_id": client_id, + "session_level": True, # Mark as session-level job + } + ) current_job.save_meta() # Main loop: Listen for speech while True: # Check if job still exists in Redis (detect zombie state) from advanced_omi_backend.utils.job_utils import check_job_alive + if not await check_job_alive(redis_client, current_job): break @@ -538,16 +566,11 @@ async def stream_speech_detection_job( # Add session event for speech detected from datetime import datetime + await redis_client.hset( - session_key, - "last_event", - f"speech_detected:{datetime.utcnow().isoformat()}" - ) - await redis_client.hset( - session_key, - "speech_detected_at", - datetime.utcnow().isoformat() + session_key, "last_event", f"speech_detected:{datetime.utcnow().isoformat()}" ) + await redis_client.hset(session_key, "speech_detected_at", datetime.utcnow().isoformat()) # Step 2: If speaker filter enabled, check for enrolled speakers identified_speakers = [] @@ -557,15 +580,9 @@ async def stream_speech_detection_job( # Add session event for speaker check starting await redis_client.hset( - session_key, - 
"last_event", - f"speaker_check_starting:{datetime.utcnow().isoformat()}" - ) - await redis_client.hset( - session_key, - "speaker_check_status", - "checking" + session_key, "last_event", f"speaker_check_starting:{datetime.utcnow().isoformat()}" ) + await redis_client.hset(session_key, "speaker_check_status", "checking") from .speaker_jobs import check_enrolled_speakers_job # Enqueue speaker check as a separate trackable job @@ -578,7 +595,7 @@ async def stream_speech_detection_job( result_ttl=600, job_id=f"speaker-check_{session_id[:12]}_{conversation_count}", description=f"Speaker check for conversation #{conversation_count+1}", - meta={'audio_uuid': session_id, 'client_id': client_id} + meta={"audio_uuid": session_id, "client_id": client_id}, ) # Poll for result (with timeout) @@ -592,8 +609,11 @@ async def stream_speech_detection_job( speaker_check_job.refresh() except Exception as e: from rq.exceptions import NoSuchJobError + if isinstance(e, NoSuchJobError): - logger.warning(f"โš ๏ธ Speaker check job disappeared from Redis (likely completed quickly), assuming not enrolled") + logger.warning( + f"โš ๏ธ Speaker check job disappeared from Redis (likely completed quickly), assuming not enrolled" + ) break else: raise @@ -608,18 +628,16 @@ async def stream_speech_detection_job( await redis_client.hset( session_key, "last_event", - f"speaker_check_complete:{datetime.utcnow().isoformat()}" + f"speaker_check_complete:{datetime.utcnow().isoformat()}", ) await redis_client.hset( session_key, "speaker_check_status", - "enrolled" if enrolled_present else "not_enrolled" + "enrolled" if enrolled_present else "not_enrolled", ) if identified_speakers: await redis_client.hset( - session_key, - "identified_speakers", - ",".join(identified_speakers) + session_key, "identified_speakers", ",".join(identified_speakers) ) break elif speaker_check_job.is_failed: @@ -629,38 +647,36 @@ async def stream_speech_detection_job( await redis_client.hset( session_key, "last_event", - 
f"speaker_check_failed:{datetime.utcnow().isoformat()}" - ) - await redis_client.hset( - session_key, - "speaker_check_status", - "failed" + f"speaker_check_failed:{datetime.utcnow().isoformat()}", ) + await redis_client.hset(session_key, "speaker_check_status", "failed") break await asyncio.sleep(poll_interval) waited += poll_interval else: # Timeout - assume not enrolled - logger.warning(f"โฑ๏ธ Speaker check timed out after {max_wait}s, assuming not enrolled") + logger.warning( + f"โฑ๏ธ Speaker check timed out after {max_wait}s, assuming not enrolled" + ) enrolled_present = False # Update session event for speaker check timeout await redis_client.hset( session_key, "last_event", - f"speaker_check_timeout:{datetime.utcnow().isoformat()}" - ) - await redis_client.hset( - session_key, - "speaker_check_status", - "timeout" + f"speaker_check_timeout:{datetime.utcnow().isoformat()}", ) + await redis_client.hset(session_key, "speaker_check_status", "timeout") # Log speaker check result but proceed with conversation regardless if enrolled_present: - logger.info(f"โœ… Enrolled speaker(s) found: {', '.join(identified_speakers) if identified_speakers else 'Unknown'}") + logger.info( + f"โœ… Enrolled speaker(s) found: {', '.join(identified_speakers) if identified_speakers else 'Unknown'}" + ) else: - logger.info(f"โ„น๏ธ No enrolled speakers found, but proceeding with conversation anyway") + logger.info( + f"โ„น๏ธ No enrolled speakers found, but proceeding with conversation anyway" + ) # Step 3: Start conversation and EXIT speech_detected_at = time.time() @@ -682,7 +698,7 @@ async def stream_speech_detection_job( result_ttl=JOB_RESULT_TTL, # Use configured TTL (24 hours) instead of 10 minutes job_id=f"open-conv_{session_id[:12]}_{conversation_count}", description=f"Conversation #{conversation_count+1} for {session_id[:12]}", - meta={'audio_uuid': session_id, 'client_id': client_id} + meta={"audio_uuid": session_id, "client_id": client_id}, ) # Track the job @@ -694,17 
+710,19 @@ async def stream_speech_detection_job( current_job.meta = {} # Remove session_level flag now that conversation is starting - current_job.meta.pop('session_level', None) - - current_job.meta.update({ - "conversation_job_id": open_job.id, - "speaker_check_job_id": speaker_check_job.id if speaker_check_job else None, - "detected_speakers": identified_speakers, - "speech_detected_at": datetime.fromtimestamp(speech_detected_at).isoformat(), - "session_id": session_id, - "audio_uuid": session_id, # For job grouping - "client_id": client_id # For job grouping - }) + current_job.meta.pop("session_level", None) + + current_job.meta.update( + { + "conversation_job_id": open_job.id, + "speaker_check_job_id": speaker_check_job.id if speaker_check_job else None, + "detected_speakers": identified_speakers, + "speech_detected_at": datetime.fromtimestamp(speech_detected_at).isoformat(), + "session_id": session_id, + "audio_uuid": session_id, # For job grouping + "client_id": client_id, # For job grouping + } + ) current_job.save_meta() logger.info(f"โœ… Started conversation job {open_job.id}, exiting speech detection") @@ -715,7 +733,7 @@ async def stream_speech_detection_job( "client_id": client_id, "conversation_job_id": open_job.id, "speech_detected_at": datetime.fromtimestamp(speech_detected_at).isoformat(), - "runtime_seconds": time.time() - start_time + "runtime_seconds": time.time() - start_time, } # Session ended without speech @@ -725,7 +743,5 @@ async def stream_speech_detection_job( "user_id": user_id, "client_id": client_id, "no_speech_detected": True, - "runtime_seconds": time.time() - start_time + "runtime_seconds": time.time() - start_time, } - - diff --git a/backends/advanced/webui/src/pages/FrappeGanttTimeline.tsx b/backends/advanced/webui/src/pages/FrappeGanttTimeline.tsx index d8da0aed..6765cf98 100644 --- a/backends/advanced/webui/src/pages/FrappeGanttTimeline.tsx +++ b/backends/advanced/webui/src/pages/FrappeGanttTimeline.tsx @@ -47,6 +47,16 @@ 
export default function FrappeGanttTimeline() { const scrollLeft = useRef(0) const { user } = useAuth() + // HTML escape function to prevent XSS attacks + const escapeHtml = (unsafe: string): string => { + return unsafe + .replace(/&/g, '&amp;') + .replace(/</g, '&lt;') + .replace(/>/g, '&gt;') + .replace(/"/g, '&quot;') + .replace(/'/g, '&#039;') + } + // Demo data for testing the Timeline visualization - spans multiple years const getDemoMemories = (): MemoryWithTimeRange[] => { return [ @@ -251,7 +261,13 @@ export default function FrappeGanttTimeline() { date_format: 'YYYY-MM-DD', language: 'en', custom_popup_html: (task: any) => { - const memory = displayMemories.find(m => task.id.startsWith(m.id)) + // Extract memoryId from task.id (format: "memoryId-index") + // Use lastIndexOf to handle memory IDs that contain dashes (e.g., UUIDs) + const lastDashIndex = task.id.lastIndexOf('-') + const memoryId = lastDashIndex !== -1 ? task.id.slice(0, lastDashIndex) : task.id + + // Find memory using exact equality instead of prefix matching + const memory = displayMemories.find(m => m.id === memoryId) const startDate = new Date(task._start) const endDate = new Date(task._end) const formatOptions: Intl.DateTimeFormatOptions = { @@ -263,14 +279,14 @@ export default function FrappeGanttTimeline() { } return ` ` } diff --git a/backends/advanced/webui/src/pages/MyceliaTimeline.tsx b/backends/advanced/webui/src/pages/MyceliaTimeline.tsx index 48a4a24a..ba193f71 100644 --- a/backends/advanced/webui/src/pages/MyceliaTimeline.tsx +++ b/backends/advanced/webui/src/pages/MyceliaTimeline.tsx @@ -244,18 +244,39 @@ export default function MyceliaTimeline() { minute: '2-digit' }) + // Build tooltip using DOM APIs to prevent XSS tooltip .style('opacity', 1) .style('left', `${event.pageX + 10}px`) .style('top', `${event.pageY - 10}px`) - .html(` -
${d.name}
-
-
Start: ${startDate}
-
End: ${endDate}
-
Click to view memory
-
- `) + .html('') // Clear existing content + + // Add title (user-controlled content via textContent) + tooltip + .append('div') + .attr('class', 'font-semibold text-sm mb-1') + .text(d.name) // Safe: uses textContent, not innerHTML + + // Add details container + const detailsDiv = tooltip + .append('div') + .attr('class', 'text-xs text-gray-600 dark:text-gray-300') + + // Add start date + const startDiv = detailsDiv.append('div') + startDiv.append('strong').text('Start: ') + startDiv.append('span').text(startDate) // Safe: uses textContent + + // Add end date + const endDiv = detailsDiv.append('div') + endDiv.append('strong').text('End: ') + endDiv.append('span').text(endDate) // Safe: uses textContent + + // Add static click instruction + detailsDiv + .append('div') + .attr('class', 'mt-1 text-blue-600 dark:text-blue-400') + .text('Click to view memory') } }) .on('mouseout', function() { @@ -269,8 +290,13 @@ export default function MyceliaTimeline() { .on('click', function(event, d) { event.stopPropagation() // Extract memory ID from task ID (format: "memory-id-rangeIndex") - const memoryId = d.id.split('-').slice(0, -1).join('-') - navigate(`/memories/${memoryId}`) + // Use lastIndexOf to handle memory IDs that contain dashes (e.g., UUIDs) + const lastDashIndex = d.id.lastIndexOf('-') + const memoryId = lastDashIndex !== -1 ? 
d.id.slice(0, lastDashIndex) : d.id + + if (memoryId) { + navigate(`/memories/${memoryId}`) + } }) // Add labels diff --git a/backends/advanced/webui/src/pages/ReactGanttTimeline.tsx b/backends/advanced/webui/src/pages/ReactGanttTimeline.tsx index e1bc127f..daf6bc18 100644 --- a/backends/advanced/webui/src/pages/ReactGanttTimeline.tsx +++ b/backends/advanced/webui/src/pages/ReactGanttTimeline.tsx @@ -144,10 +144,17 @@ export default function ReactGanttTimeline() { } const fetchMemoriesWithTimeRanges = async () => { + // Guard: only fetch if user ID exists + if (!user?.id) { + setError('User not authenticated') + setLoading(false) + return + } + setLoading(true) setError(null) try { - const response = await memoriesApi.getAll() + const response = await memoriesApi.getAll(user.id) // Extract memories from response const memoriesData = response.data.memories || response.data || [] From 96e4e1925ef1b50beaaacc9f110fae972497a225 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Wed, 10 Dec 2025 10:35:29 +0000 Subject: [PATCH 18/31] added typescript dependancies --- backends/advanced/webui/package-lock.json | 170 ++++---- backends/advanced/webui/package.json | 3 +- .../advanced/webui/src/hooks/useD3Zoom.ts | 7 +- .../webui/src/pages/MyceliaTimeline.tsx | 24 +- .../webui/src/pages/ReactGanttTimeline.tsx | 366 ------------------ 5 files changed, 107 insertions(+), 463 deletions(-) delete mode 100644 backends/advanced/webui/src/pages/ReactGanttTimeline.tsx diff --git a/backends/advanced/webui/package-lock.json b/backends/advanced/webui/package-lock.json index 4582a222..5ab5420e 100644 --- a/backends/advanced/webui/package-lock.json +++ b/backends/advanced/webui/package-lock.json @@ -20,6 +20,7 @@ }, "devDependencies": { "@types/d3": "^7.4.3", + "@types/frappe-gantt": "^0.9.0", "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", "@types/react-vertical-timeline-component": "^3.3.6", @@ -31,7 +32,7 @@ "eslint-plugin-react-hooks": "^4.6.0", 
"eslint-plugin-react-refresh": "^0.4.5", "postcss": "^8.4.32", - "sass-embedded": "^1.80.7", + "sass-embedded": "^1.83.0", "tailwindcss": "^3.3.0", "typescript": "^5.2.2", "vite": "^5.0.8" @@ -1990,6 +1991,13 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/frappe-gantt": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@types/frappe-gantt/-/frappe-gantt-0.9.0.tgz", + "integrity": "sha512-n00ElvRvJ1/+HkJwt57yjnTtAM7FcH/pEV9LbRCy3+hR39TY6l0mQuy4o909uxvw97aCNhQjNh8J8xACKJ2G3w==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/geojson": { "version": "7946.0.16", "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", @@ -5244,9 +5252,9 @@ "license": "MIT" }, "node_modules/sass": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.93.3.tgz", - "integrity": "sha512-elOcIZRTM76dvxNAjqYrucTSI0teAF/L2Lv0s6f6b7FOwcwIuA357bIE871580AjHJuSvLIRUosgV+lIWx6Rgg==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.95.1.tgz", + "integrity": "sha512-uPoDh5NIEZV4Dp5GBodkmNY9tSQfXY02pmCcUo+FR1P+x953HGkpw+vV28D4IqYB6f8webZtwoSaZaiPtpTeMg==", "dev": true, "license": "MIT", "optional": true, @@ -5266,9 +5274,9 @@ } }, "node_modules/sass-embedded": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded/-/sass-embedded-1.93.3.tgz", - "integrity": "sha512-+VUy01yfDqNmIVMd/LLKl2TTtY0ovZN0rTonh+FhKr65mFwIYgU9WzgIZKS7U9/SPCQvWTsTGx9jyt+qRm/XFw==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded/-/sass-embedded-1.95.1.tgz", + "integrity": "sha512-l086+s40Z0qP7ckj4T+rI/7tZcwAfcKCG9ah9A808yINWOxZFv0kO0u/UHhR4G9Aimeyax/JNvqh8RE7z1wngg==", "dev": true, "license": "MIT", "dependencies": { @@ -5288,30 +5296,30 @@ "node": ">=16.0.0" }, "optionalDependencies": { - "sass-embedded-all-unknown": "1.93.3", - "sass-embedded-android-arm": "1.93.3", - "sass-embedded-android-arm64": "1.93.3", - "sass-embedded-android-riscv64": 
"1.93.3", - "sass-embedded-android-x64": "1.93.3", - "sass-embedded-darwin-arm64": "1.93.3", - "sass-embedded-darwin-x64": "1.93.3", - "sass-embedded-linux-arm": "1.93.3", - "sass-embedded-linux-arm64": "1.93.3", - "sass-embedded-linux-musl-arm": "1.93.3", - "sass-embedded-linux-musl-arm64": "1.93.3", - "sass-embedded-linux-musl-riscv64": "1.93.3", - "sass-embedded-linux-musl-x64": "1.93.3", - "sass-embedded-linux-riscv64": "1.93.3", - "sass-embedded-linux-x64": "1.93.3", - "sass-embedded-unknown-all": "1.93.3", - "sass-embedded-win32-arm64": "1.93.3", - "sass-embedded-win32-x64": "1.93.3" + "sass-embedded-all-unknown": "1.95.1", + "sass-embedded-android-arm": "1.95.1", + "sass-embedded-android-arm64": "1.95.1", + "sass-embedded-android-riscv64": "1.95.1", + "sass-embedded-android-x64": "1.95.1", + "sass-embedded-darwin-arm64": "1.95.1", + "sass-embedded-darwin-x64": "1.95.1", + "sass-embedded-linux-arm": "1.95.1", + "sass-embedded-linux-arm64": "1.95.1", + "sass-embedded-linux-musl-arm": "1.95.1", + "sass-embedded-linux-musl-arm64": "1.95.1", + "sass-embedded-linux-musl-riscv64": "1.95.1", + "sass-embedded-linux-musl-x64": "1.95.1", + "sass-embedded-linux-riscv64": "1.95.1", + "sass-embedded-linux-x64": "1.95.1", + "sass-embedded-unknown-all": "1.95.1", + "sass-embedded-win32-arm64": "1.95.1", + "sass-embedded-win32-x64": "1.95.1" } }, "node_modules/sass-embedded-all-unknown": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-all-unknown/-/sass-embedded-all-unknown-1.93.3.tgz", - "integrity": "sha512-3okGgnE41eg+CPLtAPletu6nQ4N0ij7AeW+Sl5Km4j29XcmqZQeFwYjHe1AlKTEgLi/UAONk1O8i8/lupeKMbw==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-all-unknown/-/sass-embedded-all-unknown-1.95.1.tgz", + "integrity": "sha512-ObGM3xSHEK2fu89GusvAdk1hId3D1R03CyQ6/AVTFSrcBFav1a3aWUmBWtImzf5LsVzliRnlAPPS6+rT/Ghb1A==", "cpu": [ "!arm", "!arm64", @@ -5322,13 +5330,13 @@ "license": "MIT", "optional": true, "dependencies": { 
- "sass": "1.93.3" + "sass": "1.95.1" } }, "node_modules/sass-embedded-android-arm": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-android-arm/-/sass-embedded-android-arm-1.93.3.tgz", - "integrity": "sha512-8xOw9bywfOD6Wv24BgCmgjkk6tMrsOTTHcb28KDxeJtFtoxiUyMbxo0vChpPAfp2Hyg2tFFKS60s0s4JYk+Raw==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-android-arm/-/sass-embedded-android-arm-1.95.1.tgz", + "integrity": "sha512-siaN1TVEjhBP4QJ5UlDBRhyKmMbFhbdcyHj0B4hIuNcinuVprP6tH1NT0NkHvkXh2egBmTvjzZgJ1ySsCB32JA==", "cpu": [ "arm" ], @@ -5343,9 +5351,9 @@ } }, "node_modules/sass-embedded-android-arm64": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-android-arm64/-/sass-embedded-android-arm64-1.93.3.tgz", - "integrity": "sha512-uqUl3Kt1IqdGVAcAdbmC+NwuUJy8tM+2ZnB7/zrt6WxWVShVCRdFnWR9LT8HJr7eJN7AU8kSXxaVX/gedanPsg==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-android-arm64/-/sass-embedded-android-arm64-1.95.1.tgz", + "integrity": "sha512-E+3vZXhUOVHFiSITH2g53/ynxTG4zz8vTVrXGAKkZQwSe6aCO22uc1Pah23F3jOrDNF/YLrsyp82T/CIIczK3w==", "cpu": [ "arm64" ], @@ -5360,9 +5368,9 @@ } }, "node_modules/sass-embedded-android-riscv64": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-android-riscv64/-/sass-embedded-android-riscv64-1.93.3.tgz", - "integrity": "sha512-2jNJDmo+3qLocjWqYbXiBDnfgwrUeZgZFHJIwAefU7Fn66Ot7rsXl+XPwlokaCbTpj7eMFIqsRAZ/uDueXNCJg==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-android-riscv64/-/sass-embedded-android-riscv64-1.95.1.tgz", + "integrity": "sha512-UcPcr5JXVtInD+/XE+2DhwPsALUdRAHyippnnAP6MtdaT3+AnqqvzSVy9Gb6SKyeqEk4YxPmIlQpZCVODDT4eA==", "cpu": [ "riscv64" ], @@ -5377,9 +5385,9 @@ } }, "node_modules/sass-embedded-android-x64": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-android-x64/-/sass-embedded-android-x64-1.93.3.tgz", - 
"integrity": "sha512-y0RoAU6ZenQFcjM9PjQd3cRqRTjqwSbtWLL/p68y2oFyh0QGN0+LQ826fc0ZvU/AbqCsAizkqjzOn6cRZJxTTQ==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-android-x64/-/sass-embedded-android-x64-1.95.1.tgz", + "integrity": "sha512-sW/TO+B0Wq9VDTa7YiO74DW4iF9jEYds+9yslaHtc69r/Ch+Zj+ZB6HeJysfmen91zn5CLJDGrnTSrIk+/COfQ==", "cpu": [ "x64" ], @@ -5394,9 +5402,9 @@ } }, "node_modules/sass-embedded-darwin-arm64": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-darwin-arm64/-/sass-embedded-darwin-arm64-1.93.3.tgz", - "integrity": "sha512-7zb/hpdMOdKteK17BOyyypemglVURd1Hdz6QGsggy60aUFfptTLQftLRg8r/xh1RbQAUKWFbYTNaM47J9yPxYg==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-darwin-arm64/-/sass-embedded-darwin-arm64-1.95.1.tgz", + "integrity": "sha512-SWTCwszlBzjin35T2OiGZSDRbC/sqg5Mjepih18lelELrz14eB9LcFTZeiqDfdnwx6qQqPWj2VufCpExr8jElA==", "cpu": [ "arm64" ], @@ -5411,9 +5419,9 @@ } }, "node_modules/sass-embedded-darwin-x64": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-darwin-x64/-/sass-embedded-darwin-x64-1.93.3.tgz", - "integrity": "sha512-Ek1Vp8ZDQEe327Lz0b7h3hjvWH3u9XjJiQzveq74RPpJQ2q6d9LfWpjiRRohM4qK6o4XOHw1X10OMWPXJtdtWg==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-darwin-x64/-/sass-embedded-darwin-x64-1.95.1.tgz", + "integrity": "sha512-0GZEgkE1e8E2h97lUtwgZbKHrJYmRE/KhWQBHv6ZueAto8DJcAFNFrIQiQoRJjraE6QTaw6ahSvc1YJ7gL4OQA==", "cpu": [ "x64" ], @@ -5428,9 +5436,9 @@ } }, "node_modules/sass-embedded-linux-arm": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm/-/sass-embedded-linux-arm-1.93.3.tgz", - "integrity": "sha512-yeiv2y+dp8B4wNpd3+JsHYD0mvpXSfov7IGyQ1tMIR40qv+ROkRqYiqQvAOXf76Qwh4Y9OaYZtLpnsPjfeq6mA==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm/-/sass-embedded-linux-arm-1.95.1.tgz", + "integrity": 
"sha512-zUAm/rztm5Uyy+DSs408VJg404siVgUuZyqId4tFwkPNC5WRKu25Z8bFMriyGaE4YfEqbNwFV07C16mJoGeVOA==", "cpu": [ "arm" ], @@ -5445,9 +5453,9 @@ } }, "node_modules/sass-embedded-linux-arm64": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm64/-/sass-embedded-linux-arm64-1.93.3.tgz", - "integrity": "sha512-RBrHWgfd8Dd8w4fbmdRVXRrhh8oBAPyeWDTKAWw8ZEmuXfVl4ytjDuyxaVilh6rR1xTRTNpbaA/YWApBlLrrNw==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm64/-/sass-embedded-linux-arm64-1.95.1.tgz", + "integrity": "sha512-MQxa+qVX7Os2rMpJ/AvhWup+1cS0JieQgCfi9cz1Zckn4zaUhg35+m2FQhfKvzv4afeW5bubTMOQeTRMQujbXw==", "cpu": [ "arm64" ], @@ -5462,9 +5470,9 @@ } }, "node_modules/sass-embedded-linux-musl-arm": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-arm/-/sass-embedded-linux-musl-arm-1.93.3.tgz", - "integrity": "sha512-fU0fwAwbp7sBE3h5DVU5UPzvaLg7a4yONfFWkkcCp6ZrOiPuGRHXXYriWQ0TUnWy4wE+svsVuWhwWgvlb/tkKg==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-arm/-/sass-embedded-linux-musl-arm-1.95.1.tgz", + "integrity": "sha512-gNdaGmM3nZ0jkFNmyXWyNlXZPdaMP+7n5Mk3yGFGShqRt/6T/bHh5SkyNnU2ZdP1z7R9poPItJhULrZJ42ETeA==", "cpu": [ "arm" ], @@ -5479,9 +5487,9 @@ } }, "node_modules/sass-embedded-linux-musl-arm64": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-arm64/-/sass-embedded-linux-musl-arm64-1.93.3.tgz", - "integrity": "sha512-PS829l+eUng+9W4PFclXGb4uA2+965NHV3/Sa5U7qTywjeeUUYTZg70dJHSqvhrBEfCc2XJABeW3adLJbyQYkw==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-arm64/-/sass-embedded-linux-musl-arm64-1.95.1.tgz", + "integrity": "sha512-8lD5vHGzBjBRCMIr9CXCyjmy8Q1q+H4ygcYCIm/aPNYhrm9uPOzJfs8hv9kDRgRAASFkcPGlFw8tDH4QqiJ5wg==", "cpu": [ "arm64" ], @@ -5496,9 +5504,9 @@ } }, "node_modules/sass-embedded-linux-musl-riscv64": { - "version": 
"1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-riscv64/-/sass-embedded-linux-musl-riscv64-1.93.3.tgz", - "integrity": "sha512-cK1oBY+FWQquaIGEeQ5H74KTO8cWsSWwXb/WaildOO9U6wmUypTgUYKQ0o5o/29nZbWWlM1PHuwVYTSnT23Jjg==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-riscv64/-/sass-embedded-linux-musl-riscv64-1.95.1.tgz", + "integrity": "sha512-WjKfHxnFc/jOL5QtmgYuiWCc4616V15DkpE+7z41JWEawRXku6w++w7AR+Zx/jbz93FZ/AsZp27IS3XUt80u3Q==", "cpu": [ "riscv64" ], @@ -5513,9 +5521,9 @@ } }, "node_modules/sass-embedded-linux-musl-x64": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-x64/-/sass-embedded-linux-musl-x64-1.93.3.tgz", - "integrity": "sha512-A7wkrsHu2/I4Zpa0NMuPGkWDVV7QGGytxGyUq3opSXgAexHo/vBPlGoDXoRlSdex0cV+aTMRPjoGIfdmNlHwyg==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-x64/-/sass-embedded-linux-musl-x64-1.95.1.tgz", + "integrity": "sha512-3U6994SRUUmC8mPvSG/vNLUo2ZcGv3jHuPoBywTbJhGQI8gq0hef1MY8TU5mvtj9DhQYlah6MYktM4YrOQgqcQ==", "cpu": [ "x64" ], @@ -5530,9 +5538,9 @@ } }, "node_modules/sass-embedded-linux-riscv64": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-riscv64/-/sass-embedded-linux-riscv64-1.93.3.tgz", - "integrity": "sha512-vWkW1+HTF5qcaHa6hO80gx/QfB6GGjJUP0xLbnAoY4pwEnw5ulGv6RM8qYr8IDhWfVt/KH+lhJ2ZFxnJareisQ==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-riscv64/-/sass-embedded-linux-riscv64-1.95.1.tgz", + "integrity": "sha512-CJ0tEEQnfpJEMCQrdubLsmuVc/c66EgaCAO0ZgSJ/KpxBKF3O1lHN6e1UErRf6VO0rh8ExAOh75po12Vu849Og==", "cpu": [ "riscv64" ], @@ -5547,9 +5555,9 @@ } }, "node_modules/sass-embedded-linux-x64": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-x64/-/sass-embedded-linux-x64-1.93.3.tgz", - "integrity": 
"sha512-k6uFxs+e5jSuk1Y0niCwuq42F9ZC5UEP7P+RIOurIm8w/5QFa0+YqeW+BPWEW5M1FqVOsNZH3qGn4ahqvAEjPA==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-x64/-/sass-embedded-linux-x64-1.95.1.tgz", + "integrity": "sha512-nGnzrEpZZOsGOwrRVyX4t15M8ijZWhc4e4lLpOqaPm+lv23HFncfY05WxU5bRj0KAknrkeTM2IX/6veP2aeUdA==", "cpu": [ "x64" ], @@ -5564,9 +5572,9 @@ } }, "node_modules/sass-embedded-unknown-all": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-unknown-all/-/sass-embedded-unknown-all-1.93.3.tgz", - "integrity": "sha512-o5wj2rLpXH0C+GJKt/VpWp6AnMsCCbfFmnMAttcrsa+U3yrs/guhZ3x55KAqqUsE8F47e3frbsDL+1OuQM5DAA==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-unknown-all/-/sass-embedded-unknown-all-1.95.1.tgz", + "integrity": "sha512-bhywAcadVQoCotD4gVmyMBi2SENPvyLFPrXf33VK5mY487Nf/g5SgGCUuGmfTsbns4NBwbwR7PA/1fnJmeMtdA==", "dev": true, "license": "MIT", "optional": true, @@ -5577,13 +5585,13 @@ "!win32" ], "dependencies": { - "sass": "1.93.3" + "sass": "1.95.1" } }, "node_modules/sass-embedded-win32-arm64": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-win32-arm64/-/sass-embedded-win32-arm64-1.93.3.tgz", - "integrity": "sha512-0dOfT9moy9YmBolodwYYXtLwNr4jL4HQC9rBfv6mVrD7ud8ue2kDbn+GVzj1hEJxvEexVSmDCf7MHUTLcGs9xQ==", + "version": "1.95.1", + "resolved": "https://registry.npmjs.org/sass-embedded-win32-arm64/-/sass-embedded-win32-arm64-1.95.1.tgz", + "integrity": "sha512-RWWODCthWdMVODoq98lyIk9R56mgGJ4TFUjD9LSCe7fAYD/tiTkUabE4AUzkZqknQSYr0n0Q2uy7POSDIKvhVg==", "cpu": [ "arm64" ], @@ -5598,9 +5606,9 @@ } }, "node_modules/sass-embedded-win32-x64": { - "version": "1.93.3", - "resolved": "https://registry.npmjs.org/sass-embedded-win32-x64/-/sass-embedded-win32-x64-1.93.3.tgz", - "integrity": "sha512-wHFVfxiS9hU/sNk7KReD+lJWRp3R0SLQEX4zfOnRP2zlvI2X4IQR5aZr9GNcuMP6TmNpX0nQPZTegS8+h9RrEg==", + "version": "1.95.1", + "resolved": 
"https://registry.npmjs.org/sass-embedded-win32-x64/-/sass-embedded-win32-x64-1.95.1.tgz", + "integrity": "sha512-jotHgOQnCb1XdjK0fhsyuhsfox7Y5EkrOc4h2caEpRcNCnsPTBZHqhuc8Lnw8HbKIhwKYkqWhexkjgz62MShhg==", "cpu": [ "x64" ], diff --git a/backends/advanced/webui/package.json b/backends/advanced/webui/package.json index 43647927..2d42fa35 100644 --- a/backends/advanced/webui/package.json +++ b/backends/advanced/webui/package.json @@ -22,6 +22,7 @@ }, "devDependencies": { "@types/d3": "^7.4.3", + "@types/frappe-gantt": "^0.9.0", "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", "@types/react-vertical-timeline-component": "^3.3.6", @@ -33,7 +34,7 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "postcss": "^8.4.32", - "sass-embedded": "^1.80.7", + "sass-embedded": "^1.83.0", "tailwindcss": "^3.3.0", "typescript": "^5.2.2", "vite": "^5.0.8" diff --git a/backends/advanced/webui/src/hooks/useD3Zoom.ts b/backends/advanced/webui/src/hooks/useD3Zoom.ts index c33db069..87f16735 100644 --- a/backends/advanced/webui/src/hooks/useD3Zoom.ts +++ b/backends/advanced/webui/src/hooks/useD3Zoom.ts @@ -25,7 +25,7 @@ export function useD3Zoom(options: UseD3ZoomOptions = {}) { onZoom?.(t) // Synchronize zoom across all zoomable SVG elements - d3.selectAll('.zoomable').each(function () { + d3.selectAll('.zoomable').each(function (this: SVGSVGElement) { const svg = d3.select(this) const node = svg.node() @@ -47,9 +47,10 @@ export function useD3Zoom(options: UseD3ZoomOptions = {}) { .on('zoom', handleZoom) .wheelDelta(wheelDelta) .touchable(() => true) - .filter((event) => { + .filter((event: Event) => { + const mouseEvent = event as MouseEvent if (event.type === 'dblclick') return false - if (event.button && event.button !== 0) return false + if (mouseEvent.button && mouseEvent.button !== 0) return false return true }), [handleZoom, scaleExtent, wheelDelta] diff --git a/backends/advanced/webui/src/pages/MyceliaTimeline.tsx 
b/backends/advanced/webui/src/pages/MyceliaTimeline.tsx index ba193f71..482c7f6a 100644 --- a/backends/advanced/webui/src/pages/MyceliaTimeline.tsx +++ b/backends/advanced/webui/src/pages/MyceliaTimeline.tsx @@ -215,15 +215,15 @@ export default function MyceliaTimeline() { // Bar background with click and hover bars.append('rect') - .attr('x', d => xScale(d.start)) - .attr('y', d => yScale(d.id)!) - .attr('width', d => Math.max(2, xScale(d.end) - xScale(d.start))) + .attr('x', (d: TimelineTask) => xScale(d.start)) + .attr('y', (d: TimelineTask) => yScale(d.id)!) + .attr('width', (d: TimelineTask) => Math.max(2, xScale(d.end) - xScale(d.start))) .attr('height', yScale.bandwidth()) - .attr('fill', d => d.color) + .attr('fill', (d: TimelineTask) => d.color) .attr('rx', 4) .style('opacity', 0.8) .style('cursor', 'pointer') - .on('mouseover', function(event, d) { + .on('mouseover', function(this: SVGRectElement, event: MouseEvent, d: TimelineTask) { d3.select(this).style('opacity', 1) // Show tooltip @@ -279,7 +279,7 @@ export default function MyceliaTimeline() { .text('Click to view memory') } }) - .on('mouseout', function() { + .on('mouseout', function(this: SVGRectElement) { d3.select(this).style('opacity', 0.8) // Hide tooltip @@ -287,7 +287,7 @@ export default function MyceliaTimeline() { d3.select(tooltipRef.current).style('opacity', 0) } }) - .on('click', function(event, d) { + .on('click', function(this: SVGRectElement, event: MouseEvent, d: TimelineTask) { event.stopPropagation() // Extract memory ID from task ID (format: "memory-id-rangeIndex") // Use lastIndexOf to handle memory IDs that contain dashes (e.g., UUIDs) @@ -307,17 +307,17 @@ export default function MyceliaTimeline() { .enter() .append('text') .attr('x', -10) - .attr('y', d => yScale(d.id)! + yScale.bandwidth() / 2) + .attr('y', (d: TimelineTask) => yScale(d.id)! 
+ yScale.bandwidth() / 2) .attr('dy', '0.35em') .attr('text-anchor', 'end') - .text(d => d.name) + .text((d: TimelineTask) => d.name) .style('fill', 'currentColor') .style('font-size', '12px') // Zoom behavior const zoom = d3.zoom() .scaleExtent([0.5, 5]) - .on('zoom', (event) => { + .on('zoom', (event: d3.D3ZoomEvent) => { const transform = event.transform // Update x scale @@ -332,8 +332,8 @@ export default function MyceliaTimeline() { // Update bars g.selectAll('.bars rect') - .attr('x', d => newXScale(d.start)) - .attr('width', d => Math.max(2, newXScale(d.end) - newXScale(d.start))) + .attr('x', (d: TimelineTask) => newXScale(d.start)) + .attr('width', (d: TimelineTask) => Math.max(2, newXScale(d.end) - newXScale(d.start))) }) svg.call(zoom as any) diff --git a/backends/advanced/webui/src/pages/ReactGanttTimeline.tsx b/backends/advanced/webui/src/pages/ReactGanttTimeline.tsx deleted file mode 100644 index daf6bc18..00000000 --- a/backends/advanced/webui/src/pages/ReactGanttTimeline.tsx +++ /dev/null @@ -1,366 +0,0 @@ -import { useState, useEffect } from 'react' -import { Calendar, RefreshCw, AlertCircle, ZoomIn, ZoomOut } from 'lucide-react' -import Timeline from 'react-gantt-timeline' -import { memoriesApi } from '../services/api' -import { useAuth } from '../contexts/AuthContext' - -interface TimeRange { - start: string - end: string - name?: string -} - -interface MemoryWithTimeRange { - id: string - content: string - created_at: string - metadata?: { - name?: string - timeRanges?: TimeRange[] - isPerson?: boolean - isEvent?: boolean - isPlace?: boolean - } -} - -interface ReactGanttTask { - id: string - name: string - start: Date - end: Date - color?: string -} - -export default function ReactGanttTimeline() { - const [memories, setMemories] = useState([]) - const [loading, setLoading] = useState(false) - const [error, setError] = useState(null) - const [useDemoData, setUseDemoData] = useState(false) - const [zoomLevel, setZoomLevel] = useState(1) // 0.5 = 
50%, 1 = 100%, 2 = 200% - const { user } = useAuth() - - const handleZoomIn = () => { - setZoomLevel(prev => Math.min(prev + 0.25, 3)) // Max 300% - } - - const handleZoomOut = () => { - setZoomLevel(prev => Math.max(prev - 0.25, 0.5)) // Min 50% - } - - // Demo data for testing the Timeline visualization - spans multiple years - const getDemoMemories = (): MemoryWithTimeRange[] => { - return [ - { - id: 'demo-graduation', - content: 'College graduation ceremony and celebration dinner with family.', - created_at: '2024-05-20T14:00:00', - metadata: { - name: 'College Graduation', - isEvent: true, - timeRanges: [ - { - name: 'Graduation Ceremony', - start: '2024-05-20T14:00:00', - end: '2024-05-20T17:00:00' - }, - { - name: 'Celebration Dinner', - start: '2024-05-20T18:00:00', - end: '2024-05-20T21:00:00' - } - ] - } - }, - { - id: 'demo-vacation', - content: 'Summer vacation in Hawaii with family. Visited beaches, hiked Diamond Head, attended a luau.', - created_at: '2024-07-10T08:00:00', - metadata: { - name: 'Hawaii Vacation', - isEvent: true, - timeRanges: [ - { - name: 'Hawaii Trip', - start: '2024-07-10T08:00:00', - end: '2024-07-17T20:00:00' - } - ] - } - }, - { - id: 'demo-marathon', - content: 'Completed first marathon in Boston. Training started 6 months ago.', - created_at: '2025-04-15T06:00:00', - metadata: { - name: 'Boston Marathon', - isEvent: true, - timeRanges: [ - { - name: 'Marathon Race', - start: '2025-04-15T06:00:00', - end: '2025-04-15T11:30:00' - } - ] - } - }, - { - id: 'demo-wedding', - content: "Sarah and Tom's wedding was a beautiful celebration. 
The ceremony started at 3 PM, followed by a reception.", - created_at: '2025-06-15T15:00:00', - metadata: { - name: "Sarah & Tom's Wedding", - isEvent: true, - timeRanges: [ - { - name: 'Wedding Ceremony', - start: '2025-06-15T15:00:00', - end: '2025-06-15T16:30:00' - }, - { - name: 'Reception', - start: '2025-06-15T18:00:00', - end: '2025-06-16T00:00:00' - } - ] - } - }, - { - id: 'demo-conference', - content: 'Tech conference in San Francisco. Attended keynotes, workshops, and networking events.', - created_at: '2026-03-10T09:00:00', - metadata: { - name: 'Tech Conference 2026', - isEvent: true, - timeRanges: [ - { - name: 'Conference', - start: '2026-03-10T09:00:00', - end: '2026-03-13T18:00:00' - } - ] - } - } - ] - } - - const fetchMemoriesWithTimeRanges = async () => { - // Guard: only fetch if user ID exists - if (!user?.id) { - setError('User not authenticated') - setLoading(false) - return - } - - setLoading(true) - setError(null) - try { - const response = await memoriesApi.getAll(user.id) - - // Extract memories from response - const memoriesData = response.data.memories || response.data || [] - - const memoriesWithTimeRanges = memoriesData.filter( - (memory: MemoryWithTimeRange) => - memory.metadata?.timeRanges && - memory.metadata.timeRanges.length > 0 - ) - - if (memoriesWithTimeRanges.length === 0) { - setUseDemoData(true) - setMemories(getDemoMemories()) - setError('No memories with time ranges found. Showing demo data.') - } else { - setMemories(memoriesWithTimeRanges) - setUseDemoData(false) - } - } catch (err) { - console.error('Failed to fetch memories:', err) - setError('Failed to load memories. 
Showing demo data.') - setUseDemoData(true) - setMemories(getDemoMemories()) - } finally { - setLoading(false) - } - } - - useEffect(() => { - if (user) { - fetchMemoriesWithTimeRanges() - } - }, [user]) - - const handleRefresh = () => { - fetchMemoriesWithTimeRanges() - } - - const handleToggleDemoData = () => { - if (useDemoData) { - fetchMemoriesWithTimeRanges() - } else { - setMemories(getDemoMemories()) - setUseDemoData(true) - } - } - - // Convert memories to react-gantt-timeline format - const convertToReactGanttFormat = (memories: MemoryWithTimeRange[]): ReactGanttTask[] => { - const tasks: ReactGanttTask[] = [] - - memories.forEach((memory) => { - const timeRanges = memory.metadata?.timeRanges || [] - const isEvent = memory.metadata?.isEvent - const isPerson = memory.metadata?.isPerson - const isPlace = memory.metadata?.isPlace - - let color = '#3b82f6' // default blue - if (isEvent) color = '#3b82f6' // blue - else if (isPerson) color = '#10b981' // green - else if (isPlace) color = '#f59e0b' // amber - - timeRanges.forEach((range, index) => { - tasks.push({ - id: `${memory.id}-${index}`, - name: range.name || memory.metadata?.name || memory.content.substring(0, 30), - start: new Date(range.start), - end: new Date(range.end), - color: color - }) - }) - }) - - return tasks - } - - const tasks = convertToReactGanttFormat(memories) - - const data = tasks.map((task) => ({ - id: task.id, - name: task.name, - start: task.start, - end: task.end, - color: task.color - })) - - return ( -
-
-
-

- - Timeline (React Gantt) -

-

- Visualize your memories on an interactive timeline using react-gantt-timeline -

-
-
- {/* Zoom controls */} -
- -
- {Math.round(zoomLevel * 100)}% -
- -
- - -
-
- - {error && ( -
- - {error} -
- )} - - {loading ? ( -
- -
- ) : memories.length === 0 ? ( -
- -

- No Timeline Data -

-

- No memories with time ranges found. Try the demo data to see the timeline in action. -

- -
- ) : ( -
- {/* Timeline Container - Expands with zoom */} -
-
- -
-
- - {/* Legend */} -
-
-
- Event -
-
-
- Person -
-
-
- Place -
-
- - {useDemoData && ( -
- Showing demo data with events spanning 2024-2026 -
- )} -
- )} -
- ) -} From c2f8ab265e7af26c1c7e0e64940cde47e0c8e8f2 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Wed, 10 Dec 2025 10:35:42 +0000 Subject: [PATCH 19/31] fixed tests --- tests/endpoints/client_queue_tests.robot | 55 +++++++------------ tests/infrastructure/infra_tests.robot | 5 +- .../websocket_streaming_tests.robot | 1 + 3 files changed, 23 insertions(+), 38 deletions(-) diff --git a/tests/endpoints/client_queue_tests.robot b/tests/endpoints/client_queue_tests.robot index ace3588a..dd8016da 100644 --- a/tests/endpoints/client_queue_tests.robot +++ b/tests/endpoints/client_queue_tests.robot @@ -2,9 +2,10 @@ Documentation Client and Queue Management API Tests Library RequestsLibrary Library Collections -Resource ../resources/setup_resources.robot -Resource ../resources/session_resources.robot -Resource ../resources/user_resources.robot +Resource ../setup/setup_keywords.robot +Resource ../setup/teardown_keywords.robot +Resource ../resources/user_keywords.robot +Resource ../resources/session_keywords.robot Suite Setup Suite Setup Suite Teardown Delete All Sessions @@ -86,16 +87,11 @@ Get Queue Statistics Test Should Be Equal As Integers ${response.status_code} 200 ${stats}= Set Variable ${response.json()} - Dictionary Should Contain Key ${stats} queued - Dictionary Should Contain Key ${stats} processing - Dictionary Should Contain Key ${stats} completed - Dictionary Should Contain Key ${stats} failed + Dictionary Should Contain Key ${stats} queued_jobs + Dictionary Should Contain Key ${stats} processing_jobs + Dictionary Should Contain Key ${stats} completed_jobs + Dictionary Should Contain Key ${stats} failed_jobs - # All counts should be non-negative - Should Be True ${stats}[queued] >= 0 - Should Be True ${stats}[processing] >= 0 - Should Be True ${stats}[completed] >= 0 - Should Be True ${stats}[failed] >= 0 Get Queue Health Test [Documentation] Test getting queue health status @@ -103,16 +99,16 @@ Get Queue Health Test Get Anonymous Session anon_session 
Create API Session admin_session - ${response}= GET On Session admin_session /api/queue/health + ${response}= GET On Session admin_session /api/queue/worker-details Should Be Equal As Integers ${response.status_code} 200 ${health}= Set Variable ${response.json()} - Dictionary Should Contain Key ${health} status - Dictionary Should Contain Key ${health} worker_running - Dictionary Should Contain Key ${health} message + Dictionary Should Contain Key ${health} redis_connection + Dictionary Should Contain Key ${health} workers + Dictionary Should Contain Key ${health} queues # Status should be one of expected values - Should Be True '${health}[status]' in ['healthy', 'stopped', 'unhealthy'] + Should Be True '${health}[redis_connection]' in ['healthy', 'stopped', 'unhealthy'] Queue Jobs User Isolation Test [Documentation] Test that regular users only see their own queue jobs @@ -120,7 +116,7 @@ Queue Jobs User Isolation Test Get Anonymous Session anon_session Create API Session admin_session - + ${RANDOM_ID}= Get Random ID # Create a test user ${test_user}= Create Test User admin_session test-user-${RANDOM_ID}@example.com test-password-123 Create API Session user_session email=test-user-${RANDOM_ID}@example.com password=test-password-123 @@ -139,8 +135,8 @@ Queue Jobs User Isolation Test END END - # Cleanup - Delete Test User ${test_user}[user_id] + # # Cleanup + # Delete Test User ${test_user}[user_id] Invalid Queue Parameters Test [Documentation] Test queue endpoints with invalid parameters @@ -167,7 +163,8 @@ Invalid Queue Parameters Test Unauthorized Client Access Test [Documentation] Test that client endpoints require authentication [Tags] client security negative - Get Anonymous Session session + ${session}= Get Anonymous Session session + # Try to access active clients without token ${response}= GET On Session ${session} /api/clients/active expected_status=401 @@ -176,8 +173,8 @@ Unauthorized Client Access Test Unauthorized Queue Access Test [Documentation] Test 
that queue endpoints require authentication [Tags] queue security negative - Get Anonymous Session session - + ${session}= Get Anonymous Session session + # Try to access queue jobs without token ${response}= GET On Session ${session} /api/queue/jobs expected_status=401 Should Be Equal As Integers ${response.status_code} 401 @@ -186,22 +183,10 @@ Unauthorized Queue Access Test ${response}= GET On Session ${session} /api/queue/stats expected_status=401 Should Be Equal As Integers ${response.status_code} 401 -Queue Health Public Access Test - [Documentation] Test that queue health endpoint is publicly accessible - [Tags] queue health public - Get Anonymous Session session - - # Queue health should be accessible without authentication - ${response}= GET On Session ${session} /api/queue/health - Should Be Equal As Integers ${response.status_code} 200 - - ${health}= Set Variable ${response.json()} - Dictionary Should Contain Key ${health} status Client Manager Integration Test [Documentation] Test client manager functionality [Tags] client manager integration - Get Anonymous Session anon_session Create API Session admin_session diff --git a/tests/infrastructure/infra_tests.robot b/tests/infrastructure/infra_tests.robot index aa64ecb0..09e48a18 100644 --- a/tests/infrastructure/infra_tests.robot +++ b/tests/infrastructure/infra_tests.robot @@ -258,9 +258,8 @@ WebSocket Disconnect Conversation End Reason Test ${device_name}= Set Variable disconnect ${stream_id}= Open Audio Stream device_name=${device_name} - # Send enough audio to trigger speech detection (test audio has speech) - # Test audio is 4 minutes long at 16kHz, sending 200 chunks ensures enough speech - Send Audio Chunks To Stream ${stream_id} ${TEST_AUDIO_FILE} num_chunks=200 + # Send audio fast (no realtime pacing) to simulate disconnect before END signal + Send Audio Chunks To Stream ${stream_id} ${TEST_AUDIO_FILE} num_chunks=200 # Wait for conversation job to be created and conversation_id to be populated # 
Transcription + speech analysis takes time (30-60s with queue) diff --git a/tests/integration/websocket_streaming_tests.robot b/tests/integration/websocket_streaming_tests.robot index 6f2c7a9a..01e0a533 100644 --- a/tests/integration/websocket_streaming_tests.robot +++ b/tests/integration/websocket_streaming_tests.robot @@ -212,6 +212,7 @@ Segment Timestamps Match Cropped Audio # To use a different dataset: Verify Segments Match Expected Timestamps ${segments} ${EXPECTED_SEGMENT_TIMES_SHORT} # To use custom tolerance: Verify Segments Match Expected Timestamps ${segments} ${EXPECTED_SEGMENT_TIMES} ${tolerance}=1.0 Verify Segments Match Expected Timestamps ${segments} expected_segments=${EXPECTED_SEGMENT_TIMES} + Log To Console โœ“ Validated ${segment_count} segments with proper cropped timestamps matching expected data From e12e477edaefb0a8611c8df32e23b2050761abf1 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Tue, 9 Dec 2025 22:17:07 +0000 Subject: [PATCH 20/31] use instance localstorage to get auth token --- backends/advanced/webui/src/App.tsx | 7 ++- .../webui/src/contexts/AuthContext.tsx | 15 +++--- .../webui/src/hooks/useAudioRecording.ts | 54 ++++++------------- .../src/hooks/useSimpleAudioRecording.ts | 44 ++++++--------- .../webui/src/pages/Conversations.tsx | 5 +- backends/advanced/webui/src/services/api.ts | 48 ++++++++++++----- backends/advanced/webui/src/utils/storage.ts | 11 ++++ backends/advanced/webui/vite.config.ts | 3 +- 8 files changed, 96 insertions(+), 91 deletions(-) create mode 100644 backends/advanced/webui/src/utils/storage.ts diff --git a/backends/advanced/webui/src/App.tsx b/backends/advanced/webui/src/App.tsx index 6f7f3e72..fca59623 100644 --- a/backends/advanced/webui/src/App.tsx +++ b/backends/advanced/webui/src/App.tsx @@ -18,12 +18,15 @@ import { ErrorBoundary, PageErrorBoundary } from './components/ErrorBoundary' function App() { console.log('๐Ÿš€ Full App restored with working login!') - + + // Get base path from Vite config 
(e.g., "/prod/" for path-based routing) + const basename = import.meta.env.BASE_URL + return ( - + } /> (undefined) export function AuthProvider({ children }: { children: ReactNode }) { const [user, setUser] = useState(null) - const [token, setToken] = useState(localStorage.getItem('token')) + const [token, setToken] = useState(localStorage.getItem(getStorageKey('token'))) const [isLoading, setIsLoading] = useState(true) // Check if user is admin @@ -30,7 +31,7 @@ export function AuthProvider({ children }: { children: ReactNode }) { useEffect(() => { const initAuth = async () => { console.log('๐Ÿ” AuthContext: Initializing authentication...') - const savedToken = localStorage.getItem('token') + const savedToken = localStorage.getItem(getStorageKey('token')) console.log('๐Ÿ” AuthContext: Saved token exists:', !!savedToken) if (savedToken) { @@ -44,7 +45,7 @@ export function AuthProvider({ children }: { children: ReactNode }) { } catch (error) { console.error('โŒ AuthContext: Token verification failed:', error) // Token is invalid, clear it - localStorage.removeItem('token') + localStorage.removeItem(getStorageKey('token')) setToken(null) setUser(null) } @@ -64,9 +65,9 @@ export function AuthProvider({ children }: { children: ReactNode }) { const { access_token } = response.data setToken(access_token) - localStorage.setItem('token', access_token) + localStorage.setItem(getStorageKey('token'), access_token) // Store JWT for Mycelia auto-login (enables seamless access to Mycelia frontend) - localStorage.setItem('mycelia_jwt_token', access_token) + localStorage.setItem(getStorageKey('mycelia_jwt_token'), access_token) // Get user info const userResponse = await authApi.getMe() @@ -100,8 +101,8 @@ export function AuthProvider({ children }: { children: ReactNode }) { const logout = () => { setUser(null) setToken(null) - localStorage.removeItem('token') - localStorage.removeItem('mycelia_jwt_token') + localStorage.removeItem(getStorageKey('token')) + 
localStorage.removeItem(getStorageKey('mycelia_jwt_token')) } return ( diff --git a/backends/advanced/webui/src/hooks/useAudioRecording.ts b/backends/advanced/webui/src/hooks/useAudioRecording.ts index 3e303cbc..dbb29889 100644 --- a/backends/advanced/webui/src/hooks/useAudioRecording.ts +++ b/backends/advanced/webui/src/hooks/useAudioRecording.ts @@ -1,4 +1,6 @@ import { useState, useRef, useCallback, useEffect } from 'react' +import { BACKEND_URL } from '../services/api' +import { getStorageKey } from '../utils/storage' export interface ComponentErrors { websocket: string | null @@ -126,48 +128,26 @@ export const useAudioRecording = (): UseAudioRecordingReturn => { setError(null) try { - const token = localStorage.getItem('token') + const token = localStorage.getItem(getStorageKey('token')) if (!token) { throw new Error('No authentication token found') } - // Build WebSocket URL using same logic as API service - let wsUrl: string - const { protocol, port } = window.location - // Check if we have a backend URL from environment - if (import.meta.env.VITE_BACKEND_URL) { - const backendUrl = import.meta.env.VITE_BACKEND_URL - const wsProtocol = protocol === 'https:' ? 
'wss:' : 'ws:' - // Fallback logic based on current location - const isStandardPort = (protocol === 'https:' && (port === '' || port === '443')) || - (protocol === 'http:' && (port === '' || port === '80')) - - if (isStandardPort || backendUrl === '') { - // Use same origin for Ingress access - wsUrl = `${wsProtocol}//${window.location.host}/ws_pcm?token=${token}&device_name=webui-recorder` - } else if (backendUrl != undefined && backendUrl != '') { - wsUrl = `${wsProtocol}//${backendUrl}/ws_pcm?token=${token}&device_name=webui-recorder` - } - else if (port === '5173') { - // Development mode - wsUrl = `ws://localhost:8000/ws_pcm?token=${token}&device_name=webui-recorder` - } else { - // Fallback - use same origin instead of hardcoded port 8000 - wsUrl = `${wsProtocol}//${window.location.host}/ws_pcm?token=${token}&device_name=webui-recorder` - } + // Build WebSocket URL using BACKEND_URL from API service (handles base path correctly) + const { protocol } = window.location + const wsProtocol = protocol === 'https:' ? 'wss:' : 'ws:' + + let wsUrl: string + if (BACKEND_URL && BACKEND_URL.startsWith('http')) { + // BACKEND_URL is a full URL (e.g., http://localhost:8000) + const backendHost = BACKEND_URL.replace(/^https?:\/\//, '') + wsUrl = `${wsProtocol}//${backendHost}/ws_pcm?token=${token}&device_name=webui-recorder` + } else if (BACKEND_URL && BACKEND_URL !== '') { + // BACKEND_URL is a path (e.g., /prod) + wsUrl = `${wsProtocol}//${window.location.host}${BACKEND_URL}/ws_pcm?token=${token}&device_name=webui-recorder` } else { - // No environment variable set, use fallback logic - const wsProtocol = protocol === 'https:' ? 
'wss:' : 'ws:' - const isStandardPort = (protocol === 'https:' && (port === '' || port === '443')) || - (protocol === 'http:' && (port === '' || port === '80')) - - if (isStandardPort) { - wsUrl = `${wsProtocol}//${window.location.host}/ws_pcm?token=${token}&device_name=webui-recorder` - } else if (port === '5173') { - wsUrl = `ws://localhost:8000/ws_pcm?token=${token}&device_name=webui-recorder` - } else { - wsUrl = `${wsProtocol}//${window.location.host}/ws_pcm?token=${token}&device_name=webui-recorder` - } + // BACKEND_URL is empty (same origin) + wsUrl = `${wsProtocol}//${window.location.host}/ws_pcm?token=${token}&device_name=webui-recorder` } const ws = new WebSocket(wsUrl) // Note: Don't set binaryType yet - will cause protocol violations with text messages diff --git a/backends/advanced/webui/src/hooks/useSimpleAudioRecording.ts b/backends/advanced/webui/src/hooks/useSimpleAudioRecording.ts index e0a1badc..cb3e3eee 100644 --- a/backends/advanced/webui/src/hooks/useSimpleAudioRecording.ts +++ b/backends/advanced/webui/src/hooks/useSimpleAudioRecording.ts @@ -1,4 +1,6 @@ import { useState, useRef, useCallback, useEffect } from 'react' +import { BACKEND_URL } from '../services/api' +import { getStorageKey } from '../utils/storage' export type RecordingStep = 'idle' | 'mic' | 'websocket' | 'audio-start' | 'streaming' | 'stopping' | 'error' export type RecordingMode = 'batch' | 'streaming' @@ -152,40 +154,26 @@ export const useSimpleAudioRecording = (): SimpleAudioRecordingReturn => { // Step 2: Connect WebSocket const connectWebSocket = useCallback(async (): Promise => { console.log('๐Ÿ”— Step 2: Connecting to WebSocket') - - const token = localStorage.getItem('token') + + const token = localStorage.getItem(getStorageKey('token')) if (!token) { throw new Error('No authentication token found') } - // Build WebSocket URL using same logic as API service + // Build WebSocket URL using BACKEND_URL from API service (handles base path correctly) + const { protocol } = 
window.location + const wsProtocol = protocol === 'https:' ? 'wss:' : 'ws:' + let wsUrl: string - const { protocol, port } = window.location - - // Check if we have a backend URL from environment - if (import.meta.env.VITE_BACKEND_URL) { - const backendUrl = import.meta.env.VITE_BACKEND_URL - const wsProtocol = protocol === 'https:' ? 'wss:' : 'ws:' - // Fallback logic based on current location - const isStandardPort = (protocol === 'https:' && (port === '' || port === '443')) || - (protocol === 'http:' && (port === '' || port === '80')) - - if (isStandardPort || backendUrl === '') { - // Use same origin for Ingress access - wsUrl = `${wsProtocol}//${window.location.host}/ws_pcm?token=${token}&device_name=webui-simple-recorder` - } else if (backendUrl != undefined && backendUrl != '') { - wsUrl = `${wsProtocol}//${backendUrl}/ws_pcm?token=${token}&device_name=webui-simple-recorder` - } - else if (port === '5173') { - // Development mode - wsUrl = `ws://localhost:8000/ws_pcm?token=${token}&device_name=webui-simple-recorder` - } else { - // Fallback - use same origin instead of hardcoded port 8000 - wsUrl = `${wsProtocol}//${window.location.host}/ws_pcm?token=${token}&device_name=webui-simple-recorder` - } + if (BACKEND_URL && BACKEND_URL.startsWith('http')) { + // BACKEND_URL is a full URL (e.g., http://localhost:8000) + const backendHost = BACKEND_URL.replace(/^https?:\/\//, '') + wsUrl = `${wsProtocol}//${backendHost}/ws_pcm?token=${token}&device_name=webui-simple-recorder` + } else if (BACKEND_URL && BACKEND_URL !== '') { + // BACKEND_URL is a path (e.g., /prod) + wsUrl = `${wsProtocol}//${window.location.host}${BACKEND_URL}/ws_pcm?token=${token}&device_name=webui-simple-recorder` } else { - // No environment variable set, use same origin as fallback - const wsProtocol = protocol === 'https:' ? 
'wss:' : 'ws:' + // BACKEND_URL is empty (same origin) wsUrl = `${wsProtocol}//${window.location.host}/ws_pcm?token=${token}&device_name=webui-simple-recorder` } diff --git a/backends/advanced/webui/src/pages/Conversations.tsx b/backends/advanced/webui/src/pages/Conversations.tsx index b3a34b5c..d4b76ed3 100644 --- a/backends/advanced/webui/src/pages/Conversations.tsx +++ b/backends/advanced/webui/src/pages/Conversations.tsx @@ -2,6 +2,7 @@ import { useState, useEffect, useRef } from 'react' import { MessageSquare, RefreshCw, Calendar, User, Play, Pause, MoreVertical, RotateCcw, Zap, ChevronDown, ChevronUp, Trash2 } from 'lucide-react' import { conversationsApi, BACKEND_URL } from '../services/api' import ConversationVersionHeader from '../components/ConversationVersionHeader' +import { getStorageKey } from '../utils/storage' interface Conversation { conversation_id?: string @@ -330,7 +331,7 @@ export default function Conversations() { // Check if we need to create a new audio element (none exists or previous had error) if (!audio || audio.error) { - const token = localStorage.getItem('token') || ''; + const token = localStorage.getItem(getStorageKey('token')) || ''; const audioUrl = `${BACKEND_URL}/api/audio/get_audio/${conversationId}?cropped=${useCropped}&token=${token}`; console.log('Creating audio element with URL:', audioUrl); console.log('Token present:', !!token, 'Token length:', token.length); @@ -647,7 +648,7 @@ export default function Conversations() { className="w-full h-10" preload="metadata" style={{ minWidth: '300px' }} - src={`${BACKEND_URL}/api/audio/get_audio/${conversation.conversation_id}?cropped=${!debugMode}&token=${localStorage.getItem('token') || ''}`} + src={`${BACKEND_URL}/api/audio/get_audio/${conversation.conversation_id}?cropped=${!debugMode}&token=${localStorage.getItem(getStorageKey('token')) || ''}`} > Your browser does not support the audio element. 
diff --git a/backends/advanced/webui/src/services/api.ts b/backends/advanced/webui/src/services/api.ts index e43902ad..0d988a9d 100644 --- a/backends/advanced/webui/src/services/api.ts +++ b/backends/advanced/webui/src/services/api.ts @@ -1,32 +1,52 @@ import axios from 'axios' +import { getStorageKey } from '../utils/storage' // Get backend URL from environment or auto-detect based on current location const getBackendUrl = () => { - // If explicitly set in environment, use that + const { protocol, hostname, port } = window.location + console.log('Protocol:', protocol) + console.log('Hostname:', hostname) + console.log('Port:', port) + + const isStandardPort = (protocol === 'https:' && (port === '' || port === '443')) || + (protocol === 'http:' && (port === '' || port === '80')) + + // Check if we have a base path (Caddy path-based routing) + const basePath = import.meta.env.BASE_URL + console.log('Base path from Vite:', basePath) + + if (isStandardPort && basePath && basePath !== '/') { + // We're using Caddy path-based routing - use the base path + console.log('Using Caddy path-based routing with base path') + return basePath.replace(/\/$/, '') + } + + // If explicitly set in environment, use that (for direct backend access) if (import.meta.env.VITE_BACKEND_URL !== undefined && import.meta.env.VITE_BACKEND_URL !== '') { + console.log('Using explicit VITE_BACKEND_URL') return import.meta.env.VITE_BACKEND_URL } - - // If accessed through proxy (standard ports), use relative URLs - const { protocol, hostname, port } = window.location - const isStandardPort = (protocol === 'https:' && (port === '' || port === '443')) || - (protocol === 'http:' && (port === '' || port === '80')) - + if (isStandardPort) { - // We're being accessed through nginx proxy or Kubernetes Ingress, use same origin - return '' // Empty string means use relative URLs (same origin) + // We're being accessed through nginx proxy or standard proxy + console.log('Using standard proxy - relative URLs') 
+ return '' } - + // Development mode - direct access to dev server if (port === '5173') { + console.log('Development mode - using localhost:8000') return 'http://localhost:8000' } - + // Fallback + console.log('Fallback - using hostname:8000') return `${protocol}//${hostname}:8000` } const BACKEND_URL = getBackendUrl() +console.log('VITE_BACKEND_URL:', import.meta.env.VITE_BACKEND_URL) + console.log('๐ŸŒ API: Backend URL configured as:', BACKEND_URL || 'Same origin (relative URLs)') // Export BACKEND_URL for use in other components @@ -39,7 +59,7 @@ export const api = axios.create({ // Add request interceptor to include auth token api.interceptors.request.use((config) => { - const token = localStorage.getItem('token') + const token = localStorage.getItem(getStorageKey('token')) if (token) { config.headers.Authorization = `Bearer ${token}` } @@ -54,7 +74,7 @@ api.interceptors.response.use( if (error.response?.status === 401) { // Token expired or invalid, redirect to login console.warn('๐Ÿ” API: 401 Unauthorized - clearing token and redirecting to login') - localStorage.removeItem('token') + localStorage.removeItem(getStorageKey('token')) window.location.href = '/login' } else if (error.code === 'ECONNABORTED') { // Request timeout - don't logout, just log it @@ -228,7 +248,7 @@ export const chatApi = { method: 'POST', headers: { 'Content-Type': 'application/json', - 'Authorization': `Bearer ${localStorage.getItem('token')}` + 'Authorization': `Bearer ${localStorage.getItem(getStorageKey('token'))}` }, body: JSON.stringify(requestBody) }) diff --git a/backends/advanced/webui/src/utils/storage.ts b/backends/advanced/webui/src/utils/storage.ts new file mode 100644 index 00000000..24c5c184 --- /dev/null +++ b/backends/advanced/webui/src/utils/storage.ts @@ -0,0 +1,11 @@ +/** + * Helper to get environment-specific localStorage keys + * Each environment (dev, test, test2, etc.) 
gets its own token storage + * This prevents token conflicts when running multiple environments simultaneously + */ +export const getStorageKey = (key: string): string => { + const basePath = import.meta.env.BASE_URL || '/' + // Normalize: /test2/ -> test2, / -> root + const envName = basePath.replace(/^\/|\/$/g, '') || 'root' + return `${envName}_${key}` +} diff --git a/backends/advanced/webui/vite.config.ts b/backends/advanced/webui/vite.config.ts index a3b411c3..c6f49a58 100644 --- a/backends/advanced/webui/vite.config.ts +++ b/backends/advanced/webui/vite.config.ts @@ -3,10 +3,11 @@ import react from '@vitejs/plugin-react' export default defineConfig({ plugins: [react()], + base: process.env.VITE_BASE_PATH || '/', server: { port: 5173, host: '0.0.0.0', - allowedHosts: process.env.VITE_ALLOWED_HOSTS + allowedHosts: process.env.VITE_ALLOWED_HOSTS ? process.env.VITE_ALLOWED_HOSTS.split(' ').map(host => host.trim()).filter(host => host.length > 0) : [ 'localhost', From 7be07d62f213748606c55049b68b11a25a59fbbf Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Tue, 9 Dec 2025 22:16:40 +0000 Subject: [PATCH 21/31] use db name and use memory userid --- .../src/advanced_omi_backend/app_config.py | 4 ++- .../src/advanced_omi_backend/database.py | 4 ++- .../src/advanced_omi_backend/models/job.py | 5 ++-- .../services/memory/providers/mcp_client.py | 30 ++++++++++++------- .../memory/providers/openmemory_mcp.py | 17 ++++++----- 5 files changed, 38 insertions(+), 22 deletions(-) diff --git a/backends/advanced/src/advanced_omi_backend/app_config.py b/backends/advanced/src/advanced_omi_backend/app_config.py index d42535fd..601c3813 100644 --- a/backends/advanced/src/advanced_omi_backend/app_config.py +++ b/backends/advanced/src/advanced_omi_backend/app_config.py @@ -28,8 +28,10 @@ class AppConfig: def __init__(self): # MongoDB Configuration self.mongodb_uri = os.getenv("MONGODB_URI", "mongodb://mongo:27017") + # default to legacy value to avoid breaking peoples .env + 
self.mongodb_database = os.getenv("MONGODB_DATABASE", "friend-lite") self.mongo_client = AsyncIOMotorClient(self.mongodb_uri) - self.db = self.mongo_client.get_default_database("chronicle") + self.db = self.mongo_client.get_default_database(self.mongodb_database) self.users_col = self.db["users"] self.speakers_col = self.db["speakers"] diff --git a/backends/advanced/src/advanced_omi_backend/database.py b/backends/advanced/src/advanced_omi_backend/database.py index 0d5e6507..ae7650b0 100644 --- a/backends/advanced/src/advanced_omi_backend/database.py +++ b/backends/advanced/src/advanced_omi_backend/database.py @@ -14,6 +14,8 @@ # MongoDB Configuration MONGODB_URI = os.getenv("MONGODB_URI", "mongodb://mongo:27017") +MONGODB_DATABASE = os.getenv("MONGODB_DATABASE", "friend-lite") + mongo_client = AsyncIOMotorClient( MONGODB_URI, maxPoolSize=50, # Increased pool size for concurrent operations @@ -22,7 +24,7 @@ serverSelectionTimeoutMS=5000, # Fail fast if server unavailable socketTimeoutMS=20000, # 20 second timeout for operations ) -db = mongo_client.get_default_database("chronicle") +db = mongo_client.get_default_database(MONGODB_DATABASE) # Collection references (for non-Beanie collections) users_col = db["users"] diff --git a/backends/advanced/src/advanced_omi_backend/models/job.py b/backends/advanced/src/advanced_omi_backend/models/job.py index 8a19fd8e..b295782c 100644 --- a/backends/advanced/src/advanced_omi_backend/models/job.py +++ b/backends/advanced/src/advanced_omi_backend/models/job.py @@ -43,11 +43,12 @@ async def _ensure_beanie_initialized(): mongodb_uri = os.getenv("MONGODB_URI", "mongodb://localhost:27017") # Create MongoDB client + mongodb_database = os.getenv("MONGODB_DATABASE", "friend-lite") client = AsyncIOMotorClient(mongodb_uri) try: - database = client.get_default_database("chronicle") + database = client.get_default_database(mongodb_database) except ConfigurationError: - database = client["chronicle"] + database = client[mongodb_database] 
raise _beanie_initialized = True # Initialize Beanie diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py index 3e08fae7..97acc529 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py @@ -111,20 +111,28 @@ async def add_memories(self, text: str) -> List[str]: # Use REST API endpoint for creating memories # The 'app' field can be either app name (string) or app UUID + payload = { + "user_id": self.user_id, + "text": text, + "app": self.client_name, # Use app name (OpenMemory accepts name or UUID) + "metadata": { + "source": "friend_lite", + "client": self.client_name, + "user_email": self.user_email + }, + "infer": True + } + + memory_logger.info(f"POSTing memory to {self.server_url}/api/v1/memories/ with payload={payload}") + response = await self.client.post( f"{self.server_url}/api/v1/memories/", - json={ - "user_id": self.user_id, - "text": text, - "app": self.client_name, # Use app name (OpenMemory accepts name or UUID) - "metadata": { - "source": "chronicle", - "client": self.client_name, - "user_email": self.user_email - }, - "infer": True - } + json=payload ) + + response_body = response.text[:500] if response.status_code != 200 else "..." 
+ memory_logger.info(f"OpenMemory response: status={response.status_code}, body={response_body}, headers={dict(response.headers)}") + response.raise_for_status() result = response.json() diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py index d8811c67..8e2eda56 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py @@ -142,18 +142,21 @@ async def add_memory( memory_logger.info(f"Skipping empty transcript for {source_id}") return True, [] - # Update MCP client user context for this operation + # Pass Friend-Lite user details to OpenMemory for proper user tracking + # OpenMemory will auto-create users if they don't exist original_user_id = self.mcp_client.user_id original_user_email = self.mcp_client.user_email - self.mcp_client.user_id = user_id # Use the actual Chronicle user's ID - self.mcp_client.user_email = user_email # Use the actual user's email + + # Update MCP client with Friend-Lite user details + self.mcp_client.user_id = user_id + self.mcp_client.user_email = user_email try: # Thin client approach: Send raw transcript to OpenMemory MCP server # OpenMemory handles: extraction, deduplication, vector storage, ACL enriched_transcript = f"[Source: {source_id}, Client: {client_id}] {transcript}" - memory_logger.info(f"Delegating memory processing to OpenMemory MCP for user {user_id}, source {source_id}") + memory_logger.info(f"Delegating memory processing to OpenMemory for user {user_id} (email: {user_email}), source {source_id}") memory_ids = await self.mcp_client.add_memories(text=enriched_transcript) finally: @@ -204,9 +207,9 @@ async def search_memories( if not self._initialized: await self.initialize() - # Update MCP client user context for this operation + # Update MCP client user 
context for this search operation original_user_id = self.mcp_client.user_id - self.mcp_client.user_id = user_id # Use the actual Chronicle user's ID + self.mcp_client.user_id = user_id try: results = await self.mcp_client.search_memory( @@ -231,7 +234,7 @@ async def search_memories( memory_logger.error(f"Search memories failed: {e}") return [] finally: - # Restore original user_id + # Restore original user context self.mcp_client.user_id = original_user_id async def get_all_memories( From 563b9dc58fc2028ecf567d3609ae9909acb11d27 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Thu, 11 Dec 2025 13:06:29 +0000 Subject: [PATCH 22/31] fixed tests and other tweaks --- backends/advanced/docker-compose-ci.yml | 192 ------------------ backends/advanced/docker-compose-test.yml | 1 + tests/infrastructure/infra_tests.robot | 4 +- tests/integration/conversation_queue.robot | 2 +- tests/resources/transcript_verification.robot | 2 +- tests/setup/test_env.py | 20 +- 6 files changed, 18 insertions(+), 203 deletions(-) delete mode 100644 backends/advanced/docker-compose-ci.yml diff --git a/backends/advanced/docker-compose-ci.yml b/backends/advanced/docker-compose-ci.yml deleted file mode 100644 index 79d7a2b0..00000000 --- a/backends/advanced/docker-compose-ci.yml +++ /dev/null @@ -1,192 +0,0 @@ -# docker-compose-ci.yml -# CI/CD environment for GitHub Actions -# Uses built image without source code mounts to ensure memory_config.yaml is included - -services: - friend-backend-test: - build: - context: . 
- dockerfile: Dockerfile - ports: - - "8001:8000" # Avoid conflict with dev on 8000 - volumes: - # No src mount for CI - use built image with all files included - - ./data/test_audio_chunks:/app/audio_chunks - - ./data/test_debug_dir:/app/debug_dir - - ./data/test_data:/app/data - environment: - # Override with test-specific settings - - MONGODB_URI=mongodb://mongo-test:27017/test_db - - QDRANT_BASE_URL=qdrant-test - - QDRANT_PORT=6333 - - REDIS_URL=redis://redis-test:6379/0 - - DEBUG_DIR=/app/debug_dir - # Import API keys from environment - - DEEPGRAM_API_KEY=${DEEPGRAM_API_KEY} - - OPENAI_API_KEY=${OPENAI_API_KEY} - # LLM provider configuration (required for memory service) - - LLM_PROVIDER=${LLM_PROVIDER:-openai} - - OPENAI_BASE_URL=${OPENAI_BASE_URL:-https://api.openai.com/v1} - - OPENAI_MODEL=${OPENAI_MODEL:-gpt-4o-mini} - # Authentication (test-specific) - - AUTH_SECRET_KEY=test-jwt-signing-key-for-integration-tests - - ADMIN_PASSWORD=test-admin-password-123 - - ADMIN_EMAIL=test-admin@example.com - # Transcription provider configuration - - TRANSCRIPTION_PROVIDER=${TRANSCRIPTION_PROVIDER:-deepgram} - # - PARAKEET_ASR_URL=${PARAKEET_ASR_URL} - # Memory provider configuration - - MEMORY_PROVIDER=${MEMORY_PROVIDER:-friend_lite} - - OPENMEMORY_MCP_URL=${OPENMEMORY_MCP_URL:-http://host.docker.internal:8765} - - OPENMEMORY_USER_ID=${OPENMEMORY_USER_ID:-openmemory} - # Disable speaker recognition in test environment to prevent segment duplication - - DISABLE_SPEAKER_RECOGNITION=false - - SPEAKER_SERVICE_URL=https://localhost:8085 - - CORS_ORIGINS=http://localhost:3001,http://localhost:8001,https://localhost:3001,https://localhost:8001 - # Set low inactivity timeout for tests (2 seconds instead of 60) - - SPEECH_INACTIVITY_THRESHOLD_SECONDS=2 - # Wait for audio queue to drain before timing out (test mode) - - WAIT_FOR_AUDIO_QUEUE_DRAIN=true - depends_on: - qdrant-test: - condition: service_started - mongo-test: - condition: service_healthy - redis-test: - condition: 
service_started - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8000/readiness"] - interval: 10s - timeout: 5s - retries: 5 - start_period: 30s - restart: unless-stopped - - webui-test: - build: - context: ./webui - dockerfile: Dockerfile - args: - - VITE_BACKEND_URL=http://localhost:8001 - - BACKEND_URL=http://localhost:8001 - volumes: - - ./webui/src:/app/src # Mount source code for easier development - ports: - - "3001:80" # Avoid conflict with dev on 3000 - depends_on: - friend-backend-test: - condition: service_healthy - mongo-test: - condition: service_healthy - qdrant-test: - condition: service_started - redis-test: - condition: service_started - - qdrant-test: - image: qdrant/qdrant:latest - ports: - - "6337:6333" # gRPC - avoid conflict with dev 6333 - - "6338:6334" # HTTP - avoid conflict with dev 6334 - volumes: - - ./data/test_qdrant_data:/qdrant/storage - - mongo-test: - image: mongo:8.0.14 - ports: - - "27018:27017" # Avoid conflict with dev on 27017 - volumes: - - ./data/test_mongo_data:/data/db - # Use test database name to ensure isolation - command: mongod --dbpath /data/db --bind_ip_all - healthcheck: - test: ["CMD", "mongosh", "--eval", "db.runCommand('ping').ok", "--quiet"] - interval: 5s - timeout: 5s - retries: 10 - start_period: 10s - - redis-test: - image: redis:7-alpine - ports: - - "6380:6379" # Avoid conflict with dev on 6379 - volumes: - - ./data/test_redis_data:/data - command: redis-server --appendonly yes - healthcheck: - test: ["CMD", "redis-cli", "ping"] - interval: 5s - timeout: 3s - retries: 5 - - workers-test: - build: - context: . 
- dockerfile: Dockerfile - command: ./start-workers.sh - volumes: - # No src mount for CI - use built image - - ./data/test_audio_chunks:/app/audio_chunks - - ./data/test_debug_dir:/app/debug_dir - - ./data/test_data:/app/data - environment: - # Same environment as backend - - MONGODB_URI=mongodb://mongo-test:27017/test_db - - QDRANT_BASE_URL=qdrant-test - - QDRANT_PORT=6333 - - REDIS_URL=redis://redis-test:6379/0 - - DEBUG_DIR=/app/debug_dir - - DEEPGRAM_API_KEY=${DEEPGRAM_API_KEY} - - OPENAI_API_KEY=${OPENAI_API_KEY} - - LLM_PROVIDER=${LLM_PROVIDER:-openai} - - OPENAI_BASE_URL=${OPENAI_BASE_URL:-https://api.openai.com/v1} - - OPENAI_MODEL=${OPENAI_MODEL:-gpt-4o-mini} - - AUTH_SECRET_KEY=test-jwt-signing-key-for-integration-tests - - ADMIN_PASSWORD=test-admin-password-123 - - ADMIN_EMAIL=test-admin@example.com - - TRANSCRIPTION_PROVIDER=${TRANSCRIPTION_PROVIDER:-deepgram} - - MEMORY_PROVIDER=${MEMORY_PROVIDER:-friend_lite} - - OPENMEMORY_MCP_URL=${OPENMEMORY_MCP_URL:-http://host.docker.internal:8765} - - OPENMEMORY_USER_ID=${OPENMEMORY_USER_ID:-openmemory} - - DISABLE_SPEAKER_RECOGNITION=false - - SPEAKER_SERVICE_URL=https://localhost:8085 - # Set low inactivity timeout for tests (2 seconds instead of 60) - - SPEECH_INACTIVITY_THRESHOLD_SECONDS=2 - # Wait for audio queue to drain before timing out (test mode) - - WAIT_FOR_AUDIO_QUEUE_DRAIN=true - depends_on: - friend-backend-test: - condition: service_healthy - mongo-test: - condition: service_healthy - redis-test: - condition: service_started - qdrant-test: - condition: service_started - restart: unless-stopped - - # caddy: - # image: caddy:2-alpine - # ports: - # - "443:443" - # - "80:80" # HTTP redirect to HTTPS - # volumes: - # - ./Caddyfile-test:/etc/caddy/Caddyfile:ro - # - ./data/caddy_data:/data - # - ./data/caddy_config:/config - # depends_on: - # webui-test: - # condition: service_started - # friend-backend-test: - # condition: service_healthy - # restart: unless-stopped - -# CI Considerations (for 
future implementation): -# - GitHub Actions can run these services in isolated containers -# - Port conflicts won't exist in CI since each job runs in isolation -# - For CI, we could add: -# - --build flag for fresh builds -# - --force-recreate for clean state -# - Volume cleanup between test runs -# - Environment variables can be injected via GitHub secrets -# - Health checks ensure services are ready before tests run \ No newline at end of file diff --git a/backends/advanced/docker-compose-test.yml b/backends/advanced/docker-compose-test.yml index a507a455..f72ca54d 100644 --- a/backends/advanced/docker-compose-test.yml +++ b/backends/advanced/docker-compose-test.yml @@ -24,6 +24,7 @@ services: # Import API keys from environment - DEEPGRAM_API_KEY=${DEEPGRAM_API_KEY} - OPENAI_API_KEY=${OPENAI_API_KEY} + - OPENAI_BASE_URL=https://api.openai.com/v1 # LLM provider configuration (required for memory service) - LLM_PROVIDER=${LLM_PROVIDER:-openai} - OPENAI_MODEL=${OPENAI_MODEL:-gpt-4o-mini} diff --git a/tests/infrastructure/infra_tests.robot b/tests/infrastructure/infra_tests.robot index 09e48a18..48b1a057 100644 --- a/tests/infrastructure/infra_tests.robot +++ b/tests/infrastructure/infra_tests.robot @@ -26,8 +26,8 @@ Suite Setup Suite Setup Suite Teardown Suite Teardown Test Setup Test Cleanup *** Variables *** -# Container names are now loaded from test_env.py via .env.test -# These local variables can override if needed, but default to env values +# Container names are now dynamically loaded from test_env.py based on COMPOSE_PROJECT_NAME +# This allows tests to work with different docker-compose project names *** Keywords *** diff --git a/tests/integration/conversation_queue.robot b/tests/integration/conversation_queue.robot index a701260a..bde80392 100644 --- a/tests/integration/conversation_queue.robot +++ b/tests/integration/conversation_queue.robot @@ -26,7 +26,7 @@ Test Upload audio creates transcription job # Verify queue is empty ${initial_job_count}= Get 
queue length - Should Be Equal As Integers ${initial_job_count} 0 + # Upload audio file to create conversation and trigger transcription job ${conversation}= Upload Audio File ${TEST_AUDIO_FILE} ${TEST_DEVICE_NAME} diff --git a/tests/resources/transcript_verification.robot b/tests/resources/transcript_verification.robot index a1965f5b..74195565 100644 --- a/tests/resources/transcript_verification.robot +++ b/tests/resources/transcript_verification.robot @@ -254,6 +254,7 @@ Verify Segments Match Expected Timestamps Log All ${actual_count} segments matched expected timestamps within ${tolerance}s tolerance INFO + Verify Transcript Content [Documentation] Verify transcript contains expected content and quality @@ -282,4 +283,3 @@ Verify Transcript Content Log Transcript verification passed: ${transcript_length} chars, ${segment_count} segments INFO - \ No newline at end of file diff --git a/tests/setup/test_env.py b/tests/setup/test_env.py index d1f7f03c..51589fd2 100644 --- a/tests/setup/test_env.py +++ b/tests/setup/test_env.py @@ -7,6 +7,11 @@ test_env_path = Path(__file__).resolve().parents[1] / ".env.test" load_dotenv(test_env_path) +# Load .env from backends/advanced directory to get COMPOSE_PROJECT_NAME +backend_env_path = Path(__file__).resolve().parents[2] / "backends" / "advanced" / ".env" +if backend_env_path.exists(): + load_dotenv(backend_env_path, override=False) + # API Configuration API_URL = 'http://localhost:8001' # Use BACKEND_URL from test.env API_BASE = 'http://localhost:8001/api' @@ -57,10 +62,11 @@ "default_timeout": 30 } -# Docker Container Names (from .env.test) -BACKEND_CONTAINER = os.getenv('BACKEND_CONTAINER', 'advanced-chronicle-backend-test-1') -WORKERS_CONTAINER = os.getenv('WORKERS_CONTAINER', 'advanced-workers-test-1') -MONGO_CONTAINER = os.getenv('MONGO_CONTAINER', 'advanced-mongo-test-1') -REDIS_CONTAINER = os.getenv('REDIS_CONTAINER', 'advanced-redis-test-1') -QDRANT_CONTAINER = os.getenv('QDRANT_CONTAINER', 
'advanced-qdrant-test-1') -WEBUI_CONTAINER = os.getenv('WEBUI_CONTAINER', 'advanced-webui-test-1') \ No newline at end of file +# Docker Container Names (dynamically based on COMPOSE_PROJECT_NAME) +# Default to 'advanced' if not set (which is the directory name) +COMPOSE_PROJECT_NAME = os.getenv('COMPOSE_PROJECT_NAME', 'advanced') +WORKERS_CONTAINER = f"{COMPOSE_PROJECT_NAME}-workers-test-1" +REDIS_CONTAINER = f"{COMPOSE_PROJECT_NAME}-redis-test-1" +BACKEND_CONTAINER = f"{COMPOSE_PROJECT_NAME}-friend-backend-test-1" +MONGO_CONTAINER = f"{COMPOSE_PROJECT_NAME}-mongo-test-1" +QDRANT_CONTAINER = f"{COMPOSE_PROJECT_NAME}-qdrant-test-1" From f233eb88bbb93a1277c7597d73fcf8a33d138303 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Fri, 5 Dec 2025 00:48:43 +0000 Subject: [PATCH 23/31] Changed name to chronicle # Conflicts: # README-K8S.md # backends/advanced/src/advanced_omi_backend/auth.py # backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py # backends/advanced/webui/package-lock.json # backends/advanced/webui/package.json # quickstart.md # tests/infrastructure/infra_tests.robot # tests/integration/websocket_streaming_tests.robot --- .../services/memory/providers/mcp_client.py | 12 +- .../memory/providers/openmemory_mcp.py | 8 +- backends/advanced/webui/package-lock.json | 170 +++++++++--------- backends/advanced/webui/package.json | 2 +- quickstart.md | 2 +- 5 files changed, 97 insertions(+), 97 deletions(-) diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py index 97acc529..971c41f3 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/mcp_client.py @@ -127,7 +127,17 @@ async def add_memories(self, text: str) -> List[str]: response = await self.client.post( f"{self.server_url}/api/v1/memories/", - 
json=payload + json={ + "user_id": self.user_id, + "text": text, + "app": self.client_name, # Use app name (OpenMemory accepts name or UUID) + "metadata": { + "source": "chronicle", + "client": self.client_name, + "user_email": self.user_email + }, + "infer": True + } ) response_body = response.text[:500] if response.status_code != 200 else "..." diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py b/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py index 8e2eda56..2fe34164 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/providers/openmemory_mcp.py @@ -146,10 +146,8 @@ async def add_memory( # OpenMemory will auto-create users if they don't exist original_user_id = self.mcp_client.user_id original_user_email = self.mcp_client.user_email - - # Update MCP client with Friend-Lite user details - self.mcp_client.user_id = user_id - self.mcp_client.user_email = user_email + self.mcp_client.user_id = user_id # Use the actual Chronicle user's ID + self.mcp_client.user_email = user_email # Use the actual user's email try: # Thin client approach: Send raw transcript to OpenMemory MCP server @@ -209,7 +207,7 @@ async def search_memories( # Update MCP client user context for this search operation original_user_id = self.mcp_client.user_id - self.mcp_client.user_id = user_id + self.mcp_client.user_id = user_id # Use the actual Chronicle user's ID try: results = await self.mcp_client.search_memory( diff --git a/backends/advanced/webui/package-lock.json b/backends/advanced/webui/package-lock.json index 5ab5420e..4582a222 100644 --- a/backends/advanced/webui/package-lock.json +++ b/backends/advanced/webui/package-lock.json @@ -20,7 +20,6 @@ }, "devDependencies": { "@types/d3": "^7.4.3", - "@types/frappe-gantt": "^0.9.0", "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", 
"@types/react-vertical-timeline-component": "^3.3.6", @@ -32,7 +31,7 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "postcss": "^8.4.32", - "sass-embedded": "^1.83.0", + "sass-embedded": "^1.80.7", "tailwindcss": "^3.3.0", "typescript": "^5.2.2", "vite": "^5.0.8" @@ -1991,13 +1990,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@types/frappe-gantt": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/@types/frappe-gantt/-/frappe-gantt-0.9.0.tgz", - "integrity": "sha512-n00ElvRvJ1/+HkJwt57yjnTtAM7FcH/pEV9LbRCy3+hR39TY6l0mQuy4o909uxvw97aCNhQjNh8J8xACKJ2G3w==", - "dev": true, - "license": "MIT" - }, "node_modules/@types/geojson": { "version": "7946.0.16", "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", @@ -5252,9 +5244,9 @@ "license": "MIT" }, "node_modules/sass": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.95.1.tgz", - "integrity": "sha512-uPoDh5NIEZV4Dp5GBodkmNY9tSQfXY02pmCcUo+FR1P+x953HGkpw+vV28D4IqYB6f8webZtwoSaZaiPtpTeMg==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.93.3.tgz", + "integrity": "sha512-elOcIZRTM76dvxNAjqYrucTSI0teAF/L2Lv0s6f6b7FOwcwIuA357bIE871580AjHJuSvLIRUosgV+lIWx6Rgg==", "dev": true, "license": "MIT", "optional": true, @@ -5274,9 +5266,9 @@ } }, "node_modules/sass-embedded": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded/-/sass-embedded-1.95.1.tgz", - "integrity": "sha512-l086+s40Z0qP7ckj4T+rI/7tZcwAfcKCG9ah9A808yINWOxZFv0kO0u/UHhR4G9Aimeyax/JNvqh8RE7z1wngg==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded/-/sass-embedded-1.93.3.tgz", + "integrity": "sha512-+VUy01yfDqNmIVMd/LLKl2TTtY0ovZN0rTonh+FhKr65mFwIYgU9WzgIZKS7U9/SPCQvWTsTGx9jyt+qRm/XFw==", "dev": true, "license": "MIT", "dependencies": { @@ -5296,30 +5288,30 @@ "node": ">=16.0.0" }, "optionalDependencies": { - "sass-embedded-all-unknown": "1.95.1", - 
"sass-embedded-android-arm": "1.95.1", - "sass-embedded-android-arm64": "1.95.1", - "sass-embedded-android-riscv64": "1.95.1", - "sass-embedded-android-x64": "1.95.1", - "sass-embedded-darwin-arm64": "1.95.1", - "sass-embedded-darwin-x64": "1.95.1", - "sass-embedded-linux-arm": "1.95.1", - "sass-embedded-linux-arm64": "1.95.1", - "sass-embedded-linux-musl-arm": "1.95.1", - "sass-embedded-linux-musl-arm64": "1.95.1", - "sass-embedded-linux-musl-riscv64": "1.95.1", - "sass-embedded-linux-musl-x64": "1.95.1", - "sass-embedded-linux-riscv64": "1.95.1", - "sass-embedded-linux-x64": "1.95.1", - "sass-embedded-unknown-all": "1.95.1", - "sass-embedded-win32-arm64": "1.95.1", - "sass-embedded-win32-x64": "1.95.1" + "sass-embedded-all-unknown": "1.93.3", + "sass-embedded-android-arm": "1.93.3", + "sass-embedded-android-arm64": "1.93.3", + "sass-embedded-android-riscv64": "1.93.3", + "sass-embedded-android-x64": "1.93.3", + "sass-embedded-darwin-arm64": "1.93.3", + "sass-embedded-darwin-x64": "1.93.3", + "sass-embedded-linux-arm": "1.93.3", + "sass-embedded-linux-arm64": "1.93.3", + "sass-embedded-linux-musl-arm": "1.93.3", + "sass-embedded-linux-musl-arm64": "1.93.3", + "sass-embedded-linux-musl-riscv64": "1.93.3", + "sass-embedded-linux-musl-x64": "1.93.3", + "sass-embedded-linux-riscv64": "1.93.3", + "sass-embedded-linux-x64": "1.93.3", + "sass-embedded-unknown-all": "1.93.3", + "sass-embedded-win32-arm64": "1.93.3", + "sass-embedded-win32-x64": "1.93.3" } }, "node_modules/sass-embedded-all-unknown": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-all-unknown/-/sass-embedded-all-unknown-1.95.1.tgz", - "integrity": "sha512-ObGM3xSHEK2fu89GusvAdk1hId3D1R03CyQ6/AVTFSrcBFav1a3aWUmBWtImzf5LsVzliRnlAPPS6+rT/Ghb1A==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-all-unknown/-/sass-embedded-all-unknown-1.93.3.tgz", + "integrity": 
"sha512-3okGgnE41eg+CPLtAPletu6nQ4N0ij7AeW+Sl5Km4j29XcmqZQeFwYjHe1AlKTEgLi/UAONk1O8i8/lupeKMbw==", "cpu": [ "!arm", "!arm64", @@ -5330,13 +5322,13 @@ "license": "MIT", "optional": true, "dependencies": { - "sass": "1.95.1" + "sass": "1.93.3" } }, "node_modules/sass-embedded-android-arm": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-android-arm/-/sass-embedded-android-arm-1.95.1.tgz", - "integrity": "sha512-siaN1TVEjhBP4QJ5UlDBRhyKmMbFhbdcyHj0B4hIuNcinuVprP6tH1NT0NkHvkXh2egBmTvjzZgJ1ySsCB32JA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-android-arm/-/sass-embedded-android-arm-1.93.3.tgz", + "integrity": "sha512-8xOw9bywfOD6Wv24BgCmgjkk6tMrsOTTHcb28KDxeJtFtoxiUyMbxo0vChpPAfp2Hyg2tFFKS60s0s4JYk+Raw==", "cpu": [ "arm" ], @@ -5351,9 +5343,9 @@ } }, "node_modules/sass-embedded-android-arm64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-android-arm64/-/sass-embedded-android-arm64-1.95.1.tgz", - "integrity": "sha512-E+3vZXhUOVHFiSITH2g53/ynxTG4zz8vTVrXGAKkZQwSe6aCO22uc1Pah23F3jOrDNF/YLrsyp82T/CIIczK3w==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-android-arm64/-/sass-embedded-android-arm64-1.93.3.tgz", + "integrity": "sha512-uqUl3Kt1IqdGVAcAdbmC+NwuUJy8tM+2ZnB7/zrt6WxWVShVCRdFnWR9LT8HJr7eJN7AU8kSXxaVX/gedanPsg==", "cpu": [ "arm64" ], @@ -5368,9 +5360,9 @@ } }, "node_modules/sass-embedded-android-riscv64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-android-riscv64/-/sass-embedded-android-riscv64-1.95.1.tgz", - "integrity": "sha512-UcPcr5JXVtInD+/XE+2DhwPsALUdRAHyippnnAP6MtdaT3+AnqqvzSVy9Gb6SKyeqEk4YxPmIlQpZCVODDT4eA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-android-riscv64/-/sass-embedded-android-riscv64-1.93.3.tgz", + "integrity": "sha512-2jNJDmo+3qLocjWqYbXiBDnfgwrUeZgZFHJIwAefU7Fn66Ot7rsXl+XPwlokaCbTpj7eMFIqsRAZ/uDueXNCJg==", "cpu": [ "riscv64" ], 
@@ -5385,9 +5377,9 @@ } }, "node_modules/sass-embedded-android-x64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-android-x64/-/sass-embedded-android-x64-1.95.1.tgz", - "integrity": "sha512-sW/TO+B0Wq9VDTa7YiO74DW4iF9jEYds+9yslaHtc69r/Ch+Zj+ZB6HeJysfmen91zn5CLJDGrnTSrIk+/COfQ==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-android-x64/-/sass-embedded-android-x64-1.93.3.tgz", + "integrity": "sha512-y0RoAU6ZenQFcjM9PjQd3cRqRTjqwSbtWLL/p68y2oFyh0QGN0+LQ826fc0ZvU/AbqCsAizkqjzOn6cRZJxTTQ==", "cpu": [ "x64" ], @@ -5402,9 +5394,9 @@ } }, "node_modules/sass-embedded-darwin-arm64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-darwin-arm64/-/sass-embedded-darwin-arm64-1.95.1.tgz", - "integrity": "sha512-SWTCwszlBzjin35T2OiGZSDRbC/sqg5Mjepih18lelELrz14eB9LcFTZeiqDfdnwx6qQqPWj2VufCpExr8jElA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-darwin-arm64/-/sass-embedded-darwin-arm64-1.93.3.tgz", + "integrity": "sha512-7zb/hpdMOdKteK17BOyyypemglVURd1Hdz6QGsggy60aUFfptTLQftLRg8r/xh1RbQAUKWFbYTNaM47J9yPxYg==", "cpu": [ "arm64" ], @@ -5419,9 +5411,9 @@ } }, "node_modules/sass-embedded-darwin-x64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-darwin-x64/-/sass-embedded-darwin-x64-1.95.1.tgz", - "integrity": "sha512-0GZEgkE1e8E2h97lUtwgZbKHrJYmRE/KhWQBHv6ZueAto8DJcAFNFrIQiQoRJjraE6QTaw6ahSvc1YJ7gL4OQA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-darwin-x64/-/sass-embedded-darwin-x64-1.93.3.tgz", + "integrity": "sha512-Ek1Vp8ZDQEe327Lz0b7h3hjvWH3u9XjJiQzveq74RPpJQ2q6d9LfWpjiRRohM4qK6o4XOHw1X10OMWPXJtdtWg==", "cpu": [ "x64" ], @@ -5436,9 +5428,9 @@ } }, "node_modules/sass-embedded-linux-arm": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm/-/sass-embedded-linux-arm-1.95.1.tgz", - "integrity": 
"sha512-zUAm/rztm5Uyy+DSs408VJg404siVgUuZyqId4tFwkPNC5WRKu25Z8bFMriyGaE4YfEqbNwFV07C16mJoGeVOA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm/-/sass-embedded-linux-arm-1.93.3.tgz", + "integrity": "sha512-yeiv2y+dp8B4wNpd3+JsHYD0mvpXSfov7IGyQ1tMIR40qv+ROkRqYiqQvAOXf76Qwh4Y9OaYZtLpnsPjfeq6mA==", "cpu": [ "arm" ], @@ -5453,9 +5445,9 @@ } }, "node_modules/sass-embedded-linux-arm64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm64/-/sass-embedded-linux-arm64-1.95.1.tgz", - "integrity": "sha512-MQxa+qVX7Os2rMpJ/AvhWup+1cS0JieQgCfi9cz1Zckn4zaUhg35+m2FQhfKvzv4afeW5bubTMOQeTRMQujbXw==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-arm64/-/sass-embedded-linux-arm64-1.93.3.tgz", + "integrity": "sha512-RBrHWgfd8Dd8w4fbmdRVXRrhh8oBAPyeWDTKAWw8ZEmuXfVl4ytjDuyxaVilh6rR1xTRTNpbaA/YWApBlLrrNw==", "cpu": [ "arm64" ], @@ -5470,9 +5462,9 @@ } }, "node_modules/sass-embedded-linux-musl-arm": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-arm/-/sass-embedded-linux-musl-arm-1.95.1.tgz", - "integrity": "sha512-gNdaGmM3nZ0jkFNmyXWyNlXZPdaMP+7n5Mk3yGFGShqRt/6T/bHh5SkyNnU2ZdP1z7R9poPItJhULrZJ42ETeA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-arm/-/sass-embedded-linux-musl-arm-1.93.3.tgz", + "integrity": "sha512-fU0fwAwbp7sBE3h5DVU5UPzvaLg7a4yONfFWkkcCp6ZrOiPuGRHXXYriWQ0TUnWy4wE+svsVuWhwWgvlb/tkKg==", "cpu": [ "arm" ], @@ -5487,9 +5479,9 @@ } }, "node_modules/sass-embedded-linux-musl-arm64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-arm64/-/sass-embedded-linux-musl-arm64-1.95.1.tgz", - "integrity": "sha512-8lD5vHGzBjBRCMIr9CXCyjmy8Q1q+H4ygcYCIm/aPNYhrm9uPOzJfs8hv9kDRgRAASFkcPGlFw8tDH4QqiJ5wg==", + "version": "1.93.3", + "resolved": 
"https://registry.npmjs.org/sass-embedded-linux-musl-arm64/-/sass-embedded-linux-musl-arm64-1.93.3.tgz", + "integrity": "sha512-PS829l+eUng+9W4PFclXGb4uA2+965NHV3/Sa5U7qTywjeeUUYTZg70dJHSqvhrBEfCc2XJABeW3adLJbyQYkw==", "cpu": [ "arm64" ], @@ -5504,9 +5496,9 @@ } }, "node_modules/sass-embedded-linux-musl-riscv64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-riscv64/-/sass-embedded-linux-musl-riscv64-1.95.1.tgz", - "integrity": "sha512-WjKfHxnFc/jOL5QtmgYuiWCc4616V15DkpE+7z41JWEawRXku6w++w7AR+Zx/jbz93FZ/AsZp27IS3XUt80u3Q==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-riscv64/-/sass-embedded-linux-musl-riscv64-1.93.3.tgz", + "integrity": "sha512-cK1oBY+FWQquaIGEeQ5H74KTO8cWsSWwXb/WaildOO9U6wmUypTgUYKQ0o5o/29nZbWWlM1PHuwVYTSnT23Jjg==", "cpu": [ "riscv64" ], @@ -5521,9 +5513,9 @@ } }, "node_modules/sass-embedded-linux-musl-x64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-x64/-/sass-embedded-linux-musl-x64-1.95.1.tgz", - "integrity": "sha512-3U6994SRUUmC8mPvSG/vNLUo2ZcGv3jHuPoBywTbJhGQI8gq0hef1MY8TU5mvtj9DhQYlah6MYktM4YrOQgqcQ==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-musl-x64/-/sass-embedded-linux-musl-x64-1.93.3.tgz", + "integrity": "sha512-A7wkrsHu2/I4Zpa0NMuPGkWDVV7QGGytxGyUq3opSXgAexHo/vBPlGoDXoRlSdex0cV+aTMRPjoGIfdmNlHwyg==", "cpu": [ "x64" ], @@ -5538,9 +5530,9 @@ } }, "node_modules/sass-embedded-linux-riscv64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-riscv64/-/sass-embedded-linux-riscv64-1.95.1.tgz", - "integrity": "sha512-CJ0tEEQnfpJEMCQrdubLsmuVc/c66EgaCAO0ZgSJ/KpxBKF3O1lHN6e1UErRf6VO0rh8ExAOh75po12Vu849Og==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-riscv64/-/sass-embedded-linux-riscv64-1.93.3.tgz", + "integrity": 
"sha512-vWkW1+HTF5qcaHa6hO80gx/QfB6GGjJUP0xLbnAoY4pwEnw5ulGv6RM8qYr8IDhWfVt/KH+lhJ2ZFxnJareisQ==", "cpu": [ "riscv64" ], @@ -5555,9 +5547,9 @@ } }, "node_modules/sass-embedded-linux-x64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-linux-x64/-/sass-embedded-linux-x64-1.95.1.tgz", - "integrity": "sha512-nGnzrEpZZOsGOwrRVyX4t15M8ijZWhc4e4lLpOqaPm+lv23HFncfY05WxU5bRj0KAknrkeTM2IX/6veP2aeUdA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-linux-x64/-/sass-embedded-linux-x64-1.93.3.tgz", + "integrity": "sha512-k6uFxs+e5jSuk1Y0niCwuq42F9ZC5UEP7P+RIOurIm8w/5QFa0+YqeW+BPWEW5M1FqVOsNZH3qGn4ahqvAEjPA==", "cpu": [ "x64" ], @@ -5572,9 +5564,9 @@ } }, "node_modules/sass-embedded-unknown-all": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-unknown-all/-/sass-embedded-unknown-all-1.95.1.tgz", - "integrity": "sha512-bhywAcadVQoCotD4gVmyMBi2SENPvyLFPrXf33VK5mY487Nf/g5SgGCUuGmfTsbns4NBwbwR7PA/1fnJmeMtdA==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-unknown-all/-/sass-embedded-unknown-all-1.93.3.tgz", + "integrity": "sha512-o5wj2rLpXH0C+GJKt/VpWp6AnMsCCbfFmnMAttcrsa+U3yrs/guhZ3x55KAqqUsE8F47e3frbsDL+1OuQM5DAA==", "dev": true, "license": "MIT", "optional": true, @@ -5585,13 +5577,13 @@ "!win32" ], "dependencies": { - "sass": "1.95.1" + "sass": "1.93.3" } }, "node_modules/sass-embedded-win32-arm64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-win32-arm64/-/sass-embedded-win32-arm64-1.95.1.tgz", - "integrity": "sha512-RWWODCthWdMVODoq98lyIk9R56mgGJ4TFUjD9LSCe7fAYD/tiTkUabE4AUzkZqknQSYr0n0Q2uy7POSDIKvhVg==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-win32-arm64/-/sass-embedded-win32-arm64-1.93.3.tgz", + "integrity": "sha512-0dOfT9moy9YmBolodwYYXtLwNr4jL4HQC9rBfv6mVrD7ud8ue2kDbn+GVzj1hEJxvEexVSmDCf7MHUTLcGs9xQ==", "cpu": [ "arm64" ], @@ -5606,9 +5598,9 @@ } }, 
"node_modules/sass-embedded-win32-x64": { - "version": "1.95.1", - "resolved": "https://registry.npmjs.org/sass-embedded-win32-x64/-/sass-embedded-win32-x64-1.95.1.tgz", - "integrity": "sha512-jotHgOQnCb1XdjK0fhsyuhsfox7Y5EkrOc4h2caEpRcNCnsPTBZHqhuc8Lnw8HbKIhwKYkqWhexkjgz62MShhg==", + "version": "1.93.3", + "resolved": "https://registry.npmjs.org/sass-embedded-win32-x64/-/sass-embedded-win32-x64-1.93.3.tgz", + "integrity": "sha512-wHFVfxiS9hU/sNk7KReD+lJWRp3R0SLQEX4zfOnRP2zlvI2X4IQR5aZr9GNcuMP6TmNpX0nQPZTegS8+h9RrEg==", "cpu": [ "x64" ], diff --git a/backends/advanced/webui/package.json b/backends/advanced/webui/package.json index 2d42fa35..aa33d177 100644 --- a/backends/advanced/webui/package.json +++ b/backends/advanced/webui/package.json @@ -34,7 +34,7 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "postcss": "^8.4.32", - "sass-embedded": "^1.83.0", + "sass-embedded": "^1.80.7", "tailwindcss": "^3.3.0", "typescript": "^5.2.2", "vite": "^5.0.8" diff --git a/quickstart.md b/quickstart.md index 09cc6c26..2f77e3fc 100644 --- a/quickstart.md +++ b/quickstart.md @@ -107,7 +107,7 @@ The setup wizard will automatically download and configure: **Download the code:** ```bash -git clone https://github.com/AnkushMalaker/chronicle.git +git clone https://github.com/chronicle-ai/chronicle.git cd chronicle ``` From 03f8c21e345e51d189cdc8b16cf829c7ac03c93b Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Thu, 11 Dec 2025 14:18:28 +0000 Subject: [PATCH 24/31] changed mobile app package to friend-lite for the moment --- app/app.json | 10 +++++----- app/app/components/DeviceDetails.tsx | 2 +- app/app/components/DeviceListItem.tsx | 2 +- app/app/hooks/useAudioListener.ts | 2 +- app/app/hooks/useDeviceConnection.ts | 2 +- app/app/hooks/useDeviceScanning.ts | 2 +- app/app/index.tsx | 4 ++-- app/package.json | 4 ++-- 8 files changed, 14 insertions(+), 14 deletions(-) diff --git a/app/app.json b/app/app.json index c2446e12..66fbb8c2 100644 --- 
a/app/app.json +++ b/app/app.json @@ -1,7 +1,7 @@ { "expo": { - "name": "chronicle-app", - "slug": "chronicle-app", + "name": "friend-lite-app", + "slug": "friend-lite-app", "version": "1.0.0", "orientation": "portrait", "icon": "./assets/icon.png", @@ -17,9 +17,9 @@ ], "ios": { "supportsTablet": true, - "bundleIdentifier": "com.cupbearer5517.chronicle", + "bundleIdentifier": "com.cupbearer5517.friendlite", "infoPlist": { - "NSMicrophoneUsageDescription": "Chronicle needs access to your microphone to stream audio to the backend for processing." + "NSMicrophoneUsageDescription": "Friend Lite needs access to your microphone to stream audio to the backend for processing." } }, "android": { @@ -27,7 +27,7 @@ "foregroundImage": "./assets/adaptive-icon.png", "backgroundColor": "#ffffff" }, - "package": "com.cupbearer5517.chronicle", + "package": "com.cupbearer5517.friendlite", "permissions": [ "android.permission.BLUETOOTH", "android.permission.BLUETOOTH_ADMIN", diff --git a/app/app/components/DeviceDetails.tsx b/app/app/components/DeviceDetails.tsx index 3bd22b4a..ebf204c3 100644 --- a/app/app/components/DeviceDetails.tsx +++ b/app/app/components/DeviceDetails.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { View, Text, TouchableOpacity, StyleSheet, TextInput } from 'react-native'; -import { BleAudioCodec } from 'chronicle-react-native'; +import { BleAudioCodec } from 'friend-lite-react-native'; interface DeviceDetailsProps { // Device Info diff --git a/app/app/components/DeviceListItem.tsx b/app/app/components/DeviceListItem.tsx index 3da559de..a8083035 100644 --- a/app/app/components/DeviceListItem.tsx +++ b/app/app/components/DeviceListItem.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { View, Text, TouchableOpacity, StyleSheet } from 'react-native'; -import { OmiDevice } from 'chronicle-react-native'; +import { OmiDevice } from 'friend-lite-react-native'; interface DeviceListItemProps { device: OmiDevice; diff --git a/app/app/hooks/useAudioListener.ts 
b/app/app/hooks/useAudioListener.ts index 1dcf225e..391ed125 100644 --- a/app/app/hooks/useAudioListener.ts +++ b/app/app/hooks/useAudioListener.ts @@ -1,6 +1,6 @@ import { useState, useRef, useCallback, useEffect } from 'react'; import { Alert } from 'react-native'; -import { OmiConnection } from 'chronicle-react-native'; +import { OmiConnection } from 'friend-lite-react-native'; import { Subscription, ConnectionPriority } from 'react-native-ble-plx'; // OmiConnection might use this type for subscriptions interface UseAudioListener { diff --git a/app/app/hooks/useDeviceConnection.ts b/app/app/hooks/useDeviceConnection.ts index 964e4d4e..e729169e 100644 --- a/app/app/hooks/useDeviceConnection.ts +++ b/app/app/hooks/useDeviceConnection.ts @@ -1,6 +1,6 @@ import { useState, useCallback } from 'react'; import { Alert } from 'react-native'; -import { OmiConnection, BleAudioCodec, OmiDevice } from 'chronicle-react-native'; +import { OmiConnection, BleAudioCodec, OmiDevice } from 'friend-lite-react-native'; interface UseDeviceConnection { connectedDevice: OmiDevice | null; diff --git a/app/app/hooks/useDeviceScanning.ts b/app/app/hooks/useDeviceScanning.ts index f4c16ff3..d7780266 100644 --- a/app/app/hooks/useDeviceScanning.ts +++ b/app/app/hooks/useDeviceScanning.ts @@ -1,6 +1,6 @@ import { useState, useEffect, useCallback, useRef } from 'react'; import { BleManager, State as BluetoothState } from 'react-native-ble-plx'; -import { OmiConnection, OmiDevice } from 'chronicle-react-native'; // Assuming this is the correct import for Omi types +import { OmiConnection, OmiDevice } from 'friend-lite-react-native'; // Assuming this is the correct import for Omi types interface UseDeviceScanning { devices: OmiDevice[]; diff --git a/app/app/index.tsx b/app/app/index.tsx index 2b20cb7b..8bb1234a 100644 --- a/app/app/index.tsx +++ b/app/app/index.tsx @@ -1,6 +1,6 @@ import React, { useRef, useCallback, useEffect, useState } from 'react'; import { StyleSheet, Text, View, 
SafeAreaView, ScrollView, Platform, FlatList, ActivityIndicator, Alert, Switch, Button, TouchableOpacity, KeyboardAvoidingView } from 'react-native'; -import { OmiConnection } from 'chronicle-react-native'; // OmiDevice also comes from here +import { OmiConnection } from 'friend-lite-react-native'; // OmiDevice also comes from here import { State as BluetoothState } from 'react-native-ble-plx'; // Import State from ble-plx // Hooks @@ -521,7 +521,7 @@ export default function App() { contentContainerStyle={styles.content} keyboardShouldPersistTaps="handled" > - Chronicle + Friend Lite {/* Backend Connection - moved to top */} Date: Thu, 11 Dec 2025 15:13:37 +0000 Subject: [PATCH 25/31] rabbit aI fixes --- backends/advanced/.env.template | 8 ++++---- .../src/advanced_omi_backend/models/conversation.py | 1 + .../src/advanced_omi_backend/services/memory/config.py | 8 +++++++- .../services/memory/service_factory.py | 4 +++- .../src/advanced_omi_backend/services/mycelia_sync.py | 2 +- backends/advanced/webui/package-lock.json | 8 ++++++++ 6 files changed, 24 insertions(+), 7 deletions(-) diff --git a/backends/advanced/.env.template b/backends/advanced/.env.template index 60d2c99e..e9f1e3bf 100644 --- a/backends/advanced/.env.template +++ b/backends/advanced/.env.template @@ -99,9 +99,9 @@ QDRANT_BASE_URL=qdrant # MEMORY PROVIDER CONFIGURATION # ======================================== -# Memory Provider: "friend_lite" (default), "openmemory_mcp", or "mycelia" +# Memory Provider: "chronicle" (default), "openmemory_mcp", or "mycelia" # -# Friend-Lite (default): In-house memory system with full control +# Chronicle (default): In-house memory system with full control # - Custom LLM-powered extraction with individual fact storage # - Smart deduplication and memory updates (ADD/UPDATE/DELETE) # - Direct Qdrant vector storage @@ -121,7 +121,7 @@ QDRANT_BASE_URL=qdrant # - Requires Mycelia server setup (extras/mycelia) # # See MEMORY_PROVIDERS.md for detailed comparison 
-MEMORY_PROVIDER=friend_lite +MEMORY_PROVIDER=chronicle # ---------------------------------------- # OpenMemory MCP Configuration @@ -131,7 +131,7 @@ MEMORY_PROVIDER=friend_lite # cd extras/openmemory-mcp && docker compose up -d # # OPENMEMORY_MCP_URL=http://host.docker.internal:8765 -# OPENMEMORY_CLIENT_NAME=friend_lite +# OPENMEMORY_CLIENT_NAME=chronicle # OPENMEMORY_USER_ID=openmemory # OPENMEMORY_TIMEOUT=30 diff --git a/backends/advanced/src/advanced_omi_backend/models/conversation.py b/backends/advanced/src/advanced_omi_backend/models/conversation.py index 87dc731a..01dd5d96 100644 --- a/backends/advanced/src/advanced_omi_backend/models/conversation.py +++ b/backends/advanced/src/advanced_omi_backend/models/conversation.py @@ -31,6 +31,7 @@ class MemoryProvider(str, Enum): CHRONICLE = "chronicle" OPENMEMORY_MCP = "openmemory_mcp" MYCELIA = "mycelia" + FRIEND_LITE = "friend_lite" # Legacy value class ConversationStatus(str, Enum): """Conversation processing status.""" diff --git a/backends/advanced/src/advanced_omi_backend/services/memory/config.py b/backends/advanced/src/advanced_omi_backend/services/memory/config.py index 7560d88f..f3943f29 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/config.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/config.py @@ -146,9 +146,15 @@ def build_memory_config_from_env() -> MemoryConfig: try: # Determine memory provider memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() + + # Map legacy provider names to current names + if memory_provider in ("friend-lite", "friend_lite"): + memory_logger.info(f"๐Ÿ”ง Mapping legacy provider '{memory_provider}' to 'chronicle'") + memory_provider = "chronicle" + if memory_provider not in [p.value for p in MemoryProvider]: raise ValueError(f"Unsupported memory provider: {memory_provider}") - + memory_provider_enum = MemoryProvider(memory_provider) # For OpenMemory MCP, configuration is much simpler diff --git 
a/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py b/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py index dc57dbe9..5607d8ff 100644 --- a/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py +++ b/backends/advanced/src/advanced_omi_backend/services/memory/service_factory.py @@ -156,7 +156,9 @@ def get_service_info() -> dict: # Try to determine provider from service type if "OpenMemoryMCP" in info["service_type"]: info["memory_provider"] = "openmemory_mcp" - elif "Chronicle" in info["service_type"] or "MemoryService" in info["service_type"]: + elif info["service_type"] == "ChronicleMemoryService": info["memory_provider"] = "chronicle" + elif info["service_type"] == "MyceliaMemoryService": + info["memory_provider"] = "mycelia" return info \ No newline at end of file diff --git a/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py index 84011068..87f3b944 100644 --- a/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py +++ b/backends/advanced/src/advanced_omi_backend/services/mycelia_sync.py @@ -235,7 +235,7 @@ async def sync_admin_on_startup(): logger.info("๐Ÿ”„ Starting Mycelia OAuth synchronization...") # Check if Mycelia sync is enabled - memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle") + memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() if memory_provider != "mycelia": logger.info("Mycelia sync skipped (MEMORY_PROVIDER != mycelia)") return diff --git a/backends/advanced/webui/package-lock.json b/backends/advanced/webui/package-lock.json index 4582a222..7cf02b1a 100644 --- a/backends/advanced/webui/package-lock.json +++ b/backends/advanced/webui/package-lock.json @@ -20,6 +20,7 @@ }, "devDependencies": { "@types/d3": "^7.4.3", + "@types/frappe-gantt": "^0.9.0", "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", 
"@types/react-vertical-timeline-component": "^3.3.6", @@ -1990,6 +1991,13 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/frappe-gantt": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@types/frappe-gantt/-/frappe-gantt-0.9.0.tgz", + "integrity": "sha512-n00ElvRvJ1/+HkJwt57yjnTtAM7FcH/pEV9LbRCy3+hR39TY6l0mQuy4o909uxvw97aCNhQjNh8J8xACKJ2G3w==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/geojson": { "version": "7946.0.16", "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", From e9d8f29aa772900b3a0a9bdbe55ef1e7154216a5 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Tue, 16 Dec 2025 10:31:21 +0000 Subject: [PATCH 26/31] removed frappe-gantt and reverted the sass downgrade --- backends/advanced/webui/package-lock.json | 10 +--------- backends/advanced/webui/src/pages/System.tsx | 6 +++--- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/backends/advanced/webui/package-lock.json b/backends/advanced/webui/package-lock.json index 7cf02b1a..861928e3 100644 --- a/backends/advanced/webui/package-lock.json +++ b/backends/advanced/webui/package-lock.json @@ -20,7 +20,6 @@ }, "devDependencies": { "@types/d3": "^7.4.3", - "@types/frappe-gantt": "^0.9.0", "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", "@types/react-vertical-timeline-component": "^3.3.6", @@ -32,7 +31,7 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "postcss": "^8.4.32", - "sass-embedded": "^1.80.7", + "sass-embedded": "^1.83.0", "tailwindcss": "^3.3.0", "typescript": "^5.2.2", "vite": "^5.0.8" @@ -1991,13 +1990,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@types/frappe-gantt": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/@types/frappe-gantt/-/frappe-gantt-0.9.0.tgz", - "integrity": "sha512-n00ElvRvJ1/+HkJwt57yjnTtAM7FcH/pEV9LbRCy3+hR39TY6l0mQuy4o909uxvw97aCNhQjNh8J8xACKJ2G3w==", - "dev": true, - "license": "MIT" - }, 
"node_modules/@types/geojson": { "version": "7946.0.16", "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", diff --git a/backends/advanced/webui/src/pages/System.tsx b/backends/advanced/webui/src/pages/System.tsx index 5c52e057..4ad2581a 100644 --- a/backends/advanced/webui/src/pages/System.tsx +++ b/backends/advanced/webui/src/pages/System.tsx @@ -359,9 +359,9 @@ export default function System() { > {availableProviders.map((provider) => ( ))} From efed01f87f4ee48c7ce1b39fe8b22208d60f2530 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Tue, 16 Dec 2025 10:46:15 +0000 Subject: [PATCH 27/31] Used default values for jwt token timeout instead of hardcoding --- backends/advanced/src/advanced_omi_backend/auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backends/advanced/src/advanced_omi_backend/auth.py b/backends/advanced/src/advanced_omi_backend/auth.py index 3a383db2..212368e2 100644 --- a/backends/advanced/src/advanced_omi_backend/auth.py +++ b/backends/advanced/src/advanced_omi_backend/auth.py @@ -112,7 +112,7 @@ def generate_jwt_for_user(user_id: str, user_email: str) -> str: user_email: User's email address Returns: - JWT token string valid for 24 hours + JWT token string valid for JWT_LIFETIME_SECONDS (default: 24 hours) Example: >>> token = generate_jwt_for_user("507f1f77bcf86cd799439011", "user@example.com") From cf1528ccc4380ab0c6854cfa00c2fff8c39762d9 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Tue, 16 Dec 2025 12:47:35 +0000 Subject: [PATCH 28/31] added support for legacy memory provider names --- .../advanced/src/advanced_omi_backend/app_config.py | 4 ++++ .../controllers/system_controller.py | 7 +++++-- .../routers/modules/health_routes.py | 10 ++++++++-- 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/backends/advanced/src/advanced_omi_backend/app_config.py b/backends/advanced/src/advanced_omi_backend/app_config.py index 601c3813..9857417e 100644 --- 
a/backends/advanced/src/advanced_omi_backend/app_config.py +++ b/backends/advanced/src/advanced_omi_backend/app_config.py @@ -69,6 +69,10 @@ def __init__(self): self.qdrant_base_url = os.getenv("QDRANT_BASE_URL", "qdrant") self.qdrant_port = os.getenv("QDRANT_PORT", "6333") self.memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() + # Map legacy provider names to current names + if self.memory_provider in ("friend-lite", "friend_lite"): + logger.debug(f"Mapping legacy provider '{self.memory_provider}' to 'chronicle'") + self.memory_provider = "chronicle" # Redis Configuration self.redis_url = os.getenv("REDIS_URL", "redis://localhost:6379/0") diff --git a/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py b/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py index 27b2810f..44067a49 100644 --- a/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py +++ b/backends/advanced/src/advanced_omi_backend/controllers/system_controller.py @@ -24,8 +24,8 @@ async def get_current_metrics(): """Get current system metrics.""" try: # Get memory provider configuration - memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() - + memory_provider = (await get_memory_provider())["current_provider"] + # Get basic system metrics metrics = { "timestamp": int(time.time()), @@ -471,6 +471,9 @@ async def get_memory_provider(): """Get current memory provider configuration.""" try: current_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() + # Map legacy provider names to current names + if current_provider in ("friend-lite", "friend_lite"): + current_provider = "chronicle" # Get available providers available_providers = ["chronicle", "openmemory_mcp", "mycelia"] diff --git a/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py b/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py index 24865f90..d94940ce 100644 --- 
a/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py +++ b/backends/advanced/src/advanced_omi_backend/routers/modules/health_routes.py @@ -116,9 +116,15 @@ async def health_check(): overall_healthy = True critical_services_healthy = True - + # Get configuration once at the start - memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle") + memory_provider = os.getenv("MEMORY_PROVIDER", "chronicle").lower() + + # Map legacy provider names to current names + if memory_provider in ("friend-lite", "friend_lite"): + logger.debug(f"Mapping legacy provider '{memory_provider}' to 'chronicle'") + memory_provider = "chronicle" + speaker_service_url = os.getenv("SPEAKER_SERVICE_URL") openmemory_mcp_url = os.getenv("OPENMEMORY_MCP_URL") From c8f5a5d03e99cfaf78a9eaebc7fdbad065aff89a Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Tue, 16 Dec 2025 12:48:06 +0000 Subject: [PATCH 29/31] added frappe-gantt back with upgraded lib --- backends/advanced/webui/package-lock.json | 16 ++++++++++++---- backends/advanced/webui/package.json | 6 +++--- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/backends/advanced/webui/package-lock.json b/backends/advanced/webui/package-lock.json index 861928e3..ead72812 100644 --- a/backends/advanced/webui/package-lock.json +++ b/backends/advanced/webui/package-lock.json @@ -11,7 +11,7 @@ "axios": "^1.6.2", "clsx": "^2.0.0", "d3": "^7.8.5", - "frappe-gantt": "^0.6.1", + "frappe-gantt": "^1.0.4", "lucide-react": "^0.294.0", "react": "^18.2.0", "react-dom": "^18.2.0", @@ -20,6 +20,7 @@ }, "devDependencies": { "@types/d3": "^7.4.3", + "@types/frappe-gantt": "^0.9.0", "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", "@types/react-vertical-timeline-component": "^3.3.6", @@ -1990,6 +1991,13 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/frappe-gantt": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@types/frappe-gantt/-/frappe-gantt-0.9.0.tgz", + "integrity": 
"sha512-n00ElvRvJ1/+HkJwt57yjnTtAM7FcH/pEV9LbRCy3+hR39TY6l0mQuy4o909uxvw97aCNhQjNh8J8xACKJ2G3w==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/geojson": { "version": "7946.0.16", "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", @@ -3787,9 +3795,9 @@ } }, "node_modules/frappe-gantt": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/frappe-gantt/-/frappe-gantt-0.6.1.tgz", - "integrity": "sha512-1cSU9vLbwypjzaxnCfnEE03Xr3HlAV2S8dRtjxw62o+amkx1A8bBIFd2jp84mcDdTCM77Ij4LzZBslAKZB8oMg==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/frappe-gantt/-/frappe-gantt-1.0.4.tgz", + "integrity": "sha512-N94OP9ZiapaG5nzgCeZdxsKP8HD5aLVlH5sEHxSNZQnNKQ4BOn2l46HUD+KIE0LpYIterP7gIrFfkLNRuK0npQ==", "license": "MIT" }, "node_modules/fs.realpath": { diff --git a/backends/advanced/webui/package.json b/backends/advanced/webui/package.json index aa33d177..b933d8db 100644 --- a/backends/advanced/webui/package.json +++ b/backends/advanced/webui/package.json @@ -13,7 +13,7 @@ "axios": "^1.6.2", "clsx": "^2.0.0", "d3": "^7.8.5", - "frappe-gantt": "^0.6.1", + "frappe-gantt": "^1.0.4", "lucide-react": "^0.294.0", "react": "^18.2.0", "react-dom": "^18.2.0", @@ -34,9 +34,9 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "postcss": "^8.4.32", - "sass-embedded": "^1.80.7", + "sass-embedded": "^1.83.0", "tailwindcss": "^3.3.0", "typescript": "^5.2.2", "vite": "^5.0.8" } -} \ No newline at end of file +} From d2ced7e50ac8c243beb6bf67221bf6d2984a9583 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Tue, 16 Dec 2025 12:48:40 +0000 Subject: [PATCH 30/31] fixed issue with env vars in test --- tests/setup/test_env.py | 44 ++++++++++++++++++++++++++--------------- 1 file changed, 28 insertions(+), 16 deletions(-) diff --git a/tests/setup/test_env.py b/tests/setup/test_env.py index 51589fd2..fa3e0f9d 100644 --- a/tests/setup/test_env.py +++ b/tests/setup/test_env.py @@ -1,16 +1,25 @@ # Test 
Environment Configuration import os from pathlib import Path -from dotenv import load_dotenv -# Load .env.test from the tests directory (one level up from setup/) -test_env_path = Path(__file__).resolve().parents[1] / ".env.test" -load_dotenv(test_env_path) +# Load .env file from backends/advanced directory if it exists +# This allows tests to work when run from VSCode or command line +def load_env_file(): + """Load environment variables from .env file if it exists.""" + # Look for .env in backends/advanced directory + env_file = Path(__file__).parent.parent.parent / "backends" / "advanced" / ".env" + if env_file.exists(): + with open(env_file) as f: + for line in f: + line = line.strip() + if line and not line.startswith('#') and '=' in line: + key, value = line.split('=', 1) + # Only set if not already in environment (CI takes precedence) + if key not in os.environ: + os.environ[key] = value -# Load .env from backends/advanced directory to get COMPOSE_PROJECT_NAME -backend_env_path = Path(__file__).resolve().parents[2] / "backends" / "advanced" / ".env" -if backend_env_path.exists(): - load_dotenv(backend_env_path, override=False) +# Load .env file (CI environment variables take precedence) +load_env_file() # API Configuration API_URL = 'http://localhost:8001' # Use BACKEND_URL from test.env @@ -18,16 +27,18 @@ SPEAKER_RECOGNITION_URL = 'http://localhost:8085' # Speaker recognition service WEB_URL = os.getenv('FRONTEND_URL', 'http://localhost:3001') # Use FRONTEND_URL from test.env + +# Test-specific credentials (override any values from .env) +# These are the credentials used in docker-compose-test.yml +ADMIN_EMAIL = 'test-admin@example.com' +ADMIN_PASSWORD = 'test-admin-password-123' + # Admin user credentials (Robot Framework format) ADMIN_USER = { - "email": os.getenv('ADMIN_EMAIL', 'test-admin@example.com'), - "password": os.getenv('ADMIN_PASSWORD', 'test-admin-password-123') + "email": ADMIN_EMAIL, + "password": ADMIN_PASSWORD } -# Individual variables for 
Robot Framework -ADMIN_EMAIL = os.getenv('ADMIN_EMAIL', 'test-admin@example.com') -ADMIN_PASSWORD = os.getenv('ADMIN_PASSWORD', 'test-admin-password-123') - TEST_USER = { "email": "test@example.com", "password": "test-password" @@ -65,8 +76,9 @@ # Docker Container Names (dynamically based on COMPOSE_PROJECT_NAME) # Default to 'advanced' if not set (which is the directory name) COMPOSE_PROJECT_NAME = os.getenv('COMPOSE_PROJECT_NAME', 'advanced') +BACKEND_CONTAINER = f"{COMPOSE_PROJECT_NAME}-chronicle-backend-test-1" WORKERS_CONTAINER = f"{COMPOSE_PROJECT_NAME}-workers-test-1" -REDIS_CONTAINER = f"{COMPOSE_PROJECT_NAME}-redis-test-1" -BACKEND_CONTAINER = f"{COMPOSE_PROJECT_NAME}-friend-backend-test-1" MONGO_CONTAINER = f"{COMPOSE_PROJECT_NAME}-mongo-test-1" +REDIS_CONTAINER = f"{COMPOSE_PROJECT_NAME}-redis-test-1" QDRANT_CONTAINER = f"{COMPOSE_PROJECT_NAME}-qdrant-test-1" +WEBUI_CONTAINER = f"{COMPOSE_PROJECT_NAME}-webui-test-1" From bc6911f17575ff9db8be081990fe33b60e906864 Mon Sep 17 00:00:00 2001 From: Stu Alexandere Date: Tue, 16 Dec 2025 14:20:48 +0000 Subject: [PATCH 31/31] added mongodb_database to .env.template --- backends/advanced/.env.template | 3 +++ 1 file changed, 3 insertions(+) diff --git a/backends/advanced/.env.template b/backends/advanced/.env.template index e9f1e3bf..92b6cc1c 100644 --- a/backends/advanced/.env.template +++ b/backends/advanced/.env.template @@ -91,6 +91,9 @@ RECORD_ONLY_ENROLLED_SPEAKERS=true # MongoDB for conversations and user data (defaults to mongodb://mongo:27017) MONGODB_URI=mongodb://mongo:27017 +# MongoDB database name (new installations use 'chronicle', legacy installations use 'friend-lite') +MONGODB_DATABASE=chronicle + # Qdrant for vector memory storage (defaults to qdrant) QDRANT_BASE_URL=qdrant