Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -290,6 +290,62 @@ async def count_memories(self, user_id: str) -> Optional[int]:
memory_logger.error(f"Count memories failed: {e}")
return None

async def get_memory(self, memory_id: str, user_id: Optional[str] = None) -> Optional[MemoryEntry]:
    """Get a specific memory by ID.

    Retrieves a single memory from Qdrant by its ID and verifies user ownership.

    Args:
        memory_id: Unique identifier of the memory to retrieve
        user_id: Optional user ID for access control; when provided, the memory's
            stored metadata.user_id must match or None is returned

    Returns:
        MemoryEntry object if found and authorized, None otherwise
    """
    if not self._initialized:
        await self.initialize()

    try:
        # Retrieve the point from Qdrant (payload only; the vector is not needed here)
        points = await self.vector_store.client.retrieve(
            collection_name=self.vector_store.collection_name,
            ids=[memory_id],
            with_payload=True,
            with_vectors=False
        )

        if not points:
            memory_logger.debug(f"Memory {memory_id} not found in vector store")
            return None

        point = points[0]
        payload = point.payload

        # Check user ownership if user_id is provided
        if user_id:
            memory_user_id = payload.get("metadata", {}).get("user_id")
            if memory_user_id != user_id:
                memory_logger.warning(
                    f"Access denied: Memory {memory_id} belongs to {memory_user_id}, requested by {user_id}"
                )
                return None

        # Normalize created_at to a string: some records store an int epoch
        # timestamp while MemoryEntry declares the field as Optional[str].
        # Without this, callers see mixed str/int types depending on when the
        # memory was written.
        raw_created_at = payload.get("created_at")
        created_at = str(raw_created_at) if raw_created_at is not None else None

        # Convert to MemoryEntry
        memory_entry = MemoryEntry(
            id=str(point.id),
            content=payload.get("content", ""),
            metadata=payload.get("metadata", {}),
            created_at=created_at,
            embedding=None  # Don't return embeddings for single memory retrieval
        )

        memory_logger.info(f"📖 Retrieved memory {memory_id} for user {user_id}")
        return memory_entry

    except Exception as e:
        memory_logger.error(f"Get memory {memory_id} failed: {e}", exc_info=True)
        return None

async def delete_memory(self, memory_id: str, user_id: Optional[str] = None, user_email: Optional[str] = None) -> bool:
"""Delete a specific memory by ID.

Expand Down Expand Up @@ -428,7 +484,7 @@ def _create_memory_entries(
"extraction_enabled": self.config.extraction_enabled,
},
embedding=embedding,
created_at=str(int(time.time())),
created_at=int(time.time()),
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

🧩 Analysis chain

🏁 Script executed:

find . -name "chronicle.py" -path "*/memory/providers/*" | head -20

Repository: chronicler-ai/chronicle

Length of output: 150


🏁 Script executed:

fd -t f "chronicle.py" | grep -E "memory/providers"

Repository: chronicler-ai/chronicle

Length of output: 148


🏁 Script executed:

wc -l backends/advanced/src/advanced_omi_backend/services/memory/providers/chronicle.py

Repository: chronicler-ai/chronicle

Length of output: 152


🏁 Script executed:

# Find MemoryEntry base class definition and its created_at field
rg -n "class MemoryEntry" backends/advanced/src/advanced_omi_backend/services/memory/providers/chronicle.py

Repository: chronicler-ai/chronicle

Length of output: 49


🏁 Script executed:

# Search for MemoryEntry imports and definition
rg -n "MemoryEntry" backends/advanced/src/advanced_omi_backend/services/memory/providers/chronicle.py | head -20

Repository: chronicler-ai/chronicle

Length of output: 1013


🏁 Script executed:

# Find the base.py file and examine MemoryEntry definition
fd -t f "base.py" backends/advanced/src/advanced_omi_backend/services/memory/ | head -5

Repository: chronicler-ai/chronicle

Length of output: 133


🏁 Script executed:

# Check the size of base.py first
wc -l backends/advanced/src/advanced_omi_backend/services/memory/base.py

Repository: chronicler-ai/chronicle

Length of output: 137


🏁 Script executed:

# Find MemoryEntry class definition in base.py
rg -n "class MemoryEntry" backends/advanced/src/advanced_omi_backend/services/memory/base.py -A 20

Repository: chronicler-ai/chronicle

Length of output: 935


🏁 Script executed:

# Check if there's a __post_init__ method in MemoryEntry
rg -n "__post_init__" backends/advanced/src/advanced_omi_backend/services/memory/base.py -A 10

Repository: chronicler-ai/chronicle

Length of output: 578


🏁 Script executed:

# Check lines around 487 in chronicle.py
sed -n '480,495n' backends/advanced/src/advanced_omi_backend/services/memory/providers/chronicle.py

Repository: chronicler-ai/chronicle

Length of output: 49


🏁 Script executed:

# Check lines around 487 in chronicle.py using head/tail
head -495 backends/advanced/src/advanced_omi_backend/services/memory/providers/chronicle.py | tail -20

Repository: chronicler-ai/chronicle

Length of output: 788


🏁 Script executed:

# Check line 693 area
head -700 backends/advanced/src/advanced_omi_backend/services/memory/providers/chronicle.py | tail -20

Repository: chronicler-ai/chronicle

Length of output: 930


🏁 Script executed:

# Check line 338 area for get_memory
head -345 backends/advanced/src/advanced_omi_backend/services/memory/providers/chronicle.py | tail -20

Repository: chronicler-ai/chronicle

Length of output: 921


🏁 Script executed:

# Check the imports and dataclass decorator in base.py
head -50 backends/advanced/src/advanced_omi_backend/services/memory/base.py

Repository: chronicler-ai/chronicle

Length of output: 1617


🏁 Script executed:

# Verify what happens when int is passed to created_at field
# Let's trace through the issue: what does __post_init__ actually do with an int value?
cat > /tmp/test_dataclass.py << 'EOF'
import time
from dataclasses import dataclass
from typing import Optional

@dataclass
class TestEntry:
    content: str
    created_at: Optional[str] = None
    
    def __post_init__(self):
        if self.created_at is None:
            self.created_at = str(int(time.time()))

# Test 1: Pass None - __post_init__ converts to string
entry1 = TestEntry(content="test", created_at=None)
print(f"Test 1 (None): type={type(entry1.created_at).__name__}, value={entry1.created_at}")

# Test 2: Pass int - __post_init__ does NOT convert
entry2 = TestEntry(content="test", created_at=int(time.time()))
print(f"Test 2 (int): type={type(entry2.created_at).__name__}, value={entry2.created_at}")

# Test 3: Pass string - remains string
entry3 = TestEntry(content="test", created_at="1234567890")
print(f"Test 3 (str): type={type(entry3.created_at).__name__}, value={entry3.created_at}")
EOF
python /tmp/test_dataclass.py

Repository: chronicler-ai/chronicle

Length of output: 153


Ensure created_at field uses consistent types across creation and retrieval.

The code passes int(time.time()) for created_at at lines 487 and 693, but the base class MemoryEntry defines this field as Optional[str]. The __post_init__ method only converts None to a string, so passing an integer directly creates a type violation.

Additionally, the get_memory method (line 338) retrieves created_at without normalizing the type, risking mixed types (string or int) depending on when the memory was stored.

Verify the intended type and ensure consistency:

  1. Either convert int(time.time()) to str(int(time.time())) when creating instances
  2. Or update the field type hint to Optional[int | str] and normalize on retrieval
  3. Update __post_init__ or retrieval logic to handle both types consistently
🤖 Prompt for AI Agents
In
backends/advanced/src/advanced_omi_backend/services/memory/providers/chronicle.py
around lines 487 and 693 (and referencing retrieval at ~338), created_at is
being set with int(time.time()) while the base MemoryEntry defines created_at as
Optional[str], and get_memory does not normalize types; fix by choosing one
approach and implementing it consistently: either (A) convert timestamps to
strings when creating entries (use str(int(time.time()))) and ensure
__post_init__ still enforces a string, or (B) widen the type hint to
Optional[int | str], update __post_init__ to normalize both int and str into a
single canonical type, and modify get_memory to always return the canonical
type; apply the chosen change to all creation sites (lines 487 and 693), the
MemoryEntry type hint/__post_init__, and the retrieval path around line 338 so
created_at is consistent across create and read.

)
)

Expand Down Expand Up @@ -626,15 +682,15 @@ async def _apply_memory_actions(
if emb is None:
memory_logger.warning(f"Skipping ADD action due to missing embedding: {action_text}")
continue

memory_id = str(uuid.uuid4())
memory_entries.append(
MemoryEntry(
id=memory_id,
content=action_text,
metadata=base_metadata,
embedding=emb,
created_at=str(int(time.time())),
created_at=int(time.time()),
)
)
memory_logger.info(f"➕ Added new memory: {memory_id} - {action_text[:50]}...")
Expand Down
29 changes: 15 additions & 14 deletions tests/setup/test_env.py
Original file line number Diff line number Diff line change
@@ -1,25 +1,26 @@
# Test Environment Configuration
import os
from pathlib import Path
from dotenv import load_dotenv

# Load .env file from backends/advanced directory if it exists
# This allows tests to work when run from VSCode or command line
def load_env_file():
"""Load environment variables from .env file if it exists."""
# Look for .env in backends/advanced directory
env_file = Path(__file__).parent.parent.parent / "backends" / "advanced" / ".env"
if env_file.exists():
with open(env_file) as f:
for line in f:
line = line.strip()
if line and not line.startswith('#') and '=' in line:
key, value = line.split('=', 1)
# Only set if not already in environment (CI takes precedence)
if key not in os.environ:
os.environ[key] = value
# def load_env_file():
# """Load environment variables from .env file if it exists."""
# # Look for .env in backends/advanced directory
# env_file = Path(__file__).parent.parent.parent / "backends" / "advanced" / ".env"
# if env_file.exists():
# with open(env_file) as f:
# for line in f:
# line = line.strip()
# if line and not line.startswith('#') and '=' in line:
# key, value = line.split('=', 1)
# # Only set if not already in environment (CI takes precedence)
# if key not in os.environ:
# os.environ[key] = value

# NOTE: legacy manual .env loading is disabled (see commented-out load_env_file above);
# python-dotenv now handles .env loading, and CI environment variables still take precedence
load_env_file()
# load_env_file()

# Load .env from backends/advanced directory to get COMPOSE_PROJECT_NAME
backend_env_path = Path(__file__).resolve().parents[2] / "backends" / "advanced" / ".env"
Expand Down
Loading