Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions backend/app/api/ingest.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ async def ingest_log(log: LogCreate, team: TeamFromApiKey):
message=log.message,
metadata=log.metadata,
source=log.source,
user_id=log.user_id,
)

return {"status": "ok"}
Expand Down Expand Up @@ -55,6 +56,7 @@ async def ingest_logs_batch(batch: LogBatchCreate, team: TeamFromApiKey):
message=log.message,
metadata=log.metadata,
source=log.source,
user_id=log.user_id,
)
for log in batch.logs
]
Expand Down
56 changes: 53 additions & 3 deletions backend/app/api/logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,9 @@
from typing import Annotated
from datetime import datetime
from fastapi import APIRouter, Query, Depends, HTTPException, status, Request
from app.models import Log, LogLevel
from app.schemas import LogResponse
from tortoise import connections
from app.models import Log, LogLevel, TeamRole
from app.schemas import LogResponse, UserIdBackfillRequest, UserIdBackfillResponse
from app.api.deps import get_team_member, CurrentUser

router = APIRouter()
Expand All @@ -17,6 +18,7 @@ async def search_logs(
q: str | None = None,
level: Annotated[list[LogLevel] | None, Query()] = None,
source: str | None = None,
user_id: str | None = None,
from_time: datetime | None = Query(None, alias="from"),
to_time: datetime | None = Query(None, alias="to"),
page: int = Query(1, ge=1),
Expand All @@ -28,6 +30,7 @@ async def search_logs(
- **q**: Full-text search on message
- **level**: Filter by log levels (can specify multiple)
- **source**: Filter by source
- **user_id**: Filter by user ID
- **from/to**: Date range filter
- **metadata.field=value**: Filter by JSON metadata fields

Expand Down Expand Up @@ -60,6 +63,10 @@ async def search_logs(
if source:
query = query.filter(source=source)

# User ID filter
if user_id:
query = query.filter(user_id=user_id)

# Date range
if from_time:
query = query.filter(timestamp__gte=from_time)
Expand Down Expand Up @@ -109,6 +116,7 @@ async def delete_logs(
request: Request,
level: Annotated[list[LogLevel] | None, Query()] = None,
source: str | None = None,
user_id: str | None = None,
from_time: datetime | None = Query(None, alias="from"),
to_time: datetime | None = Query(None, alias="to"),
):
Expand All @@ -119,7 +127,7 @@ async def delete_logs(
team, membership = await get_team_member(team_id, user)

# Require at least one filter
has_filter = any([level, source, from_time, to_time])
has_filter = any([level, source, user_id, from_time, to_time])
metadata_filters = [k for k in request.query_params.keys() if k.startswith("metadata.")]

if not has_filter and not metadata_filters:
Expand All @@ -134,6 +142,8 @@ async def delete_logs(
query = query.filter(level__in=level)
if source:
query = query.filter(source=source)
if user_id:
query = query.filter(user_id=user_id)
if from_time:
query = query.filter(timestamp__gte=from_time)
if to_time:
Expand All @@ -147,3 +157,43 @@ async def delete_logs(
deleted = await query.delete()

return {"deleted": deleted}


@router.post("/{team_id}/logs/backfill-user-id", response_model=UserIdBackfillResponse)
async def backfill_user_id(
    team_id: UUID,
    user: CurrentUser,
    body: UserIdBackfillRequest,
):
    """
    Backfill the user_id column from a metadata key.

    - **metadata_key**: The key in the metadata JSONB to copy into user_id
    - **overwrite**: If false (default), only updates rows where user_id IS NULL

    Raises 403 when the caller's team role does not permit backfill operations.
    Returns the number of rows updated.
    """
    team, membership = await get_team_member(team_id, user)

    # NOTE(review): MEMBER may run a team-wide write, while any other roles in
    # TeamRole (e.g. an admin/owner, if defined) are rejected — confirm this is
    # the intended policy rather than an inverted check.
    if membership.role not in (TeamRole.MEMBER, TeamRole.MANAGER):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Only members and managers can perform backfill operations",
        )

    conn = connections.get("default")

    # The query text must begin with the "UPDATE" keyword: Tortoise's asyncpg
    # client dispatches on the query prefix to parse the affected-row count
    # from the command status. The previous triple-quoted literal started with
    # a newline, which sent the statement down the fetch path and made the
    # endpoint report 0 updated rows on every run.
    # $1 is the metadata key (used twice), $2 the team id; the "?" operator
    # restricts the update to rows whose metadata actually contains the key.
    sql = (
        "UPDATE logs"
        " SET user_id = metadata->>$1"
        " WHERE team_id = $2"
        " AND metadata ? $1"
    )
    if not body.overwrite:
        sql += " AND user_id IS NULL"

    # execute_query returns (rows_affected, rows); only the count is needed.
    result = await conn.execute_query(sql, [body.metadata_key, str(team.id)])
    updated = result[0]

    return UserIdBackfillResponse(
        updated=updated,
        message=f"Updated {updated} rows from metadata key '{body.metadata_key}'",
    )
1 change: 1 addition & 0 deletions backend/app/models/log.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ class Log(Model):
message = fields.TextField()
metadata = fields.JSONField(null=True) # JSONB in PostgreSQL
source = fields.CharField(max_length=255, null=True, index=True)
user_id = fields.CharField(max_length=255, null=True)
created_at = fields.DatetimeField(auto_now_add=True, index=True)

class Meta:
Expand Down
3 changes: 2 additions & 1 deletion backend/app/schemas/__init__.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
from app.schemas.auth import Token, TokenPayload, LoginRequest, RefreshRequest
from app.schemas.user import UserCreate, UserUpdate, UserResponse
from app.schemas.team import TeamCreate, TeamUpdate, TeamResponse, TeamWithKey, MembershipCreate, MembershipResponse
from app.schemas.log import LogCreate, LogBatchCreate, LogResponse, LogSearchParams
from app.schemas.log import LogCreate, LogBatchCreate, LogResponse, LogSearchParams, UserIdBackfillRequest, UserIdBackfillResponse

__all__ = [
"Token", "TokenPayload", "LoginRequest", "RefreshRequest",
"UserCreate", "UserUpdate", "UserResponse",
"TeamCreate", "TeamUpdate", "TeamResponse", "TeamWithKey", "MembershipCreate", "MembershipResponse",
"LogCreate", "LogBatchCreate", "LogResponse", "LogSearchParams",
"UserIdBackfillRequest", "UserIdBackfillResponse",
]
13 changes: 13 additions & 0 deletions backend/app/schemas/log.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ class LogCreate(BaseModel):
message: str
metadata: dict[str, Any] | None = None
source: str | None = None
user_id: str | None = None
timestamp: datetime | None = None # If not provided, server time is used


Expand All @@ -25,6 +26,7 @@ class LogResponse(BaseModel):
message: str
metadata: dict[str, Any] | None
source: str | None
user_id: str | None
created_at: datetime

class Config:
Expand All @@ -35,8 +37,19 @@ class LogSearchParams(BaseModel):
q: str | None = None # Full-text search query
level: list[LogLevel] | None = None
source: str | None = None
user_id: str | None = None
from_time: datetime | None = None
to_time: datetime | None = None
page: int = Field(default=1, ge=1)
limit: int = Field(default=50, ge=1, le=1000)
# Metadata filters will be parsed from query params like metadata.field=value


class UserIdBackfillRequest(BaseModel):
    """Request body for the user_id backfill endpoint.

    Describes which metadata JSONB key should be copied into the
    dedicated logs.user_id column.
    """

    # JSONB key whose value is copied into user_id; bounded to fit the
    # VARCHAR(255) user_id column.
    metadata_key: str = Field(..., min_length=1, max_length=255)
    # When False (default), only rows where user_id IS NULL are updated;
    # when True, existing user_id values are overwritten as well.
    overwrite: bool = False


class UserIdBackfillResponse(BaseModel):
    """Result of a user_id backfill run."""

    # Number of rows updated by the backfill statement.
    updated: int
    # Human-readable summary of the operation.
    message: str
26 changes: 26 additions & 0 deletions backend/migrations/003_add_user_id_to_logs.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
-- Migration: Add user_id column to logs
-- Allows first-class filtering by user identifier instead of relying on metadata JSONB
-- Idempotent: both steps consult the catalogs first, so re-running the script is safe.

DO $$
BEGIN
    -- Add user_id column if it doesn't exist
    IF NOT EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'logs' AND column_name = 'user_id'
    ) THEN
        -- Nullable on purpose: existing rows and ingests without a user stay valid.
        ALTER TABLE logs ADD COLUMN user_id VARCHAR(255);
        RAISE NOTICE 'Added user_id column to logs';
    ELSE
        RAISE NOTICE 'user_id column already exists, skipping';
    END IF;

    -- Add composite index for per-team user filtering
    -- NOTE(review): a plain CREATE INDEX holds a lock while building; CONCURRENTLY
    -- cannot be used here because a DO block runs inside a transaction — confirm
    -- the logs table is small enough for the lock to be acceptable.
    -- NOTE(review): the pg_indexes lookup does not filter by schemaname — confirm
    -- index names are unique across schemas in this database.
    IF NOT EXISTS (
        SELECT 1 FROM pg_indexes WHERE indexname = 'idx_logs_team_id_user_id'
    ) THEN
        CREATE INDEX idx_logs_team_id_user_id ON logs (team_id, user_id);
        RAISE NOTICE 'Created index idx_logs_team_id_user_id';
    ELSE
        RAISE NOTICE 'Index idx_logs_team_id_user_id already exists, skipping';
    END IF;
END $$;
1 change: 1 addition & 0 deletions frontend/src/api/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,7 @@ export interface Log {
message: string
metadata: Record<string, unknown> | null
source: string | null
user_id: string | null
created_at: string
}

Expand Down
Loading