60 changes: 56 additions & 4 deletions .github/workflows/advanced-docker-compose-build.yml
@@ -15,12 +15,12 @@ on:
- "extras/speaker-recognition/**"
- "extras/openmemory-mcp/**"
- ".github/workflows/advanced-docker-compose-build.yml"
tags:
- "v*"
release:
types: [ published ]


permissions:
contents: read
contents: write
packages: write
actions: read

@@ -122,12 +122,14 @@ jobs:
run: |
if [ -n "${{ github.event.inputs.version }}" ]; then
VERSION="${{ github.event.inputs.version }}"
elif [ "${{ github.event_name }}" = "release" ]; then
VERSION="${{ github.event.release.tag_name }}"
elif [[ "${GITHUB_REF}" == refs/tags/* ]]; then
VERSION="${GITHUB_REF#refs/tags/}"
else
VERSION="sha-${GITHUB_SHA::7}"
fi

echo "VERSION=$VERSION" >> "$GITHUB_OUTPUT"

- name: Build, tag, and push services sequentially with version
@@ -267,3 +269,53 @@ jobs:
echo "Built and pushed images with version tag: ${VERSION}"
echo "Images pushed to: $REGISTRY/$OWNER_LC/"
echo "::endgroup::"

- name: Update release notes with Docker images
if: github.event_name == 'release'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OWNER: ${{ github.repository_owner }}
VERSION: ${{ steps.version.outputs.VERSION }}
TAG_NAME: ${{ github.event.release.tag_name }}
run: |
set -euo pipefail
OWNER_LC=$(echo "$OWNER" | tr '[:upper:]' '[:lower:]')

DOCKER_SECTION=$(cat <<EOF

---

## Docker Images

All images are published to \`ghcr.io/${OWNER_LC}\` with tags \`${VERSION}\` and \`latest\`.

### Core Services
\`\`\`bash
docker pull ghcr.io/${OWNER_LC}/advanced-chronicle-backend:${VERSION}
docker pull ghcr.io/${OWNER_LC}/advanced-workers:${VERSION}
docker pull ghcr.io/${OWNER_LC}/advanced-webui:${VERSION}
docker pull ghcr.io/${OWNER_LC}/openmemory-mcp:${VERSION}
\`\`\`

### Parakeet ASR (pick your CUDA version)
\`\`\`bash
docker pull ghcr.io/${OWNER_LC}/parakeet-asr-cu121:${VERSION}
docker pull ghcr.io/${OWNER_LC}/parakeet-asr-cu126:${VERSION}
docker pull ghcr.io/${OWNER_LC}/parakeet-asr-cu128:${VERSION}
\`\`\`

### Speaker Recognition (pick your variant)
\`\`\`bash
docker pull ghcr.io/${OWNER_LC}/speaker-recognition-cpu:${VERSION}
docker pull ghcr.io/${OWNER_LC}/speaker-recognition-cu121:${VERSION}
docker pull ghcr.io/${OWNER_LC}/speaker-recognition-cu126:${VERSION}
docker pull ghcr.io/${OWNER_LC}/speaker-recognition-cu128:${VERSION}
\`\`\`
EOF
)

EXISTING_BODY=$(gh release view "$TAG_NAME" --json body -q '.body' --repo "$GITHUB_REPOSITORY")
UPDATED_BODY="${EXISTING_BODY}${DOCKER_SECTION}"
gh release edit "$TAG_NAME" --notes "$UPDATED_BODY" --repo "$GITHUB_REPOSITORY"
echo "Release notes updated with Docker image info"
working-directory: .
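
The version step above prefers an explicit workflow_dispatch input, then the release tag, then a pushed tag, and finally falls back to a short commit SHA. A minimal, runnable sketch of that resolution order (stand-in values only; in CI these come from the GitHub contexts, not from a script like this):

```bash
#!/usr/bin/env bash
set -euo pipefail

INPUT_VERSION=""                 # stand-in for github.event.inputs.version
RELEASE_TAG=""                   # stand-in for github.event.release.tag_name
GITHUB_REF="refs/tags/v1.2.3"    # set by the runner on a tag push
GITHUB_SHA="0123456789abcdef"    # set by the runner on every run

if [ -n "$INPUT_VERSION" ]; then
  VERSION="$INPUT_VERSION"              # manual input wins
elif [ -n "$RELEASE_TAG" ]; then
  VERSION="$RELEASE_TAG"                # release event: use the release tag
elif [[ "$GITHUB_REF" == refs/tags/* ]]; then
  VERSION="${GITHUB_REF#refs/tags/}"    # tag push: strip the refs/tags/ prefix
else
  VERSION="sha-${GITHUB_SHA::7}"        # fallback: short commit SHA
fi

echo "Resolved version: $VERSION"       # prints: Resolved version: v1.2.3
```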
3 changes: 0 additions & 3 deletions .gitmodules
@@ -1,3 +0,0 @@
[submodule "extras/mycelia"]
path = extras/mycelia
url = https://github.com/mycelia-tech/mycelia
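
With the submodule entry removed, an existing clone may still carry stale submodule state. A hedged local cleanup (the path is taken from the removed entry; the `|| true` covers clones where the submodule was never initialized):

```bash
# Run from the repository root after pulling this change.
git submodule deinit -f extras/mycelia 2>/dev/null || true  # drop submodule config, if present
rm -rf .git/modules/extras/mycelia                          # remove the cached submodule git dir
rm -rf extras/mycelia                                       # remove the leftover working-tree directory
```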
52 changes: 1 addition & 51 deletions Makefile
@@ -19,7 +19,7 @@ export $(shell sed 's/=.*//' config.env | grep -v '^\s*$$' | grep -v '^\s*\#')
SCRIPTS_DIR := scripts
K8S_SCRIPTS_DIR := $(SCRIPTS_DIR)/k8s

.PHONY: help menu setup-k8s setup-infrastructure setup-rbac setup-storage-pvc config config-docker config-k8s config-all clean deploy deploy-docker deploy-k8s deploy-k8s-full deploy-infrastructure deploy-apps check-infrastructure check-apps build-backend up-backend down-backend k8s-status k8s-cleanup k8s-purge audio-manage mycelia-sync-status mycelia-sync-all mycelia-sync-user mycelia-check-orphans mycelia-reassign-orphans test-robot test-robot-integration test-robot-unit test-robot-endpoints test-robot-specific test-robot-clean
.PHONY: help menu setup-k8s setup-infrastructure setup-rbac setup-storage-pvc config config-docker config-k8s config-all clean deploy deploy-docker deploy-k8s deploy-k8s-full deploy-infrastructure deploy-apps check-infrastructure check-apps build-backend up-backend down-backend k8s-status k8s-cleanup k8s-purge audio-manage test-robot test-robot-integration test-robot-unit test-robot-endpoints test-robot-specific test-robot-clean

# Default target
.DEFAULT_GOAL := menu
@@ -57,13 +57,6 @@ menu: ## Show interactive menu (default)
@echo " check-apps 🔍 Check application services"
@echo " clean 🧹 Clean up generated files"
@echo
@echo "🔄 Mycelia Sync:"
@echo " mycelia-sync-status 📊 Show Mycelia OAuth sync status"
@echo " mycelia-sync-all 🔄 Sync all Chronicle users to Mycelia"
@echo " mycelia-sync-user 👤 Sync specific user (EMAIL=user@example.com)"
@echo " mycelia-check-orphans 🔍 Find orphaned Mycelia objects"
@echo " mycelia-reassign-orphans ♻️ Reassign orphans (EMAIL=admin@example.com)"
@echo
@echo "Current configuration:"
@echo " DOMAIN: $(DOMAIN)"
@echo " DEPLOYMENT_MODE: $(DEPLOYMENT_MODE)"
@@ -108,13 +101,6 @@ help: ## Show detailed help for all targets
@echo "🎵 AUDIO MANAGEMENT:"
@echo " audio-manage Interactive audio file management"
@echo
@echo "🔄 MYCELIA SYNC:"
@echo " mycelia-sync-status Show Mycelia OAuth sync status for all users"
@echo " mycelia-sync-all Sync all Chronicle users to Mycelia OAuth"
@echo " mycelia-sync-user Sync specific user (EMAIL=user@example.com)"
@echo " mycelia-check-orphans Find Mycelia objects without Chronicle owner"
@echo " mycelia-reassign-orphans Reassign orphaned objects (EMAIL=admin@example.com)"
@echo
@echo "🧪 ROBOT FRAMEWORK TESTING:"
@echo " test-robot Run all Robot Framework tests"
@echo " test-robot-integration Run integration tests only"
@@ -347,42 +333,6 @@ audio-manage: ## Interactive audio file management
@echo "🎵 Starting audio file management..."
@$(SCRIPTS_DIR)/manage-audio-files.sh

# ========================================
# MYCELIA SYNC
# ========================================

mycelia-sync-status: ## Show Mycelia OAuth sync status for all users
@echo "📊 Checking Mycelia OAuth sync status..."
@cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --status

mycelia-sync-all: ## Sync all Chronicle users to Mycelia OAuth
@echo "🔄 Syncing all Chronicle users to Mycelia OAuth..."
@echo "⚠️ This will create OAuth credentials for users without them"
@read -p "Continue? (y/N): " confirm && [ "$$confirm" = "y" ] || exit 1
@cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --sync-all

mycelia-sync-user: ## Sync specific user to Mycelia OAuth (usage: make mycelia-sync-user EMAIL=user@example.com)
@echo "👤 Syncing specific user to Mycelia OAuth..."
@if [ -z "$(EMAIL)" ]; then \
echo "❌ EMAIL parameter is required. Usage: make mycelia-sync-user EMAIL=user@example.com"; \
exit 1; \
fi
@cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --email $(EMAIL)

mycelia-check-orphans: ## Find Mycelia objects without Chronicle owner
@echo "🔍 Checking for orphaned Mycelia objects..."
@cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --check-orphans

mycelia-reassign-orphans: ## Reassign orphaned objects to user (usage: make mycelia-reassign-orphans EMAIL=admin@example.com)
@echo "♻️ Reassigning orphaned Mycelia objects..."
@if [ -z "$(EMAIL)" ]; then \
echo "❌ EMAIL parameter is required. Usage: make mycelia-reassign-orphans EMAIL=admin@example.com"; \
exit 1; \
fi
@echo "⚠️ This will reassign all orphaned objects to: $(EMAIL)"
@read -p "Continue? (y/N): " confirm && [ "$$confirm" = "y" ] || exit 1
@cd backends/advanced && uv run python scripts/sync_chronicle_mycelia.py --reassign-orphans --target-email $(EMAIL)

# ========================================
# TESTING TARGETS
# ========================================
76 changes: 52 additions & 24 deletions backends/advanced/Dockerfile
@@ -1,40 +1,53 @@
# ============================================
# Base stage - common setup
# Builder stage - install dependencies
# ============================================
FROM python:3.12-slim-bookworm AS base
FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS builder

# Install system dependencies
RUN apt-get update && \
apt-get install -y --no-install-recommends \
# Install system dependencies needed for building
RUN apt-get update && apt-get install -y --no-install-recommends \
build-essential \
libsndfile1 \
git \
curl \
ffmpeg \
&& rm -rf /var/lib/apt/lists/*

# Install uv
COPY --from=ghcr.io/astral-sh/uv:0.6.10 /uv /uvx /bin/
ENV UV_COMPILE_BYTECODE=1
ENV UV_LINK_MODE=copy

# Set up working directory
WORKDIR /app

# Copy package structure and dependency files
COPY pyproject.toml README.md ./
COPY uv.lock .
RUN mkdir -p src/advanced_omi_backend
COPY src/advanced_omi_backend/__init__.py src/advanced_omi_backend/
# Copy dependency files first (cache-friendly)
COPY pyproject.toml uv.lock ./

# Export locked deps to requirements.txt (handles extras, git sources, custom indexes)
# Install to system Python (no venv) - container IS the isolation
RUN --mount=type=cache,target=/root/.cache/uv \
uv export --frozen --no-dev --extra deepgram --no-emit-project -o requirements.txt && \
uv pip install --system -r requirements.txt


# ============================================
# Production stage - production dependencies only
# Production stage
# ============================================
FROM base AS prod
FROM python:3.12-slim-bookworm AS prod

RUN apt-get update && apt-get install -y --no-install-recommends \
libsndfile1 \
curl \
ffmpeg \
&& rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Install production dependencies only
RUN uv sync --extra deepgram
# Copy installed packages from builder
COPY --from=builder /usr/local/lib/python3.12/site-packages /usr/local/lib/python3.12/site-packages
COPY --from=builder /usr/local/bin /usr/local/bin

# Copy all application code
# Source layout needs PYTHONPATH
ENV PYTHONPATH=/app/src

# Copy application code
COPY . .

# Copy configuration files if they exist
@@ -44,19 +57,35 @@ COPY diarization_config.json* ./
COPY start.sh ./
RUN chmod +x start.sh

# Run the application
CMD ["./start.sh"]


# ============================================
# Dev/Test stage - includes test dependencies
# ============================================
FROM base AS dev
FROM python:3.12-slim-bookworm AS dev

RUN apt-get update && apt-get install -y --no-install-recommends \
libsndfile1 \
curl \
ffmpeg \
build-essential \
git \
&& rm -rf /var/lib/apt/lists/*

WORKDIR /app

# For dev, install deps + test group using uv temporarily
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
COPY pyproject.toml uv.lock ./
RUN --mount=type=cache,target=/root/.cache/uv \
uv export --frozen --extra deepgram --group test --no-emit-project -o requirements.txt && \
uv pip install --system -r requirements.txt && \
rm /bin/uv /bin/uvx

# Install production + test dependencies
RUN uv sync --extra deepgram --group test
ENV PYTHONPATH=/app/src

# Copy all application code
# Copy application code
COPY . .

# Copy configuration files if they exist
@@ -66,5 +95,4 @@ COPY diarization_config.json* ./
COPY start.sh ./
RUN chmod +x start.sh

# Run the application
CMD ["./start.sh"]
20 changes: 10 additions & 10 deletions backends/advanced/cleanup.sh
@@ -1,15 +1,15 @@
#!/bin/bash
# Wrapper script for cleanup_state.py
# Usage: ./cleanup.sh --backup --export-audio
# Chronicle Cleanup & Backup Tool
#
# This script runs the cleanup_state.py script inside the chronicle-backend container
# to handle data ownership and permissions correctly.
# This script runs cleanup_state.py inside the chronicle-backend container.
#
# Examples:
# ./cleanup.sh --dry-run # Preview what would be deleted
# ./cleanup.sh --backup # Cleanup with metadata backup
# ./cleanup.sh --backup --export-audio # Full backup including audio
# ./cleanup.sh --backup --force # Skip confirmation prompts
# Usage:
# ./cleanup.sh --dry-run Preview what would happen
# ./cleanup.sh --backup-only Back up everything (no cleanup)
# ./cleanup.sh --backup-only --export-audio Back up with audio WAV files
# ./cleanup.sh --backup Back up then clean
# ./cleanup.sh --backup --export-audio Back up with audio then clean
# ./cleanup.sh --backup --force Skip confirmation prompt

cd "$(dirname "$0")"
docker compose exec chronicle-backend uv run python src/scripts/cleanup_state.py "$@"
docker compose exec chronicle-backend python src/scripts/cleanup_state.py "$@"
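
Dropping `uv run` here matches the Dockerfile change above: the prod image installs dependencies into system Python and no longer ships uv, so the script is invoked directly. A hedged manual equivalent of what the wrapper runs (assuming the chronicle-backend service is up):

```bash
docker compose exec chronicle-backend python src/scripts/cleanup_state.py --dry-run
```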