Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,10 @@ DATABASE_URL="file:./prisma/dev.db"
# Optional: custom API base URL (proxy or local model server)
# ANTHROPIC_BASE_URL=

# ── Ollama (local LLMs, no API key needed) ──────────────────────────
# Install: https://ollama.com • Start: ollama serve • Pull model: ollama pull llama3.1
# OLLAMA_BASE_URL=http://localhost:11434

# ── Access control (optional) ────────────────────────────────────────

# Set BOTH to enable HTTP Basic Auth on the entire app.
Expand Down
24 changes: 17 additions & 7 deletions CLAUDE.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,16 +18,26 @@ npx next dev

App runs at **http://localhost:3000**

## AI Authentication — No API Key Needed
## AI Providers

If the user is signed into Claude Code CLI, **Siftly uses their Claude subscription automatically**. No API key configuration required.
Siftly supports three AI providers — switch between them in Settings:

How it works:
- `lib/claude-cli-auth.ts` reads the OAuth token from the macOS keychain (`Claude Code-credentials`)
- Uses `authToken` + `anthropic-beta: oauth-2025-04-20` header in the Anthropic SDK
### Ollama (Local LLMs — Free, Private)
1. Install Ollama: https://ollama.com
2. Start the server: `ollama serve`
3. Pull a model: `ollama pull llama3.1`
4. In Siftly Settings, select **Ollama** and pick your model
5. No API key needed — everything runs locally

### Claude (Anthropic)
If you're signed into Claude Code CLI, **Siftly uses your Claude subscription automatically**. No API key needed.
- `lib/claude-cli-auth.ts` reads the OAuth token from the macOS keychain
- Falls back to: DB-saved API key → `ANTHROPIC_API_KEY` env var → local proxy

To verify it's working, hit: `GET /api/settings/cli-status`
### OpenAI
Set your OpenAI API key in Settings, or use Codex CLI auth.

To verify provider status: `GET /api/settings/cli-status`

## Key Commands

Expand Down Expand Up @@ -78,7 +88,7 @@ prisma/schema.prisma # SQLite schema (Bookmark, Category, MediaItem, Setting, I

- **Next.js 16** (App Router, TypeScript)
- **Prisma 7** + **SQLite** (local, zero setup, FTS5 built in)
- **Anthropic SDK** — vision, tagging, categorization, search
- **Anthropic SDK / OpenAI SDK / Ollama** — vision, tagging, categorization, search
- **@xyflow/react** — mindmap graph
- **Tailwind CSS v4**

Expand Down
4 changes: 2 additions & 2 deletions app/api/analyze/images/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,8 @@ export async function POST(request: NextRequest): Promise<NextResponse> {
}

const provider = await getProvider()
const keyName = provider === 'openai' ? 'openaiApiKey' : 'anthropicApiKey'
const setting = await prisma.setting.findUnique({ where: { key: keyName } })
const keyName = provider === 'openai' ? 'openaiApiKey' : provider === 'ollama' ? null : 'anthropicApiKey'
const setting = keyName ? await prisma.setting.findUnique({ where: { key: keyName } }) : null
const dbKey = setting?.value?.trim()

let client: AIClient | null = null
Expand Down
22 changes: 13 additions & 9 deletions app/api/categorize/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -111,12 +111,15 @@ export async function POST(request: NextRequest): Promise<NextResponse> {

if (apiKey && typeof apiKey === 'string' && apiKey.trim() !== '') {
const currentProvider = await getProvider()
const keySlot = currentProvider === 'openai' ? 'openaiApiKey' : 'anthropicApiKey'
await prisma.setting.upsert({
where: { key: keySlot },
update: { value: apiKey.trim() },
create: { key: keySlot, value: apiKey.trim() },
})
// Ollama doesn't use API keys — skip saving
if (currentProvider !== 'ollama') {
const keySlot = currentProvider === 'openai' ? 'openaiApiKey' : 'anthropicApiKey'
await prisma.setting.upsert({
where: { key: keySlot },
update: { value: apiKey.trim() },
create: { key: keySlot, value: apiKey.trim() },
})
}
}

globalState.categorizationAbort = false
Expand Down Expand Up @@ -145,9 +148,10 @@ export async function POST(request: NextRequest): Promise<NextResponse> {
})

const provider = await getProvider()
const keyName = provider === 'openai' ? 'openaiApiKey' : 'anthropicApiKey'
const dbApiKey =
(await prisma.setting.findUnique({ where: { key: keyName } }))?.value?.trim() || ''
const keyName = provider === 'openai' ? 'openaiApiKey' : provider === 'ollama' ? null : 'anthropicApiKey'
const dbApiKey = keyName
? ((await prisma.setting.findUnique({ where: { key: keyName } }))?.value?.trim() || '')
: '' // Ollama doesn't need an API key

void (async () => {
const counts = { visionTagged: 0, entitiesExtracted: 0, enriched: 0, categorized: 0 }
Expand Down
6 changes: 5 additions & 1 deletion app/api/search/ai/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ let _categoriesCacheExpiry = 0
async function getDbApiKey(): Promise<string> {
if (_apiKey !== null && Date.now() < _apiKeyExpiry) return _apiKey
const provider = await getProvider()
if (provider === 'ollama') { _apiKey = ''; _apiKeyExpiry = Date.now() + 60_000; return '' }
const keyName = provider === 'openai' ? 'openaiApiKey' : 'anthropicApiKey'
const setting = await prisma.setting.findUnique({ where: { key: keyName } })
const fromDb = setting?.value?.trim() ?? ''
Expand Down Expand Up @@ -350,9 +351,12 @@ Constraints:
: { matches: [], explanation: 'No results found.' }
}

// Ollama never takes the CLI path — it talks to the SDK (OpenAI-compatible) directly
// Try CLI first (works with ChatGPT OAuth), then fall back to SDK
let cliSucceeded = false
if (provider === 'openai' && await getCodexCliAvailability()) {
if (provider === 'ollama') {
// Ollama always uses the SDK path (OpenAI-compatible), skip CLI
} else if (provider === 'openai' && await getCodexCliAvailability()) {
try {
const result = await codexPrompt(prompt, { timeoutMs: 90_000 })
if (result.success && result.data) {
Expand Down
25 changes: 24 additions & 1 deletion app/api/settings/cli-status/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import { NextResponse } from 'next/server'
import prisma from '@/lib/db'
import { getCliAuthStatus, getCliAvailability } from '@/lib/claude-cli-auth'
import { getCodexCliAuthStatus } from '@/lib/openai-auth'
import { getOllamaBaseUrl } from '@/lib/settings'

export async function GET(): Promise<NextResponse> {
const oauthStatus = getCliAuthStatus()
Expand All @@ -10,18 +11,40 @@ export async function GET(): Promise<NextResponse> {
// Read provider directly from DB (not cached) — this endpoint is called
// right after the user toggles the provider, so it must be fresh.
const providerSetting = await prisma.setting.findUnique({ where: { key: 'aiProvider' } })
const provider = providerSetting?.value === 'openai' ? 'openai' : 'anthropic'
const val = providerSetting?.value
const provider = val === 'openai' ? 'openai' : val === 'ollama' ? 'ollama' : 'anthropic'

// Only check CLI subprocess availability if OAuth credentials exist
const cliDirectAvailable = oauthStatus.available && !oauthStatus.expired
? await getCliAvailability()
: false

// Check Ollama availability by hitting its API
let ollamaStatus: { available: boolean; error?: string } = { available: false }
if (provider === 'ollama') {
try {
const baseUrl = await getOllamaBaseUrl()
const res = await fetch(`${baseUrl}/api/tags`, { signal: AbortSignal.timeout(3000) })
if (res.ok) {
const data = await res.json() as { models?: { name: string }[] }
ollamaStatus = { available: true }
if (data.models) {
(ollamaStatus as { available: boolean; models?: string[] }).models = data.models.map(m => m.name)
}
} else {
ollamaStatus = { available: false, error: `HTTP ${res.status}` }
}
} catch (err) {
ollamaStatus = { available: false, error: err instanceof Error ? err.message : 'Connection failed' }
}
}

return NextResponse.json({
...oauthStatus,
cliDirectAvailable,
mode: cliDirectAvailable ? 'cli' : oauthStatus.available ? 'oauth' : 'api-key',
codex: codexStatus,
ollama: ollamaStatus,
provider,
})
}
40 changes: 37 additions & 3 deletions app/api/settings/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,14 @@ const ALLOWED_OPENAI_MODELS = [

export async function GET(): Promise<NextResponse> {
try {
const [anthropic, anthropicModel, provider, openai, openaiModel, xClientId, xClientSecret] = await Promise.all([
const [anthropic, anthropicModel, provider, openai, openaiModel, ollamaModel, ollamaBaseUrl, xClientId, xClientSecret] = await Promise.all([
prisma.setting.findUnique({ where: { key: 'anthropicApiKey' } }),
prisma.setting.findUnique({ where: { key: 'anthropicModel' } }),
prisma.setting.findUnique({ where: { key: 'aiProvider' } }),
prisma.setting.findUnique({ where: { key: 'openaiApiKey' } }),
prisma.setting.findUnique({ where: { key: 'openaiModel' } }),
prisma.setting.findUnique({ where: { key: 'ollamaModel' } }),
prisma.setting.findUnique({ where: { key: 'ollamaBaseUrl' } }),
prisma.setting.findUnique({ where: { key: 'x_oauth_client_id' } }),
prisma.setting.findUnique({ where: { key: 'x_oauth_client_secret' } }),
])
Expand All @@ -42,6 +44,8 @@ export async function GET(): Promise<NextResponse> {
openaiApiKey: maskKey(openai?.value ?? null),
hasOpenaiKey: openai !== null,
openaiModel: openaiModel?.value ?? 'gpt-4.1-mini',
ollamaModel: ollamaModel?.value ?? 'llama3.1',
ollamaBaseUrl: ollamaBaseUrl?.value ?? 'http://localhost:11434',
xOAuthClientId: maskKey(xClientId?.value ?? null),
xOAuthClientSecret: maskKey(xClientSecret?.value ?? null),
hasXOAuth: !!xClientId?.value,
Expand All @@ -62,6 +66,8 @@ export async function POST(request: NextRequest): Promise<NextResponse> {
provider?: string
openaiApiKey?: string
openaiModel?: string
ollamaModel?: string
ollamaBaseUrl?: string
xOAuthClientId?: string
xOAuthClientSecret?: string
} = {}
Expand All @@ -71,11 +77,11 @@ export async function POST(request: NextRequest): Promise<NextResponse> {
return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 })
}

const { anthropicApiKey, anthropicModel, provider, openaiApiKey, openaiModel } = body
const { anthropicApiKey, anthropicModel, provider, openaiApiKey, openaiModel, ollamaModel, ollamaBaseUrl } = body

// Save provider if provided
if (provider !== undefined) {
if (provider !== 'anthropic' && provider !== 'openai') {
if (provider !== 'anthropic' && provider !== 'openai' && provider !== 'ollama') {
return NextResponse.json({ error: 'Invalid provider' }, { status: 400 })
}
await prisma.setting.upsert({
Expand Down Expand Up @@ -115,6 +121,34 @@ export async function POST(request: NextRequest): Promise<NextResponse> {
return NextResponse.json({ saved: true })
}

// Save Ollama model if provided (free-form — user can type any model name)
if (ollamaModel !== undefined) {
if (typeof ollamaModel !== 'string' || ollamaModel.trim() === '') {
return NextResponse.json({ error: 'Invalid Ollama model' }, { status: 400 })
}
await prisma.setting.upsert({
where: { key: 'ollamaModel' },
update: { value: ollamaModel.trim() },
create: { key: 'ollamaModel', value: ollamaModel.trim() },
})
invalidateSettingsCache()
return NextResponse.json({ saved: true })
}

// Save Ollama base URL if provided
if (ollamaBaseUrl !== undefined) {
if (typeof ollamaBaseUrl !== 'string' || ollamaBaseUrl.trim() === '') {
return NextResponse.json({ error: 'Invalid Ollama base URL' }, { status: 400 })
}
await prisma.setting.upsert({
where: { key: 'ollamaBaseUrl' },
update: { value: ollamaBaseUrl.trim() },
create: { key: 'ollamaBaseUrl', value: ollamaBaseUrl.trim() },
})
invalidateSettingsCache()
return NextResponse.json({ saved: true })
}

// Save Anthropic key if provided
if (anthropicApiKey !== undefined) {
if (typeof anthropicApiKey !== 'string' || anthropicApiKey.trim() === '') {
Expand Down
25 changes: 25 additions & 0 deletions app/api/settings/test/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -76,5 +76,30 @@ export async function POST(request: NextRequest): Promise<NextResponse> {
}
}

if (provider === 'ollama') {
try {
const { resolveOllamaClient } = await import('@/lib/ai-client')
const { getOllamaBaseUrl, getOllamaModel } = await import('@/lib/settings')
const baseUrl = await getOllamaBaseUrl()
const model = await getOllamaModel()
const client = await resolveOllamaClient(baseUrl)

await client.chat.completions.create({
model,
max_tokens: 5,
messages: [{ role: 'user', content: 'hi' }],
})
return NextResponse.json({ working: true })
} catch (err) {
const msg = err instanceof Error ? err.message : String(err)
const friendly = msg.includes('ECONNREFUSED')
? 'Cannot connect to Ollama — is it running? (ollama serve)'
: msg.includes('model')
? `Model not found — run: ollama pull <model-name>`
: msg.slice(0, 120)
return NextResponse.json({ working: false, error: friendly })
}
}

return NextResponse.json({ error: 'Unknown provider' }, { status: 400 })
}
Loading