Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file modified .coverage
Binary file not shown.
2,299 changes: 1,176 additions & 1,123 deletions coverage.xml

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "hatchling.build"

[project]
name = "slopometry"
version = "20260117-1"
version = "20260121-1"
description = "Opinionated code quality metrics for code agents and humans"
readme = "README.md"
requires-python = ">=3.13"
Expand Down
42 changes: 39 additions & 3 deletions src/slopometry/core/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -428,6 +428,31 @@ def get_session_events(self, session_id: str) -> list[HookEvent]:
)
return events

def get_session_basic_info(self, session_id: str) -> tuple[datetime, int] | None:
"""Get minimal session info (start_time, total_events) without expensive computations.

Use this for operations that only need to verify a session exists and show basic info,
like cleanup confirmations.

Returns:
Tuple of (start_time, total_events) or None if session not found.
"""
with self._get_db_connection() as conn:
conn.row_factory = sqlite3.Row
row = conn.execute(
"""
SELECT MIN(timestamp) as start_time, COUNT(*) as total_events
FROM hook_events
WHERE session_id = ?
""",
(session_id,),
).fetchone()

if not row or row["total_events"] == 0:
return None

return datetime.fromisoformat(row["start_time"]), row["total_events"]

def get_session_statistics(self, session_id: str) -> SessionStatistics | None:
"""Calculate statistics for a session using optimized SQL aggregations.

Expand Down Expand Up @@ -1005,8 +1030,8 @@ def save_experiment_progress(self, progress: ExperimentProgress) -> None:
INSERT INTO experiment_progress (
experiment_id, timestamp, current_metrics, target_metrics,
cli_score, complexity_score, halstead_score, maintainability_score,
qpe_score, smell_penalty, effort_tier
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
qpe_score, smell_penalty
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
progress.experiment_id,
Expand All @@ -1019,7 +1044,6 @@ def save_experiment_progress(self, progress: ExperimentProgress) -> None:
progress.maintainability_score,
progress.qpe_score,
progress.smell_penalty,
progress.effort_tier.value if progress.effort_tier else None,
),
)
conn.commit()
Expand Down Expand Up @@ -1797,6 +1821,18 @@ def get_project_history(self, project_path: str) -> list[LeaderboardEntry]:
for row in rows
]

def clear_leaderboard(self) -> int:
    """Clear all leaderboard entries.

    Returns:
        Number of entries deleted
    """
    with self._get_db_connection() as conn:
        # Single DELETE and read cursor.rowcount: atomic, so the returned
        # count cannot drift from what was actually deleted (the previous
        # SELECT COUNT(*) + DELETE pair could race with concurrent writers).
        cursor = conn.execute("DELETE FROM qpe_leaderboard")
        # Explicit commit, consistent with other mutating methods
        # (e.g. save_experiment_progress) that do not rely on the
        # connection context manager for persistence.
        conn.commit()
        return cursor.rowcount


class SessionManager:
"""Manages sequence numbering for Claude Code sessions."""
Expand Down
11 changes: 7 additions & 4 deletions src/slopometry/core/git_tracker.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
"""Git state tracking for Claude Code sessions."""

import logging
import shutil
import subprocess
import tarfile
Expand All @@ -10,6 +11,8 @@

from slopometry.core.models import GitState

logger = logging.getLogger(__name__)


class GitOperationError(Exception):
"""Raised when a git operation fails unexpectedly.
Expand Down Expand Up @@ -140,8 +143,8 @@ def _get_current_commit_sha(self) -> str | None:
if result.returncode == 0:
return result.stdout.strip()

except (subprocess.TimeoutExpired, subprocess.SubprocessError, OSError):
pass
except (subprocess.TimeoutExpired, subprocess.SubprocessError, OSError) as e:
logger.debug(f"Failed to get current commit SHA: {e}")

return None

Expand Down Expand Up @@ -175,8 +178,8 @@ def get_python_files_from_commit(self, commit_ref: str = "HEAD~1") -> list[str]:
python_files = [f for f in all_files if f.endswith(".py")]
return python_files

except (subprocess.TimeoutExpired, subprocess.SubprocessError, OSError):
pass
except (subprocess.TimeoutExpired, subprocess.SubprocessError, OSError) as e:
logger.debug(f"Failed to get Python files from commit {commit_ref}: {e}")

return []

Expand Down
9 changes: 5 additions & 4 deletions src/slopometry/core/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -1334,9 +1334,10 @@ class CrossProjectComparison(BaseModel):


class LeaderboardEntry(BaseModel):
"""A persistent record of a project's QPE score at a specific commit.
"""A persistent record of a project's quality score at a specific commit.

Used for tracking QPE scores over time and comparing projects.
Used for cross-project quality comparison. Stores absolute quality (qpe_absolute)
rather than effort-normalized QPE, since effort varies between projects.
"""

id: int | None = Field(default=None, description="Database ID")
Expand All @@ -1345,10 +1346,10 @@ class LeaderboardEntry(BaseModel):
commit_sha_short: str = Field(description="7-character short git hash")
commit_sha_full: str = Field(description="Full git hash for deduplication")
measured_at: datetime = Field(default_factory=datetime.now, description="Date of the analyzed commit")
qpe_score: float = Field(description="Quality-per-effort score")
qpe_score: float = Field(description="Absolute quality score (qpe_absolute) for cross-project comparison")
mi_normalized: float = Field(description="Maintainability Index normalized to 0-1")
smell_penalty: float = Field(description="Penalty from code smells")
adjusted_quality: float = Field(description="MI after smell penalty applied")
adjusted_quality: float = Field(description="MI × (1 - smell_penalty) + bonuses")
effort_factor: float = Field(description="log(total_halstead_effort + 1)")
total_effort: float = Field(description="Total Halstead Effort")
metrics_json: str = Field(description="Full ExtendedComplexityMetrics as JSON")
Expand Down
61 changes: 60 additions & 1 deletion src/slopometry/core/project_guard.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,30 @@
"""Guard against running analysis in directories with multiple projects."""

import logging
import os
import subprocess
from pathlib import Path

logger = logging.getLogger(__name__)

# Directories blocked from analysis: the user's home directory and common
# system roots. Compared by exact resolved-path equality (not containment)
# in _is_blocked_directory.
BLOCKED_DIRECTORIES = {
    Path.home(),
    Path("/"),
    Path("/usr"),
    Path("/opt"),
    Path("/var"),
    Path("/tmp"),
}


class UnsafeDirectoryError(Exception):
    """Raised when analysis is attempted in a blocked directory like home."""

    def __init__(self, directory: Path, reason: str):
        # Keep the offending path and the human-readable reason available
        # to callers that catch this exception.
        self.directory = directory
        self.reason = reason
        message = f"Refusing to analyze {directory}: {reason}"
        super().__init__(message)


class MultiProjectError(Exception):
"""Raised when analysis is attempted in a directory with multiple projects."""
Expand Down Expand Up @@ -86,16 +105,56 @@ def scan_dir(path: Path, depth: int) -> None:
return projects


def _is_blocked_directory(path: Path) -> str | None:
    """Check if path is a blocked directory.

    Returns:
        Reason string if blocked, None if allowed
    """
    resolved = path.resolve()

    def matches(candidate: Path) -> bool:
        # resolve() can fail on broken symlinks / invalid paths; treat
        # an unresolvable candidate as not matching.
        try:
            return resolved == candidate.resolve()
        except (OSError, ValueError):
            return False

    if any(matches(blocked) for blocked in BLOCKED_DIRECTORIES):
        return f"'{resolved}' is a system/home directory"

    home = Path.home()
    # XDG base directories default to the conventional ~/.local, ~/.cache,
    # ~/.config locations when the environment variables are unset.
    user_data_dirs = [
        Path(os.environ.get("XDG_DATA_HOME", home / ".local" / "share")),
        Path(os.environ.get("XDG_CACHE_HOME", home / ".cache")),
        Path(os.environ.get("XDG_CONFIG_HOME", home / ".config")),
        home / ".local",
    ]

    if any(matches(sensitive) for sensitive in user_data_dirs):
        return f"'{resolved}' is a user data/cache directory"

    return None


def guard_single_project(root: Path, max_depth: int = 2) -> None:
"""Raise MultiProjectError if directory contains multiple projects.
"""Raise error if directory is unsafe or contains multiple projects.

Args:
root: Directory to check
max_depth: Maximum directory depth to search

Raises:
UnsafeDirectoryError: If directory is blocked (home, /, etc.)
MultiProjectError: If multiple git repositories found
"""
if reason := _is_blocked_directory(root):
raise UnsafeDirectoryError(root, reason)

projects = detect_multi_project_directory(root, max_depth=max_depth, max_projects=1)

if len(projects) > 1:
Expand Down
11 changes: 7 additions & 4 deletions src/slopometry/core/project_tracker.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,15 @@
"""Project identification logic."""

import logging
import subprocess
from pathlib import Path

import toml

from slopometry.core.models import Project, ProjectSource

logger = logging.getLogger(__name__)


class ProjectTracker:
"""Determines the project based on git, pyproject.toml, or working directory."""
Expand Down Expand Up @@ -51,8 +54,8 @@ def _get_project_from_git(self) -> Project | None:
)
if result.returncode == 0 and result.stdout.strip():
return Project(name=result.stdout.strip(), source=ProjectSource.GIT)
except (subprocess.TimeoutExpired, subprocess.SubprocessError, FileNotFoundError):
pass
except (subprocess.TimeoutExpired, subprocess.SubprocessError, FileNotFoundError) as e:
logger.error(f"Git operation failed in {self.working_dir}: {e}")

return None

Expand All @@ -67,7 +70,7 @@ def _get_project_from_pyproject(self) -> Project | None:
project_name = data.get("project", {}).get("name")
if project_name and isinstance(project_name, str):
return Project(name=project_name, source=ProjectSource.PYPROJECT)
except (toml.TomlDecodeError, OSError, KeyError, TypeError):
pass
except (toml.TomlDecodeError, OSError, KeyError, TypeError) as e:
logger.error(f"Failed to parse pyproject.toml at {pyproject_path}: {e}")

return None
Loading