Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
6554a94
feat: log patterns migrations
dawkaka Jan 22, 2026
19e5557
log pattern model types
dawkaka Jan 22, 2026
c729678
feat: complete log patterns partition
dawkaka Jan 22, 2026
598d185
Auto-format code with fourmolu
github-actions[bot] Jan 22, 2026
5e03ab0
ui for log patterns
dawkaka Jan 22, 2026
d983b09
Auto-format code with fourmolu
github-actions[bot] Jan 22, 2026
fc40a76
fix drain tests
dawkaka Jan 22, 2026
b55e22f
fix hlint errors
dawkaka Jan 22, 2026
282bfb8
use select where
dawkaka Jan 22, 2026
c5f3125
fetch log ids at once to avoid N+1 queries
dawkaka Jan 22, 2026
f2abf87
Auto-format code with fourmolu
github-actions[bot] Jan 22, 2026
83e8e1f
feat: use text quasiquotes
dawkaka Jan 22, 2026
642b376
add issue columns
dawkaka Jan 22, 2026
07f5f18
claude review fixes
dawkaka Jan 22, 2026
684e41e
Auto-format code with fourmolu
github-actions[bot] Jan 22, 2026
a254f11
Remove focus outline from AI search input
tonyalaribe Jan 22, 2026
bf14999
chore: multiway if
dawkaka Jan 23, 2026
127bdbe
Auto-format code with fourmolu
github-actions[bot] Jan 23, 2026
5bbf760
auto set baseline to established after 24 hours
dawkaka Jan 23, 2026
2a04530
update log patterns
dawkaka Jan 24, 2026
2733507
Auto-format code with fourmolu
github-actions[bot] Jan 24, 2026
eabc206
fetch log pattern stats without joining tables
dawkaka Jan 27, 2026
e0566a6
Auto-format code with fourmolu
github-actions[bot] Jan 27, 2026
bd731dc
use text neat interpolation
dawkaka Jan 27, 2026
6c3b074
remove skip
dawkaka Jan 27, 2026
f9f6cbd
improve drain algorithm tokenization
dawkaka Jan 28, 2026
8d48445
add field path to log patterns table
dawkaka Jan 28, 2026
3d22265
add body field path
dawkaka Jan 28, 2026
3e42aef
add field path to drain
dawkaka Jan 28, 2026
d819123
Auto-format code with fourmolu
github-actions[bot] Jan 28, 2026
30873f9
drain improvements
dawkaka Jan 29, 2026
8de17bf
Auto-format code with fourmolu
github-actions[bot] Jan 29, 2026
110a345
chore: fix hlint
dawkaka Jan 29, 2026
76156c2
Auto-format code with fourmolu
github-actions[bot] Jan 29, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions monoscope.cabal
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ library
Models.Apis.RequestDumps
Models.Apis.Shapes
Models.Apis.Slack
Models.Apis.LogPatterns
Models.Projects.Dashboards
Models.Projects.GitSync
Models.Projects.ProjectApiKeys
Expand Down
181 changes: 149 additions & 32 deletions src/BackgroundJobs.hs

Large diffs are not rendered by default.

215 changes: 168 additions & 47 deletions src/Models/Apis/Issues.hs
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@ module Models.Apis.Issues (
APIChangeData (..),
RuntimeExceptionData (..),
QueryAlertData (..),
LogPatternRateChangeData (..),
LogPatternData (..),

-- * Database Operations
insertIssue,
Expand All @@ -44,6 +46,8 @@ module Models.Apis.Issues (
issueIdText,
parseIssueType,
issueTypeToText,
createLogPatternIssue,
createLogPatternRateChangeIssue,

-- * AI Conversations
AIConversation (..),
Expand Down Expand Up @@ -81,6 +85,7 @@ import Effectful (Eff)
import Effectful.PostgreSQL qualified as PG
import Models.Apis.Anomalies (PayloadChange)
import Models.Apis.Anomalies qualified as Anomalies
import Models.Apis.LogPatterns qualified as LogPatterns
import Models.Apis.RequestDumps qualified as RequestDumps
import Models.Projects.Projects qualified as Projects
import Models.Users.Users qualified as Users
Expand All @@ -104,6 +109,8 @@ data IssueType
= APIChange
| RuntimeException
| QueryAlert
| LogPattern
| LogPatternRateChange
deriving stock (Eq, Generic, Show)
deriving anyclass (NFData)
deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.ConstructorTagModifier '[DAE.CamelToSnake]] IssueType
Expand All @@ -117,13 +124,17 @@ issueTypeToText :: IssueType -> Text
issueTypeToText APIChange = "api_change" -- Maps to anomaly_type 'shape' in DB
issueTypeToText RuntimeException = "runtime_exception"
issueTypeToText QueryAlert = "query_alert"
issueTypeToText LogPattern = "log_pattern"
issueTypeToText LogPatternRateChange = "log_pattern_rate_change"


parseIssueType :: Text -> Maybe IssueType
parseIssueType "api_change" = Just APIChange
parseIssueType "shape" = Just APIChange -- Handle DB anomaly_type
parseIssueType "runtime_exception" = Just RuntimeException
parseIssueType "query_alert" = Just QueryAlert
parseIssueType "log_pattern" = Just LogPattern
parseIssueType "log_pattern_rate_change" = Just LogPatternRateChange
parseIssueType _ = Nothing


Expand Down Expand Up @@ -197,30 +208,23 @@ data Issue = Issue
, updatedAt :: ZonedTime
, projectId :: Projects.ProjectId
, issueType :: IssueType
, endpointHash :: Text -- For API changes, empty for others
-- Status fields
, sourceType :: Text
, targetHash :: Text
, endpointHash :: Text
, acknowledgedAt :: Maybe ZonedTime
, acknowledgedBy :: Maybe Users.UserId
, archivedAt :: Maybe ZonedTime
, -- Issue details
title :: Text
, service :: Text
, title :: Text
, service :: Maybe Text
, environment :: Maybe Text
, critical :: Bool
, severity :: Text -- "critical", "warning", "info"
-- Impact metrics
, affectedRequests :: Int
, affectedClients :: Int
, errorRate :: Maybe Double
, -- Actions
recommendedAction :: Text
, recommendedAction :: Text
, migrationComplexity :: Text -- "low", "medium", "high", "n/a"
-- Data payload (polymorphic based on issueType)
, issueData :: Aeson AE.Value
, -- Payload changes tracking (for API changes)
requestPayloads :: Aeson [PayloadChange]
, requestPayloads :: Aeson [PayloadChange]
, responsePayloads :: Aeson [PayloadChange]
, -- LLM enhancement tracking
llmEnhancedAt :: Maybe UTCTime
, llmEnhancedAt :: Maybe UTCTime
, llmEnhancementVersion :: Maybe Int
}
deriving stock (Generic, Show)
Expand All @@ -235,18 +239,18 @@ instance Default Issue where
, createdAt = error "createdAt must be set"
, updatedAt = error "updatedAt must be set"
, projectId = def
, issueType = def
, issueType = error "issueType must be set"
, sourceType = ""
, targetHash = ""
, endpointHash = ""
, acknowledgedAt = Nothing
, acknowledgedBy = Nothing
, archivedAt = Nothing
, title = ""
, service = ""
, service = Nothing
, environment = Nothing
, critical = False
, severity = "info"
, affectedRequests = 0
, affectedClients = 0
, errorRate = Nothing
, recommendedAction = ""
, migrationComplexity = "low"
, issueData = Aeson AE.Null
Expand All @@ -269,7 +273,7 @@ data IssueL = IssueL
, acknowledgedBy :: Maybe Users.UserId
, archivedAt :: Maybe ZonedTime
, title :: Text
, service :: Text
, service :: Maybe Text
, critical :: Bool
, severity :: Text -- Computed in query
, affectedRequests :: Int -- Will be converted from affected_payloads in query
Expand All @@ -291,6 +295,9 @@ data IssueL = IssueL
deriving anyclass (FromRow, NFData)


-- | Insert a single issue
-- Note: ON CONFLICT upserts open issues (not acknowledged/archived) keyed on
-- (project_id, target_hash, issue_type), refreshing updated_at and issue_data
Expand All @@ -300,24 +307,18 @@ insertIssue issue = void $ PG.execute q issue
q =
[sql|
INSERT INTO apis.issues (
id, created_at, updated_at, project_id, issue_type, endpoint_hash,
id, created_at, updated_at, project_id, issue_type, source_type, target_hash, endpoint_hash,
acknowledged_at, acknowledged_by, archived_at,
title, service, critical, severity,
affected_requests, affected_clients, error_rate,
title, service, environment, critical, severity,
recommended_action, migration_complexity,
issue_data, request_payloads, response_payloads,
llm_enhanced_at, llm_enhancement_version
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (project_id, endpoint_hash)
WHERE issue_type = 'api_change'
AND acknowledged_at IS NULL
AND archived_at IS NULL
AND endpoint_hash != ''
ON CONFLICT (project_id, target_hash, issue_type)
WHERE acknowledged_at IS NULL AND archived_at IS NULL
DO UPDATE SET
updated_at = EXCLUDED.updated_at,
affected_requests = issues.affected_requests + EXCLUDED.affected_requests,
affected_clients = GREATEST(issues.affected_clients, EXCLUDED.affected_clients),
issue_data = issues.issue_data || EXCLUDED.issue_data
issue_data = EXCLUDED.issue_data
|]


Expand Down Expand Up @@ -351,7 +352,7 @@ selectIssues pid _typeM isAcknowledged isArchived limit offset timeRangeM sortM
q =
[text|
SELECT id, created_at, updated_at, project_id, issue_type::text, endpoint_hash, acknowledged_at, acknowledged_by, archived_at, title, service, critical,
CASE WHEN critical THEN 'critical' ELSE 'info' END, affected_requests, affected_clients, NULL::double precision,
CASE WHEN critical THEN 'critical' ELSE 'info' END, 0::int, 0::int, NULL::double precision,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why did you set these to 0?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Those are not part of the new issues table.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why are we adding it to the query?

recommended_action, migration_complexity, issue_data, request_payloads, response_payloads, NULL::timestamp with time zone, NULL::int, 0::bigint, updated_at
FROM apis.issues WHERE project_id = ? $timefilter $ackF $archF $orderBy LIMIT ? OFFSET ?
|]
Expand Down Expand Up @@ -389,7 +390,6 @@ updateIssueWithNewAnomaly issueId newData = void $ PG.execute q (Aeson newData,
|]


-- | Update issue enhancement
updateIssueEnhancement :: DB es => IssueId -> Text -> Text -> Text -> Eff es ()
updateIssueEnhancement issueId title action complexity = void $ PG.execute q params
where
Expand Down Expand Up @@ -464,16 +464,16 @@ createAPIChangeIssue projectId endpointHash anomalies = do
, projectId = projectId
, issueType = APIChange
, endpointHash = endpointHash
, sourceType = ""
, targetHash = ""
, environment = Nothing
, acknowledgedAt = Nothing
, acknowledgedBy = Nothing
, archivedAt = Nothing
, title = "API structure has changed"
, service = Anomalies.detectService Nothing firstAnomaly.endpointUrlPath
, service = Just $ Anomalies.detectService Nothing firstAnomaly.endpointUrlPath
, critical = isCritical
, severity = if isCritical then "critical" else "warning"
, affectedRequests = 0
, affectedClients = 0
, errorRate = Nothing
, recommendedAction = "Review the API changes and update your integration accordingly."
, migrationComplexity = if breakingChanges > 5 then "high" else if breakingChanges > 0 then "medium" else "low"
, issueData = Aeson $ AE.toJSON apiChangeData
Expand Down Expand Up @@ -510,16 +510,16 @@ createRuntimeExceptionIssue projectId atError = do
, projectId = projectId
, issueType = RuntimeException
, endpointHash = fromMaybe "" atError.hash
, sourceType = ""
, targetHash = ""
, environment = Nothing
, acknowledgedAt = Nothing
, acknowledgedBy = Nothing
, archivedAt = Nothing
, title = atError.rootErrorType <> ": " <> T.take 100 atError.message
, service = fromMaybe "unknown-service" atError.serviceName
, service = atError.serviceName
, critical = True
, severity = "critical"
, affectedRequests = 1
, affectedClients = 0
, errorRate = Nothing
, recommendedAction = "Investigate the error and implement a fix."
, migrationComplexity = "n/a"
, issueData = Aeson $ AE.toJSON exceptionData
Expand Down Expand Up @@ -556,16 +556,16 @@ createQueryAlertIssue projectId queryId queryName queryExpr threshold actual thr
, projectId = projectId
, issueType = QueryAlert
, endpointHash = ""
, sourceType = ""
, targetHash = ""
, environment = Nothing
, acknowledgedAt = Nothing
, acknowledgedBy = Nothing
, archivedAt = Nothing
, title = queryName <> " threshold " <> thresholdType <> " " <> show threshold
, service = "Monitoring"
, service = Just "Monitoring"
, critical = True
, severity = "warning"
, affectedRequests = 0
, affectedClients = 0
, errorRate = Nothing
, recommendedAction = "Review the query results and take appropriate action."
, migrationComplexity = "n/a"
, issueData = Aeson $ AE.toJSON alertData
Expand Down Expand Up @@ -663,3 +663,124 @@ slackThreadToConversationId cid ts = textToConversationId (cid <> ":" <> ts)

discordThreadToConversationId :: Text -> UUIDId "conversation"
discordThreadToConversationId = textToConversationId


-- | Create an issue for a log pattern rate change (volume spike or drop).
--
-- The z-score and percent change are derived from the supplied baseline
-- statistics; a zero stddev (resp. zero mean) yields 0 to avoid division
-- by zero. Severity: spikes of error-level patterns are critical, other
-- spikes are warnings, drops are informational.
createLogPatternRateChangeIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> Double -> Double -> Double -> Text -> IO Issue
createLogPatternRateChangeIssue projectId lp currentRate baselineMean baselineStddev direction = do
  now <- getCurrentTime
  let zScoreVal = if baselineStddev > 0 then abs (currentRate - baselineMean) / baselineStddev else 0
      changePercentVal = if baselineMean > 0 then abs ((currentRate / baselineMean) - 1) * 100 else 0
      rateChangeData =
        LogPatternRateChangeData
          { patternHash = lp.patternHash
          , logPattern = lp.logPattern
          , sampleMessage = lp.sampleMessage
          , logLevel = lp.logLevel
          , serviceName = lp.serviceName
          , currentRatePerHour = currentRate
          , baselineMean = baselineMean
          , baselineStddev = baselineStddev
          , zScore = zScoreVal
          , changePercent = changePercentVal
          , changeDirection = direction
          , detectedAt = now
          }
      -- Computed once; used for both the critical flag and the severity label.
      isErrorSpike = direction == "spike" && lp.logLevel == Just "error"
      severity =
        if
          | isErrorSpike -> "critical"
          | direction == "spike" -> "warning"
          | otherwise -> "info"
      -- Human-readable summary; numbers rounded to whole values for display.
      title =
        "Log Pattern "
          <> T.toTitle direction
          <> ": "
          <> T.take 60 lp.logPattern
          <> " ("
          <> show (round changePercentVal :: Int)
          <> "%)"
      recommendation =
        "Log pattern volume "
          <> direction
          <> " detected. Current: "
          <> show (round currentRate :: Int)
          <> "/hr, Baseline: "
          <> show (round baselineMean :: Int)
          <> "/hr ("
          <> show (round zScoreVal :: Int)
          <> " std devs)."
  mkIssue projectId LogPatternRateChange lp.patternHash lp.patternHash lp.serviceName isErrorSpike severity title recommendation "n/a" rateChangeData


-- | Build an issue flagging a newly detected log pattern so users can
-- review it and confirm the new behavior is expected.
createLogPatternIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> IO Issue
createLogPatternIssue projectId lp = do
  firstSeen <- getCurrentTime
  let isError = lp.logLevel == Just "error"
      -- Severity mirrors the log level of the pattern itself.
      severityLabel
        | isError = "critical"
        | lp.logLevel == Just "warning" = "warning"
        | otherwise = "info"
      payload =
        LogPatternData
          { patternHash = lp.patternHash
          , logPattern = lp.logPattern
          , sampleMessage = lp.sampleMessage
          , logLevel = lp.logLevel
          , serviceName = lp.serviceName
          , firstSeenAt = firstSeen
          , occurrenceCount = fromIntegral lp.occurrenceCount
          }
      issueTitle = "New Log Pattern: " <> T.take 100 lp.logPattern
  mkIssue projectId LogPattern lp.patternHash lp.patternHash lp.serviceName isError severityLabel issueTitle "A new log pattern has been detected. Review to ensure it's expected behavior." "n/a" payload


-- | Log Pattern issue data (new pattern detected).
-- Stored in the issue's JSON data column; serialized with snake_case field
-- names and Nothing fields omitted (see the CustomJSON options below).
data LogPatternData = LogPatternData
  { patternHash :: Text -- hash identifying the pattern, copied from the source LogPattern
  , logPattern :: Text -- the pattern template text
  , sampleMessage :: Maybe Text -- an example raw log message for the pattern, when available
  , logLevel :: Maybe Text -- e.g. "error" / "warning"; absent when the logs carry no level
  , serviceName :: Maybe Text -- emitting service, when known
  , firstSeenAt :: UTCTime -- when the pattern was first detected
  , occurrenceCount :: Int -- occurrences observed at issue-creation time
  }
  deriving stock (Generic, Show)
  deriving anyclass (NFData)
  deriving (FromField, ToField) via Aeson LogPatternData
  deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] LogPatternData


-- | Log Pattern Rate Change issue data (volume spike/drop).
-- Stored in the issue's JSON data column; serialized with snake_case field
-- names and Nothing fields omitted (see the CustomJSON options below).
data LogPatternRateChangeData = LogPatternRateChangeData
  { patternHash :: Text -- hash identifying the pattern, copied from the source LogPattern
  , logPattern :: Text -- the pattern template text
  , sampleMessage :: Maybe Text -- an example raw log message for the pattern, when available
  , logLevel :: Maybe Text -- e.g. "error" / "warning"; absent when the logs carry no level
  , serviceName :: Maybe Text -- emitting service, when known
  , currentRatePerHour :: Double -- observed rate at detection time
  , baselineMean :: Double -- baseline mean rate per hour
  , baselineStddev :: Double -- baseline standard deviation of the rate
  , zScore :: Double -- standard deviations from baseline (0 when stddev is 0)
  , changePercent :: Double -- percentage change from baseline (0 when mean is 0)
  , changeDirection :: Text -- "spike" or "drop"
  , detectedAt :: UTCTime -- when the rate change was detected
  }
  deriving stock (Generic, Show)
  deriving anyclass (NFData)
  deriving (FromField, ToField) via Aeson LogPatternRateChangeData
  deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] LogPatternRateChangeData


-- | Shared constructor for issues: mints a fresh UUID and timestamps, fills
-- the defaults common to every issue, and serializes the payload to JSON.
-- Positional arguments, in order: project id, issue type, target hash,
-- endpoint hash, service, critical flag, severity, title, recommended
-- action, migration complexity, issue payload.
mkIssue :: AE.ToJSON a => Projects.ProjectId -> IssueType -> Text -> Text -> Maybe Text -> Bool -> Text -> Text -> Text -> Text -> a -> IO Issue
mkIssue projectId issueType targetHash endpointHash service critical severity title recommendedAction migrationComplexity issueData = do
  freshId <- UUIDId <$> UUID4.nextRandom
  stamped <- getCurrentTime >>= utcToLocalZonedTime
  pure
    Issue
      { id = freshId
      , createdAt = stamped
      , updatedAt = stamped
      , projectId = projectId
      , issueType = issueType
      , sourceType = issueTypeToText issueType -- source_type mirrors the issue type's textual form
      , targetHash = targetHash
      , endpointHash = endpointHash
      , acknowledgedAt = Nothing
      , acknowledgedBy = Nothing
      , archivedAt = Nothing
      , title = title
      , service = service
      , environment = Nothing
      , critical = critical
      , severity = severity
      , recommendedAction = recommendedAction
      , migrationComplexity = migrationComplexity
      , issueData = Aeson $ AE.toJSON issueData
      , requestPayloads = Aeson []
      , responsePayloads = Aeson []
      , llmEnhancedAt = Nothing
      , llmEnhancementVersion = Nothing
      }
Loading