From dcd5f9a8dbf9712aae8edeac8fb56480711f41a1 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Tue, 6 Jan 2026 12:31:36 +0000 Subject: [PATCH 01/71] reworking anomalies --- src/BackgroundJobs.hs | 16 +- src/Pages/Anomalies.hs | 219 ++++++++++-------- static/migrations/0019_add_trace_id.sql | 1 - ...ata_for_anomalies_issue_update_trigger.sql | 34 +++ 4 files changed, 155 insertions(+), 115 deletions(-) create mode 100644 static/migrations/0025_trace_data_for_anomalies_issue_update_trigger.sql diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 92984951f..f2360654c 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -352,7 +352,7 @@ processBackgroundJob authCtx bgJob = GitSyncFromRepo pid -> gitSyncFromRepo pid GitSyncPushDashboard pid dashboardId -> gitSyncPushDashboard pid (UUIDId dashboardId) GitSyncPushAllDashboards pid -> gitSyncPushAllDashboards pid - QueryMonitorsCheck -> checkTriggeredQueryMonitors + QueryMonitorsCheck -> pass -- checkTriggeredQueryMonitors -- | Run hourly scheduled tasks for all projects @@ -1073,20 +1073,6 @@ emailQueryMonitorAlert :: Monitors.QueryMonitorEvaled -> CI.CI Text -> Maybe Use emailQueryMonitorAlert monitorE@Monitors.QueryMonitorEvaled{alertConfig} email userM = whenJust userM (const pass) --- FIXME: implement query alert email using postmark --- sendEmail --- (CI.original email) --- [fmt| 🤖 APITOOLKIT: log monitor triggered `{alertConfig.title}` |] --- [fmtTrim| --- Hi {user.firstName},
--- --- The monitor: `{alertConfig.title}` was triggered and got above it's defined threshold. --- ---

--- Regards, --- Apitoolkit team --- |] - -- | Process new anomalies detected by database triggers -- This job is created by the apis.new_anomaly_proc() stored procedure -- when new entities (endpoints, shapes, fields, formats, errors) are inserted. diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index b6e1eb10e..ddcaac3e1 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -269,36 +269,45 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do h3_ [class_ "text-textStrong text-2xl font-semibold"] $ toHtml issue.title p_ [class_ "text-sm text-textWeak max-w-3xl"] $ toHtml issue.recommendedAction - -- Metrics & Timeline Row (8-column grid: 4 stats + chart) - div_ [class_ "grid grid-cols-4 lg:grid-cols-8 gap-4"] do - -- Stats (1 column each) - statBox_ (Just pid) Nothing "Affected Requests" "" (show issue.affectedRequests) Nothing Nothing - statBox_ (Just pid) Nothing "Affected Clients" "" (show issue.affectedClients) Nothing Nothing - whenJust errM $ \err -> do - timeStatBox_ "First Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.createdAt - timeStatBox_ "Last Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.updatedAt - -- Timeline (4 columns) - div_ [class_ "col-span-4"] - $ Widget.widget_ - $ (def :: Widget.Widget) - { Widget.standalone = Just True - , Widget.id = Just $ issueId <> "-timeline" - , Widget.wType = Widget.WTTimeseries - , Widget.title = Just "Error trends" - , Widget.showTooltip = Just True - , Widget.xAxis = Just (def{Widget.showAxisLabel = Just True}) - , Widget.yAxis = Just (def{Widget.showOnlyMaxLabel = Just True}) - , Widget.query = Just "status_code == \"ERROR\" | summarize count(*) by bin_auto(timestamp), status_code" - , Widget._projectId = Just issue.projectId - , Widget.hideLegend = Just True - } + -- -- Metrics & Timeline Row (8-column grid: 4 stats + chart) + -- div_ [class_ "grid grid-cols-4 lg:grid-cols-8 gap-4"] do + -- -- Stats (1 column each) + -- statBox_ (Just pid) Nothing "Affected Requests" "" (show issue.affectedRequests) Nothing Nothing + -- statBox_ (Just pid) Nothing "Affected Clients" "" (show issue.affectedClients) Nothing Nothing + -- whenJust errM $ \err -> do + -- timeStatBox_ "First Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.createdAt + -- timeStatBox_ "Last Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.updatedAt + -- Timeline (4 columns) + let widget = + div_ [class_ "col-span-4"] + $ Widget.widget_ + $ (def :: Widget.Widget) + { Widget.standalone = Just True + , Widget.id = Just $ issueId <> "-timeline" + , Widget.wType = Widget.WTTimeseries + , Widget.title = Just "Error trends" + , Widget.showTooltip = Just True + , Widget.xAxis = Just (def{Widget.showAxisLabel = Just True}) + , Widget.yAxis = Just (def{Widget.showOnlyMaxLabel = Just True}) + , Widget.query = Just "status_code == \"ERROR\" | summarize count(*) by bin_auto(timestamp), status_code" + , Widget._projectId = Just issue.projectId + , Widget.hideLegend = Just True + } -- Two Column Layout - div_ [class_ "flex flex-col gap-4"] do - div_ [class_ "grid grid-cols-2 gap-4 w-full"] do - case issue.issueType of - Issues.RuntimeException -> do - case AE.fromJSON (getAeson issue.issueData) of - AE.Success (exceptionData :: Issues.RuntimeExceptionData) -> do + case issue.issueType of + Issues.RuntimeException -> do + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (exceptionData :: Issues.RuntimeExceptionData) -> do + div_ [class_ "grid grid-cols-4 lg:grid-cols-8 gap-4"] do + -- Stats (1 column each) + statBox_ (Just pid) Nothing 
"Affected Requests" "" (show issue.affectedRequests) Nothing Nothing + statBox_ (Just pid) Nothing "Affected Clients" "" (show issue.affectedClients) Nothing Nothing + whenJust errM $ \err -> do + timeStatBox_ "First Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.createdAt + timeStatBox_ "Last Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.updatedAt + widget + div_ [class_ "flex flex-col gap-4"] do + div_ [class_ "grid grid-cols-2 gap-4 w-full"] do div_ [class_ "surface-raised rounded-2xl overflow-hidden"] do div_ [class_ "px-4 py-3 border-b border-strokeWeak flex items-center justify-between"] do div_ [class_ "flex items-center gap-2"] do @@ -343,62 +352,77 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do div_ [] do span_ [class_ "text-sm text-textWeak"] "Service:" span_ [class_ "ml-2 text-sm"] $ toHtml $ fromMaybe "Unknown service" err.errorData.serviceName - _ -> pass - Issues.QueryAlert -> do - case AE.fromJSON (getAeson issue.issueData) of - AE.Success (alertData :: Issues.QueryAlertData) -> do - div_ [class_ "mb-4"] do - span_ [class_ "text-sm text-textWeak mb-2 block font-medium"] "Query:" - div_ [class_ "bg-fillInformation-weak border border-strokeInformation-weak rounded-lg p-3 text-sm font-mono text-fillInformation-strong max-w-2xl overflow-x-auto"] - $ toHtml alertData.queryExpression - _ -> pass _ -> pass - - div_ [class_ "surface-raised rounded-2xl overflow-hidden", id_ "error-details-container"] do - div_ [class_ "px-4 border-b border-b-strokeWeak flex items-center justify-between"] do - div_ [class_ "flex items-center gap-2"] do - faSprite_ "magnifying-glass-chart" "regular" "w-4 h-4 text-iconNeutral" - h4_ [class_ "text-textStrong text-lg font-medium"] "Investigation" - div_ [class_ "flex items-center"] do - let aUrl = "/p/" <> pid.toText <> "/anomalies/" <> issueId <> "" - a_ [href_ $ aUrl <> "?first_occurrence=true", class_ $ (if isFirst then "text-textBrand font-medium" else "text-textWeak hover:text-textStrong") <> " text-xs py-3 px-3 cursor-pointer transition-colors", term "data-tippy-content" "Show first trace the error occured"] "First" - a_ [href_ aUrl, class_ $ (if isFirst then "text-textWeak hover:text-textStrong" else "text-textBrand font-medium") <> " text-xs py-3 px-3 cursor-pointer transition-colors", term "data-tippy-content" "Show recent trace the error occured"] "Recent" - span_ [class_ "mx-4 w-px h-4 bg-strokeWeak"] pass - button_ [class_ "text-xs py-3 px-3 cursor-pointer err-tab t-tab-active font-medium", onclick_ "navigatable(this, '#span-content', '#error-details-container', 't-tab-active', 'err')"] "Trace" - button_ [class_ "text-xs py-3 px-3 cursor-pointer err-tab font-medium", onclick_ "navigatable(this, '#log-content', '#error-details-container', 't-tab-active', 'err')"] "Logs" - button_ [class_ "text-xs py-3 px-3 cursor-pointer err-tab font-medium", onclick_ "navigatable(this, '#replay-content', '#error-details-container', 't-tab-active', 'err')"] "Replay" - div_ [class_ "p-2 w-full overflow-x-hidden"] do - div_ [class_ "flex w-full err-tab-content", id_ "span-content"] do - div_ [id_ "trace_container", class_ "grow-1 max-w-[80%] w-1/2 min-w-[20%] shrink-1"] do - whenJust tr $ \t -> - tracePage pid t spanRecs - unless (isJust tr) - $ div_ [class_ "flex flex-col items-center justify-center h-48"] do - faSprite_ "inbox-full" "regular" "w-6 h-6 text-textWeak" - span_ [class_ "mt-2 text-sm text-textWeak"] "No trace data available for this error." 
- div_ [class_ "transition-opacity duration-200 mx-1", id_ "resizer-details_width-wrapper"] $ resizer_ "log_details_container" "details_width" False - div_ [class_ "grow-0 relative shrink-0 overflow-y-auto overflow-x-hidden max-h-[500px] w-1/2 w-c-scroll overflow-x-hidden overflow-y-auto", id_ "log_details_container"] do - span_ [class_ "htmx-indicator query-indicator absolute loading left-1/2 -translate-x-1/2 loading-dots absoute z-10 top-10", id_ "details_indicator"] "" - let (spanId, createdAt) = case spanRecs V.!? 0 of - Just sr -> (sr.uSpanId, formatUTC sr.timestamp) - Nothing -> ("", "") - let url = "/p/" <> pid.toText <> "/log_explorer/" <> spanId <> "/" <> createdAt <> "/detailed" - div_ [hxGet_ url, hxTarget_ "#log_details_container", hxSwap_ "innerHtml", hxTrigger_ "intersect one", hxIndicator_ "#details_indicator", term "hx-sync" "this:replace"] pass - - div_ [id_ "log-content", class_ "hidden err-tab-content"] do + Issues.APIChange -> do + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (changeData :: Issues.APIChangeData) -> do + div_ [class_ "grid grid-cols-4 lg:grid-cols-8 gap-4"] do + statBox_ (Just pid) Nothing "Affected Requests" "" (show issue.affectedRequests) Nothing Nothing + statBox_ (Just pid) Nothing "New fields" "" (show $ V.length changeData.newFields) Nothing Nothing + statBox_ (Just pid) Nothing "Modified fields" "" (show $ V.length changeData.modifiedFields) Nothing Nothing + statBox_ (Just pid) Nothing "Deleted fields" "" (show $ V.length changeData.deletedFields) Nothing Nothing + widget div_ [class_ "flex flex-col gap-4"] do - virtualTable pid (Just ("/p/" <> pid.toText <> "/log_explorer?json=true&query=" <> toUriStr ("kind==\"log\" AND context___trace_id==\"" <> fromMaybe "" (errM >>= (\x -> x.recentTraceId)) <> "\""))) - - div_ [id_ "replay-content", class_ "hidden err-tab-content"] do - let withSessionIds = V.catMaybes $ V.map (\sr -> (`lookupValueText` "id") =<< Map.lookup "session" =<< sr.attributes) spanRecs - unless (V.null withSessionIds) do - let sessionId = V.head withSessionIds - div_ [class_ "border border-r border-l w-max mx-auto"] - $ termRaw "session-replay" [id_ "sessionReplay", term "initialSession" sessionId, class_ "shrink-1 flex flex-col", term "projectId" pid.toText, term "containerId" "sessionPlayerWrapper"] ("" :: Text) + div_ [class_ "w-full"] do + div_ [class_ "flex items-center gap-2"] do + span_ [class_ $ "badge " <> methodFillColor changeData.endpointMethod] $ toHtml changeData.endpointMethod + span_ [class_ "monospace px-2 py-1 rounded text-xs text-textStrong"] $ toHtml changeData.endpointPath + div_ [class_ "mt-4 border border-strokeWeak rounded-lg overflow-hidden bg-bgRaised"] $ renderPayloadChanges issue.id issue.issueType issue.requestPayloads issue.responsePayloads + _ -> pass + Issues.QueryAlert -> do + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (alertData :: Issues.QueryAlertData) -> do + div_ [class_ "mb-4"] do + span_ [class_ "text-sm text-textWeak mb-2 block font-medium"] "Query:" + div_ [class_ "bg-fillInformation-weak border border-strokeInformation-weak rounded-lg p-3 text-sm font-mono text-fillInformation-strong max-w-2xl overflow-x-auto"] + $ toHtml alertData.queryExpression + _ -> pass - when (V.null withSessionIds) - $ div_ [class_ "flex flex-col gap-4"] do - emptyState_ "No Replay Available" "No session replays associated with this trace" (Just "https://monoscope.tech/docs/sdks/Javascript/browser/") "Session Replay Guide" + div_ [class_ "surface-raised rounded-2xl overflow-hidden", 
id_ "error-details-container"] do + div_ [class_ "px-4 border-b border-b-strokeWeak flex items-center justify-between"] do + div_ [class_ "flex items-center gap-2"] do + faSprite_ "magnifying-glass-chart" "regular" "w-4 h-4 text-iconNeutral" + h4_ [class_ "text-textStrong text-lg font-medium"] "Investigation" + div_ [class_ "flex items-center"] do + let aUrl = "/p/" <> pid.toText <> "/anomalies/" <> issueId <> "" + a_ [href_ $ aUrl <> "?first_occurrence=true", class_ $ (if isFirst then "text-textBrand font-medium" else "text-textWeak hover:text-textStrong") <> " text-xs py-3 px-3 cursor-pointer transition-colors", term "data-tippy-content" "Show first trace the error occured"] "First" + a_ [href_ aUrl, class_ $ (if isFirst then "text-textWeak hover:text-textStrong" else "text-textBrand font-medium") <> " text-xs py-3 px-3 cursor-pointer transition-colors", term "data-tippy-content" "Show recent trace the error occured"] "Recent" + span_ [class_ "mx-4 w-px h-4 bg-strokeWeak"] pass + button_ [class_ "text-xs py-3 px-3 cursor-pointer err-tab t-tab-active font-medium", onclick_ "navigatable(this, '#span-content', '#error-details-container', 't-tab-active', 'err')"] "Trace" + button_ [class_ "text-xs py-3 px-3 cursor-pointer err-tab font-medium", onclick_ "navigatable(this, '#log-content', '#error-details-container', 't-tab-active', 'err')"] "Logs" + button_ [class_ "text-xs py-3 px-3 cursor-pointer err-tab font-medium", onclick_ "navigatable(this, '#replay-content', '#error-details-container', 't-tab-active', 'err')"] "Replay" + div_ [class_ "p-2 w-full overflow-x-hidden"] do + div_ [class_ "flex w-full err-tab-content", id_ "span-content"] do + div_ [id_ "trace_container", class_ "grow-1 max-w-[80%] w-1/2 min-w-[20%] shrink-1"] do + whenJust tr $ \t -> + tracePage pid t spanRecs + unless (isJust tr) + $ div_ [class_ "flex flex-col items-center justify-center h-48"] do + faSprite_ "inbox-full" "regular" "w-6 h-6 text-textWeak" + span_ [class_ "mt-2 text-sm text-textWeak"] "No trace data available for this error." + div_ [class_ "transition-opacity duration-200 mx-1", id_ "resizer-details_width-wrapper"] $ resizer_ "log_details_container" "details_width" False + div_ [class_ "grow-0 relative shrink-0 overflow-y-auto overflow-x-hidden max-h-[500px] w-1/2 w-c-scroll overflow-x-hidden overflow-y-auto", id_ "log_details_container"] do + span_ [class_ "htmx-indicator query-indicator absolute loading left-1/2 -translate-x-1/2 loading-dots absoute z-10 top-10", id_ "details_indicator"] "" + let (spanId, createdAt) = case spanRecs V.!? 
0 of + Just sr -> (sr.uSpanId, formatUTC sr.timestamp) + Nothing -> ("", "") + let url = "/p/" <> pid.toText <> "/log_explorer/" <> spanId <> "/" <> createdAt <> "/detailed" + div_ [hxGet_ url, hxTarget_ "#log_details_container", hxSwap_ "innerHtml", hxTrigger_ "intersect one", hxIndicator_ "#details_indicator", term "hx-sync" "this:replace"] pass + + div_ [id_ "log-content", class_ "hidden err-tab-content"] do + div_ [class_ "flex flex-col gap-4"] do + virtualTable pid (Just ("/p/" <> pid.toText <> "/log_explorer?json=true&query=" <> toUriStr ("kind==\"log\" AND context___trace_id==\"" <> fromMaybe "" (errM >>= (\x -> x.recentTraceId)) <> "\""))) + + div_ [id_ "replay-content", class_ "hidden err-tab-content"] do + let withSessionIds = V.catMaybes $ V.map (\sr -> (`lookupValueText` "id") =<< Map.lookup "session" =<< sr.attributes) spanRecs + unless (V.null withSessionIds) do + let sessionId = V.head withSessionIds + div_ [class_ "border border-r border-l w-max mx-auto"] + $ termRaw "session-replay" [id_ "sessionReplay", term "initialSession" sessionId, class_ "shrink-1 flex flex-col", term "projectId" pid.toText, term "containerId" "sessionPlayerWrapper"] ("" :: Text) + + when (V.null withSessionIds) + $ div_ [class_ "flex flex-col gap-4"] do + emptyState_ "No Replay Available" "No session replays associated with this trace" (Just "https://monoscope.tech/docs/sdks/Javascript/browser/") "Session Replay Guide" -- AI Chat section (inline with page content) anomalyAIChat_ pid issue.id @@ -992,7 +1016,7 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue summary_ [class_ "inline-flex items-center cursor-pointer whitespace-nowrap text-sm font-medium transition-all rounded-md gap-1.5 text-textBrand hover:text-textBrand/80 list-none"] do faSprite_ "chevron-right" "regular" "h-4 w-4 mr-1 transition-transform group-open:rotate-90" "View detailed payload changes" - div_ [class_ "mt-4 border border-strokeWeak rounded-lg overflow-hidden bg-bgRaised"] $ renderPayloadChanges issue + div_ [class_ "mt-4 border border-strokeWeak rounded-lg overflow-hidden bg-bgRaised"] $ renderPayloadChanges issue.id issue.issueType issue.requestPayloads issue.responsePayloads -- Action buttons div_ [class_ "flex items-center gap-3 mt-4 pt-4 border-t border-strokeWeak"] do @@ -1029,39 +1053,34 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue -- Render payload changes section -renderPayloadChanges :: Issues.IssueL -> Html () -renderPayloadChanges issue = - when (issue.issueType == Issues.APIChange) do - let requestChanges = getAeson issue.requestPayloads :: [Anomalies.PayloadChange] - let responseChanges = getAeson issue.responsePayloads :: [Anomalies.PayloadChange] +renderPayloadChanges :: Issues.IssueId -> Issues.IssueType -> Aeson [PayloadChange] -> Aeson [PayloadChange] -> Html () +renderPayloadChanges issueId issueType requestPayloads responsePayloads = + when (issueType == Issues.APIChange) do + let requestChanges = getAeson requestPayloads :: [Anomalies.PayloadChange] + let responseChanges = getAeson responsePayloads :: [Anomalies.PayloadChange] when (not (null requestChanges) || not (null responseChanges)) do div_ [class_ "border border-strokeWeak rounded-lg overflow-hidden bg-bgRaised group/payloadtabs"] do div_ [class_ "flex flex-col gap-2"] do - -- Tab navigation using radio buttons div_ [role_ "tablist", Aria.orientation_ "horizontal", class_ "text-muted-foreground h-9 items-center justify-center rounded-xl p-[3px] w-full grid grid-cols-2 
bg-fillWeak"] do - -- Response tab (default active) label_ [ role_ "tab" , class_ "h-[calc(100%-1px)] flex-1 justify-center rounded-xl border border-transparent px-2 py-1 text-sm font-medium whitespace-nowrap transition-all flex items-center gap-2 cursor-pointer has-[:checked]:bg-bgRaised has-[:checked]:text-textStrong bg-transparent text-textWeak" ] do - input_ [type_ "radio", name_ ("payload-tab-" <> Issues.issueIdText issue.id), class_ "hidden payload-tab-response", checked_] + input_ [type_ "radio", name_ ("payload-tab-" <> Issues.issueIdText issueId), class_ "hidden payload-tab-response", checked_] faSprite_ "arrow-right" "regular" "w-4 h-4" span_ [] $ "Response Payloads (" <> show (length responseChanges) <> ")" - -- Request tab label_ [ role_ "tab" , class_ "h-[calc(100%-1px)] flex-1 justify-center rounded-xl border border-transparent px-2 py-1 text-sm font-medium whitespace-nowrap transition-all flex items-center gap-2 cursor-pointer has-[:checked]:bg-bgRaised has-[:checked]:text-textStrong bg-transparent text-textWeak" ] do - input_ [type_ "radio", name_ ("payload-tab-" <> Issues.issueIdText issue.id), class_ "hidden payload-tab-request"] + input_ [type_ "radio", name_ ("payload-tab-" <> Issues.issueIdText issueId), class_ "hidden payload-tab-request"] faSprite_ "arrow-right" "regular" "w-4 h-4 rotate-180" span_ [] $ "Request Payloads (" <> show (length requestChanges) <> ")" - -- Tab panels - -- Response panel (visible when response tab is selected) div_ [ role_ "tabpanel" , class_ "flex-1 outline-none p-4 space-y-4 hidden group-has-[.payload-tab-response:checked]/payloadtabs:block" @@ -1070,8 +1089,6 @@ renderPayloadChanges issue = if null responseChanges then div_ [class_ "text-center py-8 text-textWeak"] "No response payload changes" else forM_ responseChanges (renderPayloadChange True) - - -- Request panel (visible when request tab is selected) div_ [ role_ "tabpanel" , class_ "flex-1 outline-none p-4 space-y-4 hidden group-has-[.payload-tab-request:checked]/payloadtabs:block" @@ -1213,7 +1230,9 @@ anomalyAcknowledgeButton :: Projects.ProjectId -> Issues.IssueId -> Bool -> Text anomalyAcknowledgeButton pid aid acked host = do let acknowledgeAnomalyEndpoint = "/p/" <> pid.toText <> "/anomalies/" <> Issues.issueIdText aid <> if acked then "/unacknowledge" else "/acknowledge?host=" <> host a_ - [ class_ $ "btn btn-sm gap-1.5 " <> if acked then "bg-fillSuccess-weak text-textSuccess border-strokeSuccess-weak" else "btn-primary" + [ class_ + $ "inline-flex items-center gap-2 cursor-pointer py-2 px-3 rounded-xl " + <> (if acked then "bg-fillSuccess-weak text-textSuccess" else "btn-primary") , term "data-tippy-content" "acknowledge issue" , hxGet_ acknowledgeAnomalyEndpoint , hxSwap_ "outerHTML" @@ -1227,7 +1246,9 @@ anomalyArchiveButton :: Projects.ProjectId -> Issues.IssueId -> Bool -> Html () anomalyArchiveButton pid aid archived = do let archiveAnomalyEndpoint = "/p/" <> pid.toText <> "/anomalies/" <> Issues.issueIdText aid <> if archived then "/unarchive" else "/archive" a_ - [ class_ $ "btn btn-sm btn-ghost gap-1.5 " <> if archived then "bg-fillWarning-weak text-textWarning border-strokeWarning-weak" else "" + [ class_ + $ "inline-flex items-center gap-2 cursor-pointer py-2 px-3 rounded-xl " + <> (if archived then " bg-fillSuccess-weak text-textSuccess" else "btn-primary") , term "data-tippy-content" $ if archived then "unarchive" else "archive" , hxGet_ archiveAnomalyEndpoint , hxSwap_ "outerHTML" diff --git a/static/migrations/0019_add_trace_id.sql 
b/static/migrations/0019_add_trace_id.sql
index 647284fe2..813567eca 100644
--- a/static/migrations/0019_add_trace_id.sql
+++ b/static/migrations/0019_add_trace_id.sql
@@ -8,7 +8,6 @@ ALTER TABLE apis.errors
 ADD COLUMN first_trace_id TEXT,
 ADD COLUMN recent_trace_id TEXT;
 
--- remove first_trace_id and recent_trace_id from apis.issues
 ALTER TABLE apis.issues
 DROP COLUMN IF EXISTS first_trace_id,
diff --git a/static/migrations/0025_trace_data_for_anomalies_issue_update_trigger.sql b/static/migrations/0025_trace_data_for_anomalies_issue_update_trigger.sql
new file mode 100644
index 000000000..4941221c0
--- /dev/null
+++ b/static/migrations/0025_trace_data_for_anomalies_issue_update_trigger.sql
@@ -0,0 +1,34 @@
BEGIN;

ALTER TABLE apis.endpoints
    ADD COLUMN first_trace_id TEXT,
    ADD COLUMN recent_trace_id TEXT,
    ADD COLUMN service TEXT;

ALTER TABLE apis.shapes
    ADD COLUMN first_trace_id TEXT,
    ADD COLUMN recent_trace_id TEXT,
    ADD COLUMN service TEXT;

CREATE OR REPLACE FUNCTION apis.update_occurance()
RETURNS trigger AS $$
DECLARE
    target_hash TEXT;
BEGIN
    target_hash := NEW.hash;
    UPDATE apis.issues
    SET
        affected_requests = affected_requests + 1,
        updated_at = NOW()
    WHERE endpoint_hash = target_hash;
    RETURN NULL;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER trg_update_issue_from_endpoints AFTER UPDATE ON apis.endpoints FOR EACH ROW EXECUTE FUNCTION apis.update_occurance();
CREATE TRIGGER trg_update_issue_from_shapes AFTER UPDATE ON apis.shapes FOR EACH ROW EXECUTE FUNCTION apis.update_occurance();
CREATE TRIGGER trg_update_issue_from_format AFTER UPDATE ON apis.format FOR EACH ROW EXECUTE FUNCTION apis.update_occurance();
CREATE TRIGGER trg_update_issue_from_fields AFTER UPDATE ON apis.fields FOR EACH ROW EXECUTE FUNCTION apis.update_occurance();
CREATE TRIGGER trg_update_issue_from_errors AFTER UPDATE ON apis.errors FOR EACH ROW EXECUTE FUNCTION apis.update_occurance();

COMMIT;
\ No newline at end of file
From 491ea3080426749d594de823ea7e4b337ce7afd0 Mon Sep 17 00:00:00 2001
From: Yussif Mohammed
Date: Wed, 7 Jan 2026 13:12:55 +0000
Subject: [PATCH 02/71] tables setup

---
 src/Models/Apis/LogPatterns.hs                | 251 ++++++++++++++++++
 .../0001_create_monoscope_tables.sql          |   2 +-
 .../0026_anomaly_detection_baselines.sql      | 153 +++++++++++
 .../0027_log_pattern_monitoring.sql           | 115 ++++++++
 4 files changed, 520 insertions(+), 1 deletion(-)
 create mode 100644 src/Models/Apis/LogPatterns.hs
 create mode 100644 static/migrations/0026_anomaly_detection_baselines.sql
 create mode 100644 static/migrations/0027_log_pattern_monitoring.sql

diff --git a/src/Models/Apis/LogPatterns.hs b/src/Models/Apis/LogPatterns.hs
new file mode 100644
index 000000000..9c4d14480
--- /dev/null
+++ b/src/Models/Apis/LogPatterns.hs
@@ -0,0 +1,251 @@
module Models.Apis.LogPatterns (
  LogPattern (..),
  LogPatternId,
  LogPatternState (..),
  getLogPatterns,
  getLogPatternByHash,
  getNewLogPatterns,
  acknowledgeLogPatterns,
  ignoreLogPatterns,
  upsertLogPattern,
  updateLogPatternStats,
  updateBaseline,
)
where

import Data.Aeson qualified as AE
import Data.Text qualified as T
import Data.Time
import Data.UUID qualified as UUID
import Data.Vector qualified as V
import Database.PostgreSQL.Entity.Types (CamelToSnake, Entity, FieldModifiers, GenericEntity, PrimaryKey, Schema, TableName)
import Database.PostgreSQL.Simple (FromRow, Only (..), ToRow)
import Database.PostgreSQL.Simple.FromField (FromField, ResultError (ConversionFailed, UnexpectedNull), fromField, returnError)
import Database.PostgreSQL.Simple.SqlQQ (sql)
import Database.PostgreSQL.Simple.ToField (Action (Escape), ToField, toField)
import Deriving.Aeson qualified as DAE
import Effectful (Eff)
import Effectful.PostgreSQL qualified as PG
import Models.Projects.Projects qualified as Projects
import Models.Users.Users qualified as Users
import Relude hiding (id)
import System.Types (DB)


newtype LogPatternId = LogPatternId {unLogPatternId :: Int64}
  deriving stock (Generic, Show)
  deriving newtype (AE.FromJSON, AE.ToJSON, Eq, FromField, NFData, Ord, ToField)


data LogPatternState
  = LPSNew
  | LPSAcknowledged
  | LPSIgnored
  deriving stock (Eq, Generic, Show)
  deriving anyclass (NFData)
  deriving
    (AE.FromJSON, AE.ToJSON)
    via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] LogPatternState


logPatternStateToText :: LogPatternState -> Text
logPatternStateToText LPSNew = "new"
logPatternStateToText LPSAcknowledged = "acknowledged"
logPatternStateToText LPSIgnored = "ignored"


parseLogPatternState :: (Eq s, IsString s) => s -> Maybe LogPatternState
parseLogPatternState "new" = Just LPSNew
parseLogPatternState "acknowledged" = Just LPSAcknowledged
parseLogPatternState "ignored" = Just LPSIgnored
parseLogPatternState _ = Nothing


instance ToField LogPatternState where
  toField = Escape . encodeUtf8 . logPatternStateToText


instance FromField LogPatternState where
  fromField f mdata =
    case mdata of
      Nothing -> returnError UnexpectedNull f ""
      Just bs ->
        case parseLogPatternState bs of
          Just s -> pure s
          Nothing -> returnError ConversionFailed f $ "Conversion error: Expected log pattern state, got " <> decodeUtf8 bs <> " instead."


data LogPattern = LogPattern
  { id :: LogPatternId
  , projectId :: Projects.ProjectId
  , createdAt :: ZonedTime
  , updatedAt :: ZonedTime
  , pattern :: Text
  , patternHash :: Text
  , serviceName :: Maybe Text
  , logLevel :: Maybe Text
  , sampleMessage :: Maybe Text
  , firstSeenAt :: ZonedTime
  , lastSeenAt :: ZonedTime
  , occurrenceCount :: Int64
  , state :: LogPatternState
  , acknowledgedBy :: Maybe Users.UserId
  , acknowledgedAt :: Maybe ZonedTime
  , baselineState :: Text
  , baselineVolumeHourlyMean :: Maybe Double
  , baselineVolumeHourlyStddev :: Maybe Double
  , baselineSamples :: Int
  , baselineUpdatedAt :: Maybe ZonedTime
  }
  deriving stock (Generic, Show)
  deriving anyclass (FromRow, NFData, ToRow)
  deriving
    (Entity)
    via (GenericEntity '[Schema "apis", TableName "log_patterns", PrimaryKey "id", FieldModifiers '[CamelToSnake]] LogPattern)
  deriving
    (AE.FromJSON, AE.ToJSON)
    via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] LogPattern


-- | Get all log patterns for a project
getLogPatterns :: DB es => Projects.ProjectId -> Maybe LogPatternState -> Int -> Int -> Eff es [LogPattern]
getLogPatterns pid mstate limit offset = PG.query q (pid, maybe "%" logPatternStateToText mstate, limit, offset)
  where
    q =
      [sql|
        SELECT id, project_id, created_at, updated_at, pattern, pattern_hash,
               service_name, log_level, sample_message, first_seen_at, last_seen_at,
               occurrence_count, state, acknowledged_by, acknowledged_at,
               baseline_state, baseline_volume_hourly_mean, baseline_volume_hourly_stddev,
               baseline_samples, baseline_updated_at
        FROM apis.log_patterns
        WHERE project_id = ? AND state LIKE ?
        ORDER BY last_seen_at DESC
        LIMIT ? OFFSET ?
      |]


-- | Get log pattern by hash
getLogPatternByHash :: DB es => Projects.ProjectId -> Text -> Eff es (Maybe LogPattern)
getLogPatternByHash pid hash = do
  results <- PG.query q (pid, hash)
  return $ listToMaybe results
  where
    q =
      [sql|
        SELECT id, project_id, created_at, updated_at, pattern, pattern_hash,
               service_name, log_level, sample_message, first_seen_at, last_seen_at,
               occurrence_count, state, acknowledged_by, acknowledged_at,
               baseline_state, baseline_volume_hourly_mean, baseline_volume_hourly_stddev,
               baseline_samples, baseline_updated_at
        FROM apis.log_patterns
        WHERE project_id = ? AND pattern_hash = ?
      |]


-- | Get new (unacknowledged) log patterns for a project
getNewLogPatterns :: DB es => Projects.ProjectId -> Eff es [LogPattern]
getNewLogPatterns pid = PG.query q (Only pid)
  where
    q =
      [sql|
        SELECT id, project_id, created_at, updated_at, pattern, pattern_hash,
               service_name, log_level, sample_message, first_seen_at, last_seen_at,
               occurrence_count, state, acknowledged_by, acknowledged_at,
               baseline_state, baseline_volume_hourly_mean, baseline_volume_hourly_stddev,
               baseline_samples, baseline_updated_at
        FROM apis.log_patterns
        WHERE project_id = ? AND state = 'new'
        ORDER BY first_seen_at DESC
      |]


-- | Acknowledge log patterns
acknowledgeLogPatterns :: DB es => Users.UserId -> V.Vector Text -> Eff es Int64
acknowledgeLogPatterns uid patternHashes
  | V.null patternHashes = pure 0
  | otherwise = PG.execute q (uid, patternHashes)
  where
    q =
      [sql|
        UPDATE apis.log_patterns
        SET state = 'acknowledged', acknowledged_by = ?, acknowledged_at = NOW()
        WHERE pattern_hash = ANY(?)
      |]


-- | Ignore log patterns (won't alert on them)
ignoreLogPatterns :: DB es => V.Vector Text -> Eff es Int64
ignoreLogPatterns patternHashes
  | V.null patternHashes = pure 0
  | otherwise = PG.execute q (Only patternHashes)
  where
    q =
      [sql|
        UPDATE apis.log_patterns
        SET state = 'ignored'
        WHERE pattern_hash = ANY(?)
      |]


-- | Upsert a log pattern (insert or update occurrence count)
upsertLogPattern ::
  DB es =>
  Projects.ProjectId ->
  Text -> -- pattern
  Text -> -- pattern_hash
  Maybe Text -> -- service_name
  Maybe Text -> -- log_level
  Maybe Text -> -- sample_message
  Eff es Int64
upsertLogPattern pid pat patHash serviceName logLevel sampleMsg =
  PG.execute q (pid, pat, patHash, serviceName, logLevel, sampleMsg)
  where
    q =
      [sql|
        INSERT INTO apis.log_patterns (project_id, pattern, pattern_hash, service_name, log_level, sample_message)
        VALUES (?, ?, ?, ?, ?, ?)
        ON CONFLICT (project_id, pattern_hash) DO UPDATE SET
          last_seen_at = NOW(),
          occurrence_count = apis.log_patterns.occurrence_count + 1,
          sample_message = COALESCE(EXCLUDED.sample_message, apis.log_patterns.sample_message)
      |]


-- | Update log pattern statistics (occurrence count, last seen)
updateLogPatternStats :: DB es => Projects.ProjectId -> Text -> Int64 -> Eff es Int64
updateLogPatternStats pid patHash additionalCount =
  PG.execute q (additionalCount, pid, patHash)
  where
    q =
      [sql|
        UPDATE apis.log_patterns
        SET occurrence_count = occurrence_count + ?,
            last_seen_at = NOW()
        WHERE project_id = ? AND pattern_hash = ?
      |]


-- | Update baseline data for a log pattern
updateBaseline ::
  DB es =>
  Projects.ProjectId ->
  Text -> -- pattern_hash
  Text -> -- baseline_state ('learning' or 'established')
  Double -> -- hourly_mean
  Double -> -- hourly_stddev
  Int -> -- samples
  Eff es Int64
updateBaseline pid patHash bState hourlyMean hourlyStddev samples =
  PG.execute q (bState, hourlyMean, hourlyStddev, samples, pid, patHash)
  where
    q =
      [sql|
        UPDATE apis.log_patterns
        SET baseline_state = ?,
            baseline_volume_hourly_mean = ?,
            baseline_volume_hourly_stddev = ?,
            baseline_samples = ?,
            baseline_updated_at = NOW()
        WHERE project_id = ? AND pattern_hash = ?
      |]
diff --git a/static/migrations/0001_create_monoscope_tables.sql b/static/migrations/0001_create_monoscope_tables.sql
index 5f4faab27..3763b7574 100644
--- a/static/migrations/0001_create_monoscope_tables.sql
+++ b/static/migrations/0001_create_monoscope_tables.sql
@@ -212,7 +212,7 @@ CREATE INDEX IF NOT EXISTS idx_apis_endpoints_project_id ON apis.endpoints(proje
 CREATE UNIQUE INDEX IF NOT EXISTS idx_apis_endpoints_hash ON apis.endpoints(hash);
 
 -----------------------------------------------------------------------
--- SHAPES table
+-- SHAPES table --
 -----------------------------------------------------------------------
 
 CREATE TABLE IF NOT EXISTS apis.shapes
diff --git a/static/migrations/0026_anomaly_detection_baselines.sql b/static/migrations/0026_anomaly_detection_baselines.sql
new file mode 100644
index 000000000..d71dba0fc
--- /dev/null
+++ b/static/migrations/0026_anomaly_detection_baselines.sql
@@ -0,0 +1,153 @@
BEGIN;

-- ============================================================================
-- 2. ENDPOINT BASELINES
-- ============================================================================
-- Per-endpoint, per-service, and per-log-pattern behavioral baselines for
-- detecting spikes and degradations.
-- Dimensions: error_rate, latency, volume

CREATE TABLE IF NOT EXISTS apis.baselines (
    id BIGSERIAL PRIMARY KEY,
    project_id UUID NOT NULL REFERENCES projects.projects(id) ON DELETE CASCADE,
    subject_type TEXT NOT NULL, -- 'endpoint', 'service', 'log_pattern'
    subject_key TEXT NOT NULL,  -- endpoint_hash or service_name or pattern_hash

    state TEXT NOT NULL DEFAULT 'learning', -- 'learning', 'established'
    min_observations INT DEFAULT 1000, -- need this many data points to establish

    baseline_data JSONB NOT NULL DEFAULT '{}',
    /*
      error_rate: { "mean": 0.02, "stddev": 0.008, "samples": 5000 }
      latency: { "mean": 65, "stddev": 40, "p50": 45, "p95": 120, "p99": 250, "samples": 5000 }
      volume: { "mean": 150, "stddev": 35, "samples": 1440 }
    */

    baseline_window_hours INT DEFAULT 24,

    last_calculated_at TIMESTAMPTZ,
    established_at TIMESTAMPTZ,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    UNIQUE(project_id, subject_type, subject_key)
);

SELECT manage_updated_at('apis.baselines');

CREATE INDEX IF NOT EXISTS idx_baselines_lookup
ON apis.baselines(project_id, subject_key, subject_type);

CREATE INDEX IF NOT EXISTS idx_baselines_established
ON apis.baselines(project_id, state)
WHERE state = 'established';

-- ============================================================================
-- 3. ERRORS TABLE (RECREATED WITH LIFECYCLE)
-- ============================================================================
-- Recreate errors table with lifecycle tracking.
-- States: new, escalating, ongoing, resolved, regressed

-- Drop existing trigger and table
DROP TRIGGER IF EXISTS error_created_anomaly ON apis.errors;
DROP TABLE IF EXISTS apis.errors;


CREATE TABLE apis.errors (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    project_id UUID NOT NULL REFERENCES projects.projects(id) ON DELETE CASCADE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    exception_type TEXT NOT NULL,
    message TEXT NOT NULL,
    stacktrace TEXT NOT NULL,
    hash TEXT NOT NULL,

    environment TEXT NOT NULL,
    service TEXT,
    runtime TEXT,
    error_data JSONB NOT NULL DEFAULT '{}',
    representative_message TEXT,
    first_event_id UUID,
    last_event_id UUID,
    state TEXT NOT NULL DEFAULT 'new',
    assignee_id UUID,
    assigned_at TIMESTAMPTZ,
    resolved_at TIMESTAMPTZ,
    regressed_at TIMESTAMPTZ,

    occurrences_1m INT NOT NULL DEFAULT 0,
    occurrences_5m INT NOT NULL DEFAULT 0,
    occurrences_1h INT NOT NULL DEFAULT 0,
    occurrences_24h INT NOT NULL DEFAULT 0,

    quiet_minutes INT NOT NULL DEFAULT 0,
    resolution_threshold_minutes INT NOT NULL DEFAULT 30,

    baseline_state TEXT NOT NULL DEFAULT 'learning',
    baseline_samples INT NOT NULL DEFAULT 0,
    baseline_error_rate_mean FLOAT,
    baseline_error_rate_stddev FLOAT,
    baseline_updated_at TIMESTAMPTZ,

    is_ignored BOOLEAN DEFAULT false,
    ignored_until TIMESTAMPTZ
);
SELECT manage_updated_at('apis.errors');

CREATE UNIQUE INDEX uniq_error_group ON apis.errors (project_id, hash, environment);
CREATE INDEX idx_errors_project_state ON apis.errors (project_id, state);
CREATE INDEX idx_errors_last_seen ON apis.errors (project_id, last_event_id);
CREATE INDEX idx_apis_errors_project_id ON apis.errors(project_id);
CREATE INDEX idx_errors_active ON apis.errors(project_id, state, updated_at DESC) WHERE state != 'resolved';
CREATE TRIGGER error_created_anomaly AFTER INSERT ON apis.errors FOR EACH ROW EXECUTE PROCEDURE apis.new_anomaly_proc('runtime_exception', 'created', 'skip_anomaly_record');

-- All data that is not a top-level column on errors goes into the error_data JSONB.
-- Whenever the same error occurs, we update error_data with the latest values for these fields.
-- An on-update trigger on errors can then refresh the top-level fields from error_data if needed.

CREATE TABLE apis.error_events (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    project_id UUID NOT NULL REFERENCES projects.projects(id) ON DELETE CASCADE,
    error_id UUID NOT NULL REFERENCES apis.errors(id) ON DELETE CASCADE,
    occurred_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    target_hash TEXT NOT NULL,
    exception_type TEXT NOT NULL,
    message TEXT NOT NULL,
    stack_trace TEXT NOT NULL,
    service_name TEXT NOT NULL,

    release TEXT,
    platform_version TEXT,

    request_method TEXT,
    request_path TEXT,
    endpoint_hash TEXT,

    trace_id TEXT,
    span_id TEXT,
    parent_span_id TEXT,

    user_id TEXT,
    user_email TEXT,
    user_ip INET,
    session_id TEXT,

    sample_rate FLOAT NOT NULL DEFAULT 1.0,
    ingestion_id UUID
);

-- Indexes for efficient queries
CREATE INDEX idx_error_events_error ON apis.error_events (error_id, occurred_at DESC);
CREATE INDEX idx_error_events_project ON apis.error_events (project_id, occurred_at DESC);
CREATE INDEX idx_error_events_trace ON apis.error_events (trace_id);
CREATE INDEX idx_error_events_service ON apis.error_events (service_name);
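
-- A hedged sketch (not part of this migration's triggers): the quiet_minutes
-- and resolution_threshold_minutes columns above are enough for a periodic
-- job to auto-resolve errors that have gone quiet, e.g.
--
--   UPDATE apis.errors
--   SET state = 'resolved', resolved_at = NOW()
--   WHERE state IN ('new', 'escalating', 'ongoing')
--     AND quiet_minutes >= resolution_threshold_minutes;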

COMMIT;
diff --git a/static/migrations/0027_log_pattern_monitoring.sql b/static/migrations/0027_log_pattern_monitoring.sql
new file mode 100644
index 000000000..ff49b0500
--- /dev/null
+++ b/static/migrations/0027_log_pattern_monitoring.sql
@@ -0,0 +1,115 @@
BEGIN;

CREATE TABLE IF NOT EXISTS apis.log_patterns (
    id BIGSERIAL PRIMARY KEY,
    project_id UUID NOT NULL REFERENCES projects.projects(id) ON DELETE CASCADE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    pattern TEXT NOT NULL,
    pattern_hash TEXT NOT NULL,

    service_name TEXT,
    log_level TEXT,
    sample_message TEXT,

    first_seen_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    last_seen_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    occurrence_count BIGINT NOT NULL DEFAULT 1,

    state TEXT NOT NULL DEFAULT 'new', -- 'new', 'acknowledged', 'ignored'
    acknowledged_by UUID REFERENCES users.users(id),
    acknowledged_at TIMESTAMPTZ,

    -- Baseline for volume spike detection
    baseline_state TEXT NOT NULL DEFAULT 'learning', -- 'learning', 'established'
    baseline_volume_hourly_mean FLOAT,
    baseline_volume_hourly_stddev FLOAT,
    baseline_samples INT NOT NULL DEFAULT 0,
    baseline_updated_at TIMESTAMPTZ,

    UNIQUE(project_id, pattern_hash)
);

SELECT manage_updated_at('apis.log_patterns');

-- Indexes for efficient queries
CREATE INDEX IF NOT EXISTS idx_log_patterns_project ON apis.log_patterns(project_id);
CREATE INDEX IF NOT EXISTS idx_log_patterns_project_state ON apis.log_patterns(project_id, state);
CREATE INDEX IF NOT EXISTS idx_log_patterns_last_seen ON apis.log_patterns(project_id, last_seen_at DESC);
CREATE INDEX IF NOT EXISTS idx_log_patterns_service ON apis.log_patterns(project_id, service_name);


CREATE OR REPLACE FUNCTION apis.new_log_pattern_proc() RETURNS trigger AS $$
DECLARE
    existing_job_id INT;
    existing_pattern_hashes JSONB;
BEGIN
    IF TG_WHEN <> 'AFTER' THEN
        RAISE EXCEPTION 'apis.new_log_pattern_proc() may only run as an AFTER trigger';
    END IF;

    -- Look for existing queued job to batch patterns together
    SELECT id, payload->'patternHashes'
    INTO existing_job_id, existing_pattern_hashes
    FROM background_jobs
    WHERE payload->>'tag' = 'NewLogPattern'
      AND payload->>'projectId' = NEW.project_id::TEXT
      AND status = 'queued'
    ORDER BY run_at ASC
    LIMIT 1;

    IF existing_job_id IS NOT NULL THEN
        -- Append to existing job
        UPDATE background_jobs SET payload = jsonb_build_object(
            'tag', 'NewLogPattern',
            'projectId', NEW.project_id,
            'createdAt', to_jsonb(NOW()),
            'patternHashes', existing_pattern_hashes || to_jsonb(NEW.pattern_hash),
            'serviceName', NEW.service_name
        ) WHERE id = existing_job_id;
    ELSE
        -- Create new job
        INSERT INTO background_jobs (run_at, status, payload)
        VALUES (
            NOW(),
            'queued',
            jsonb_build_object(
                'tag', 'NewLogPattern',
                'projectId', NEW.project_id,
                'createdAt', to_jsonb(NOW()),
                'patternHashes', jsonb_build_array(NEW.pattern_hash),
                'serviceName', NEW.service_name
            )
        );
    END IF;

    RETURN NULL;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE TRIGGER log_pattern_created_notify
    AFTER INSERT ON apis.log_patterns
    FOR EACH ROW
    EXECUTE PROCEDURE apis.new_log_pattern_proc();

ALTER TABLE projects.projects
ADD COLUMN IF NOT EXISTS log_pattern_alerts BOOLEAN NOT NULL DEFAULT false;


ALTER 
TABLE apis.endpoints +ADD COLUMN IF NOT EXISTS baseline_state TEXT NOT NULL DEFAULT 'learning', +ADD COLUMN IF NOT EXISTS baseline_samples INT NOT NULL DEFAULT 0, +ADD COLUMN IF NOT EXISTS baseline_updated_at TIMESTAMPTZ, +ADD COLUMN IF NOT EXISTS baseline_error_rate_mean FLOAT, +ADD COLUMN IF NOT EXISTS baseline_error_rate_stddev FLOAT, +ADD COLUMN IF NOT EXISTS baseline_latency_mean FLOAT, +ADD COLUMN IF NOT EXISTS baseline_latency_stddev FLOAT, +ADD COLUMN IF NOT EXISTS baseline_latency_p95 FLOAT, +ADD COLUMN IF NOT EXISTS baseline_latency_p99 FLOAT, +ADD COLUMN IF NOT EXISTS baseline_volume_hourly_mean FLOAT, +ADD COLUMN IF NOT EXISTS baseline_volume_hourly_stddev FLOAT; + + + +COMMIT; From 9a9a995415ed50292805fa3bfa391159f2100d5f Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Wed, 7 Jan 2026 16:22:39 +0000 Subject: [PATCH 03/71] towards errors and log patterns anomaly detection --- src/BackgroundJobs.hs | 220 ++++++- src/Models/Apis/Errors.hs | 591 ++++++++++++++++++ src/Models/Apis/Issues.hs | 219 +++++++ src/Models/Apis/LogPatterns.hs | 136 ++++ .../0026_anomaly_detection_baselines.sql | 1 - .../0027_log_pattern_monitoring.sql | 49 +- 6 files changed, 1174 insertions(+), 42 deletions(-) create mode 100644 src/Models/Apis/Errors.hs diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index f2360654c..449418fc8 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -38,6 +38,8 @@ import Log (LogLevel (..), Logger, runLogT) import Log qualified as LogLegacy import Models.Apis.Anomalies qualified as Anomalies import Models.Apis.Endpoints qualified as Endpoints +import Models.Apis.Errors qualified as Errors +import Models.Apis.LogPatterns qualified as LogPatterns import Models.Apis.Fields.Facets qualified as Facets import Models.Apis.Fields.Types qualified as Fields import Models.Apis.Issues qualified as Issues @@ -80,7 +82,7 @@ import System.Logging qualified as Log import System.Tracing (SpanStatus (..), Tracing, addEvent, setStatus, withSpan) import System.Types (ATBackgroundCtx, DB, runBackground) import UnliftIO.Exception (bracket, catch, try) -import Utils (DBField) +import Utils (DBField, toXXHash) data BgJobs @@ -114,6 +116,14 @@ data BgJobs | GitSyncFromRepo Projects.ProjectId | GitSyncPushDashboard Projects.ProjectId UUID.UUID -- projectId, dashboardId | GitSyncPushAllDashboards Projects.ProjectId -- Push all existing dashboards to repo + | -- Error processing jobs + ErrorBaselineCalculation Projects.ProjectId -- Calculate baselines for all errors in a project + | ErrorSpikeDetection Projects.ProjectId -- Detect error spikes and create issues + | NewErrorDetected Projects.ProjectId Text -- projectId, error hash - creates issue for new error + | -- Log pattern processing jobs + LogPatternBaselineCalculation Projects.ProjectId -- Calculate baselines for log patterns + | LogPatternSpikeDetection Projects.ProjectId -- Detect log pattern volume spikes + | NewLogPatternDetected Projects.ProjectId Text -- projectId, pattern hash - notify about new pattern deriving stock (Generic, Show) deriving anyclass (AE.FromJSON, AE.ToJSON) @@ -353,6 +363,14 @@ processBackgroundJob authCtx bgJob = GitSyncPushDashboard pid dashboardId -> gitSyncPushDashboard pid (UUIDId dashboardId) GitSyncPushAllDashboards pid -> gitSyncPushAllDashboards pid QueryMonitorsCheck -> pass -- checkTriggeredQueryMonitors + -- Error processing jobs + ErrorBaselineCalculation pid -> calculateErrorBaselines pid + ErrorSpikeDetection pid -> detectErrorSpikes pid authCtx + NewErrorDetected pid 
errorHash -> processNewError pid errorHash authCtx + -- Log pattern processing jobs + LogPatternBaselineCalculation pid -> calculateLogPatternBaselines pid + LogPatternSpikeDetection pid -> detectLogPatternSpikes pid authCtx + NewLogPatternDetected pid patternHash -> processNewLogPattern pid patternHash authCtx -- | Run hourly scheduled tasks for all projects @@ -382,6 +400,16 @@ runHourlyJob scheduledTime hour = do let batchJob = BackgroundJobs.GenerateOtelFacetsBatch (V.fromList batch) scheduledTime createJob conn "background_jobs" batchJob + -- Schedule baseline calculation and spike detection for active projects + liftIO $ withResource ctx.jobsPool \conn -> + forM_ activeProjects \pid -> do + -- Error baseline and spike detection + createJob conn "background_jobs" $ ErrorBaselineCalculation pid + createJob conn "background_jobs" $ ErrorSpikeDetection pid + -- Log pattern baseline and spike detection + createJob conn "background_jobs" $ LogPatternBaselineCalculation pid + createJob conn "background_jobs" $ LogPatternSpikeDetection pid + -- Cleanup expired query cache entries deletedCount <- QueryCache.cleanupExpiredCache Relude.when (deletedCount > 0) $ Log.logInfo "Cleaned up expired query cache entries" ("deleted_count", AE.toJSON deletedCount) @@ -507,9 +535,19 @@ processPatterns kind fieldName events pid scheduledTime since = do forM_ newPatterns \(patternTxt, ids) -> do let q = [text|UPDATE otel_logs_and_spans SET $fieldName = ? WHERE project_id = ? AND timestamp > ? AND id::text = ANY(?)|] - unless (V.null ids) - $ void - $ PG.execute (Query $ encodeUtf8 q) (patternTxt, pid, since, V.filter (/= "") ids) + unless (V.null ids) $ do + -- Update otel_logs_and_spans with pattern + void $ PG.execute (Query $ encodeUtf8 q) (patternTxt, pid, since, V.filter (/= "") ids) + + -- Also store in apis.log_patterns table (only for log patterns, not summaries) + Relude.when (kind == "log" && not (T.null patternTxt)) $ do + let patternHash = toXXHash patternTxt + -- Get a sample message from the first non-empty id + sampleMsg = case V.find (/= "") (V.map snd events) of + Just msg -> Just (T.take 500 msg) + Nothing -> Nothing + -- Upsert the pattern (increments count if exists, inserts if new) + void $ LogPatterns.upsertLogPattern pid patternTxt patternHash Nothing Nothing sampleMsg -- | Process a batch of (id, content) pairs through Drain @@ -1623,3 +1661,177 @@ monitorStatus triggerLessThan warnThreshold alertThreshold alertRecovery warnRec where breached t = if triggerLessThan then t >= value else t <= value recovered r t = if triggerLessThan then value > fromMaybe t r else value < fromMaybe t r + + +calculateErrorBaselines :: Projects.ProjectId -> ATBackgroundCtx () +calculateErrorBaselines pid = do + Log.logInfo "Calculating error baselines" pid + errors <- Errors.getActiveErrors pid + + forM_ errors \err -> do + -- Get hourly stats from error_events over last 7 days (168 hours) + statsM <- Errors.getErrorEventStats err.id 168 + + case statsM of + Nothing -> pass + Just stats -> do + let newSamples = stats.totalHours + newMean = stats.hourlyMean + newStddev = stats.hourlyStddev + -- Establish baseline after 24 hours of data + newState = if newSamples >= 24 then "established" else "learning" + _ <- Errors.updateBaseline err.id newState newMean newStddev newSamples + pass + + Log.logInfo "Finished calculating error baselines" (pid, length errors) + + +-- | Detect error spikes and create issues +-- Uses error_events table for current rate calculation +detectErrorSpikes :: Projects.ProjectId -> 
Config.AuthContext -> ATBackgroundCtx () +detectErrorSpikes pid authCtx = do + Log.logInfo "Detecting error spikes" pid + + -- Get all errors with their current hour counts in one query + errorsWithRates <- Errors.getErrorsWithCurrentRates pid + + forM_ errorsWithRates \errRate -> do + -- Only check errors with established baselines + case (errRate.baselineState, errRate.baselineMean, errRate.baselineStddev) of + ("established", Just mean, Just stddev) | stddev > 0 -> do + let currentRate = fromIntegral errRate.currentHourCount :: Double + zScore = (currentRate - mean) / stddev + isSpike = zScore > 3.0 && currentRate > mean + 5 + + Relude.when isSpike $ do + Log.logInfo "Error spike detected" (errRate.errorId, errRate.exceptionType, currentRate, mean, zScore) + + -- Get full error record for issue creation + errorM <- Errors.getErrorById errRate.errorId + whenJust errorM \err -> do + _ <- Errors.updateErrorState err.id Errors.ESEscalating + issue <- liftIO $ Issues.createErrorSpikeIssue pid err currentRate mean stddev + Issues.insertIssue issue + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + + Log.logInfo "Created issue for error spike" (pid, err.id, issue.id) + + _ -> pass -- Skip errors without established baseline + + Log.logInfo "Finished error spike detection" pid + + +-- | Process a new error and create an issue +processNewError :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBackgroundCtx () +processNewError pid errorHash authCtx = do + Log.logInfo "Processing new error" (pid, errorHash) + + -- Get the error by hash + errorM <- Errors.getErrorByHash pid errorHash "production" + + case errorM of + Nothing -> Log.logAttention "Error not found for new error processing" (pid, errorHash) + Just err -> do + -- Only create issue for truly new errors (state = 'new') + Relude.when (err.state == Errors.ESNew) $ do + -- Create a runtime exception issue + issue <- liftIO $ Issues.createNewErrorIssue pid err + Issues.insertIssue issue + + -- Queue LLM enhancement + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + + Log.logInfo "Created issue for new error" (pid, err.id, issue.id) + + +-- ============================================================================ +-- Log Pattern Processing Jobs +-- ============================================================================ + +-- | Calculate baselines for log patterns +-- Uses hourly counts from otel_logs_and_spans over the last 7 days +calculateLogPatternBaselines :: Projects.ProjectId -> ATBackgroundCtx () +calculateLogPatternBaselines pid = do + Log.logInfo "Calculating log pattern baselines" pid + + -- Get all non-ignored patterns + patterns <- LogPatterns.getLogPatterns pid Nothing 1000 0 + + forM_ patterns \lp -> do + -- Get hourly stats from otel_logs_and_spans over last 7 days (168 hours) + statsM <- LogPatterns.getPatternStats pid lp.pattern 168 + + case statsM of + Nothing -> pass + Just stats -> do + let newSamples = stats.totalHours + newMean = stats.hourlyMean + newStddev = stats.hourlyStddev + -- Establish baseline after 24 hours of data + newState = if newSamples >= 24 then "established" else "learning" + _ <- LogPatterns.updateBaseline pid lp.patternHash newState newMean newStddev newSamples + pass + + Log.logInfo "Finished calculating log pattern baselines" (pid, length patterns) + + +-- | Detect log pattern volume spikes and create 
issues +-- Uses otel_logs_and_spans table for current rate calculation +detectLogPatternSpikes :: Projects.ProjectId -> Config.AuthContext -> ATBackgroundCtx () +detectLogPatternSpikes pid authCtx = do + Log.logInfo "Detecting log pattern spikes" pid + + -- Get all patterns with their current hour counts in one query + patternsWithRates <- LogPatterns.getPatternsWithCurrentRates pid + + forM_ patternsWithRates \lpRate -> do + -- Only check patterns with established baselines + case (lpRate.baselineState, lpRate.baselineMean, lpRate.baselineStddev) of + ("established", Just mean, Just stddev) | stddev > 0 -> do + let currentRate = fromIntegral lpRate.currentHourCount :: Double + zScore = (currentRate - mean) / stddev + -- Spike detection: >3 std devs AND at least 10 more events than baseline + isSpike = zScore > 3.0 && currentRate > mean + 10 + + Relude.when isSpike $ do + Log.logInfo "Log pattern spike detected" (lpRate.patternId, lpRate.pattern, currentRate, mean, zScore) + + -- Get full pattern record for issue creation + patternM <- LogPatterns.getLogPatternById lpRate.patternId + whenJust patternM \lp -> do + issue <- liftIO $ Issues.createLogPatternSpikeIssue pid lp currentRate mean stddev + Issues.insertIssue issue + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + + Log.logInfo "Created issue for log pattern spike" (pid, lp.id, issue.id) + + _ -> pass -- Skip patterns without established baseline + + Log.logInfo "Finished log pattern spike detection" pid + + +-- | Process a new log pattern and create an issue +processNewLogPattern :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBackgroundCtx () +processNewLogPattern pid patternHash authCtx = do + Log.logInfo "Processing new log pattern" (pid, patternHash) + + -- Get the pattern by hash + patternM <- LogPatterns.getLogPatternByHash pid patternHash + + case patternM of + Nothing -> Log.logAttention "Log pattern not found for new pattern processing" (pid, patternHash) + Just lp -> do + -- Only create issue for truly new patterns (state = 'new') + Relude.when (lp.state == LogPatterns.LPSNew) $ do + -- Create a new log pattern issue + issue <- liftIO $ Issues.createNewLogPatternIssue pid lp + Issues.insertIssue issue + + -- Queue LLM enhancement + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + + Log.logInfo "Created issue for new log pattern" (pid, lp.id, issue.id) diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs new file mode 100644 index 000000000..3412a7f20 --- /dev/null +++ b/src/Models/Apis/Errors.hs @@ -0,0 +1,591 @@ +module Models.Apis.Errors ( + Error (..), + ErrorId, + ErrorState (..), + ErrorEvent (..), + ErrorEventId, + -- Queries + getErrors, + getErrorById, + getErrorByHash, + getActiveErrors, + upsertError, + insertErrorEvent, + updateOccurrenceCounts, + updateErrorState, + updateBaseline, + resolveError, + assignError, + -- Error Events (for baseline/spike detection) + HourlyBucket (..), + ErrorEventStats (..), + ErrorWithCurrentRate (..), + getHourlyErrorCounts, + getCurrentHourErrorCount, + getErrorEventStats, + checkErrorSpike, + getErrorsWithCurrentRates, +) +where + +import Data.Aeson qualified as AE +import Data.Time +import Data.UUID qualified as UUID +import Data.Vector qualified as V +import Database.PostgreSQL.Entity.Types (CamelToSnake, Entity, FieldModifiers, GenericEntity, PrimaryKey, Schema, 
TableName) +import Database.PostgreSQL.Simple (FromRow, Only (..), ToRow) +import Database.PostgreSQL.Simple.FromField (FromField, ResultError (ConversionFailed, UnexpectedNull), fromField, returnError) +import Database.PostgreSQL.Simple.SqlQQ (sql) +import Database.PostgreSQL.Simple.ToField (Action (Escape), ToField, toField) +import Deriving.Aeson qualified as DAE +import Effectful (Eff) +import Effectful.PostgreSQL qualified as PG +import Models.Projects.Projects qualified as Projects +import Models.Users.Users qualified as Users +import Relude hiding (id) +import System.Types (DB) + + +newtype ErrorId = ErrorId {unErrorId :: UUID.UUID} + deriving stock (Generic, Show) + deriving newtype (AE.FromJSON, AE.ToJSON, Eq, FromField, NFData, Ord, ToField) + + +newtype ErrorEventId = ErrorEventId {unErrorEventId :: UUID.UUID} + deriving stock (Generic, Show) + deriving newtype (AE.FromJSON, AE.ToJSON, Eq, FromField, NFData, Ord, ToField) + + +data ErrorState + = ESNew + | ESEscalating + | ESOngoing + | ESResolved + | ESRegressed + deriving stock (Eq, Generic, Show) + deriving anyclass (NFData) + deriving + (AE.FromJSON, AE.ToJSON) + via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] ErrorState + + +errorStateToText :: ErrorState -> Text +errorStateToText ESNew = "new" +errorStateToText ESEscalating = "escalating" +errorStateToText ESOngoing = "ongoing" +errorStateToText ESResolved = "resolved" +errorStateToText ESRegressed = "regressed" + + +parseErrorState :: (Eq s, IsString s) => s -> Maybe ErrorState +parseErrorState "new" = Just ESNew +parseErrorState "escalating" = Just ESEscalating +parseErrorState "ongoing" = Just ESOngoing +parseErrorState "resolved" = Just ESResolved +parseErrorState "regressed" = Just ESRegressed +parseErrorState _ = Nothing + + +instance ToField ErrorState where + toField = Escape . encodeUtf8 <$> errorStateToText + + +instance FromField ErrorState where + fromField f mdata = + case mdata of + Nothing -> returnError UnexpectedNull f "" + Just bs -> + case parseErrorState bs of + Just s -> pure s + Nothing -> returnError ConversionFailed f $ "Conversion error: Expected error state, got " <> decodeUtf8 bs <> " instead." 
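

-- A minimal sketch of the lifecycle these states encode (the transitions
-- themselves are driven by the spike-detection background jobs, not by this
-- module; the helper below is illustrative only and is not exported):
--
--   isActiveState :: ErrorState -> Bool
--   isActiveState ESResolved = False
--   isActiveState _ = True
--
-- Roughly: new -> escalating on a baseline spike, escalating/ongoing ->
-- resolved after a quiet period, and resolved -> regressed when the same
-- hash reappears.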
+ + +data Error = Error + { id :: ErrorId + , projectId :: Projects.ProjectId + , createdAt :: ZonedTime + , updatedAt :: ZonedTime + , exceptionType :: Text + , message :: Text + , stacktrace :: Text + , hash :: Text + , environment :: Text + , service :: Maybe Text + , runtime :: Maybe Text + , errorData :: AE.Value + , representativeMessage :: Maybe Text + , firstEventId :: Maybe ErrorEventId + , lastEventId :: Maybe ErrorEventId + , state :: ErrorState + , assigneeId :: Maybe Users.UserId + , assignedAt :: Maybe ZonedTime + , resolvedAt :: Maybe ZonedTime + , regressedAt :: Maybe ZonedTime + , occurrences1m :: Int + , occurrences5m :: Int + , occurrences1h :: Int + , occurrences24h :: Int + , quietMinutes :: Int + , resolutionThresholdMinutes :: Int + , baselineState :: Text + , baselineSamples :: Int + , baselineErrorRateMean :: Maybe Double + , baselineErrorRateStddev :: Maybe Double + , baselineUpdatedAt :: Maybe ZonedTime + , isIgnored :: Bool + , ignoredUntil :: Maybe ZonedTime + } + deriving stock (Generic, Show) + deriving anyclass (FromRow, NFData, ToRow) + deriving + (Entity) + via (GenericEntity '[Schema "apis", TableName "errors", PrimaryKey "id", FieldModifiers '[CamelToSnake]] Error) + deriving + (AE.FromJSON, AE.ToJSON) + via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] Error + + +data ErrorEvent = ErrorEvent + { id :: ErrorEventId + , projectId :: Projects.ProjectId + , errorId :: ErrorId + , occurredAt :: ZonedTime + , targetHash :: Text + , exceptionType :: Text + , message :: Text + , stackTrace :: Text + , serviceName :: Text + , release :: Maybe Text + , platformVersion :: Maybe Text + , requestMethod :: Maybe Text + , requestPath :: Maybe Text + , endpointHash :: Maybe Text + , traceId :: Maybe Text + , spanId :: Maybe Text + , parentSpanId :: Maybe Text + , userId :: Maybe Text + , userEmail :: Maybe Text + , userIp :: Maybe Text + , sessionId :: Maybe Text + , sampleRate :: Double + , ingestionId :: Maybe UUID.UUID + } + deriving stock (Generic, Show) + deriving anyclass (FromRow, NFData, ToRow) + deriving + (Entity) + via (GenericEntity '[Schema "apis", TableName "error_events", PrimaryKey "id", FieldModifiers '[CamelToSnake]] ErrorEvent) + deriving + (AE.FromJSON, AE.ToJSON) + via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] ErrorEvent + + +-- | Get errors for a project with optional state filter +getErrors :: DB es => Projects.ProjectId -> Maybe ErrorState -> Int -> Int -> Eff es [Error] +getErrors pid mstate limit offset = PG.query q (pid, maybe "%" errorStateToText mstate, limit, offset) + where + q = + [sql| + SELECT id, project_id, created_at, updated_at, + exception_type, message, stacktrace, hash, + environment, service, runtime, error_data, + representative_message, first_event_id, last_event_id, + state, assignee_id, assigned_at, resolved_at, regressed_at, + occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h, + quiet_minutes, resolution_threshold_minutes, + baseline_state, baseline_samples, + baseline_error_rate_mean, baseline_error_rate_stddev, baseline_updated_at, + is_ignored, ignored_until + FROM apis.errors + WHERE project_id = ? AND state LIKE ? + ORDER BY updated_at DESC + LIMIT ? OFFSET ? 
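+        -- NB: when no state filter is given, the caller binds '%', so the
+        -- LIKE above degenerates to "match any state"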
+ |] + + +-- | Get error by ID +getErrorById :: DB es => ErrorId -> Eff es (Maybe Error) +getErrorById eid = do + results <- PG.query q (eid,) + return $ listToMaybe results + where + q = + [sql| + SELECT id, project_id, created_at, updated_at, + exception_type, message, stacktrace, hash, + environment, service, runtime, error_data, + representative_message, first_event_id, last_event_id, + state, assignee_id, assigned_at, resolved_at, regressed_at, + occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h, + quiet_minutes, resolution_threshold_minutes, + baseline_state, baseline_samples, + baseline_error_rate_mean, baseline_error_rate_stddev, baseline_updated_at, + is_ignored, ignored_until + FROM apis.errors + WHERE id = ? + |] + + +-- | Get error by hash +getErrorByHash :: DB es => Projects.ProjectId -> Text -> Text -> Eff es (Maybe Error) +getErrorByHash pid hash env = do + results <- PG.query q (pid, hash, env) + return $ listToMaybe results + where + q = + [sql| + SELECT id, project_id, created_at, updated_at, + exception_type, message, stacktrace, hash, + environment, service, runtime, error_data, + representative_message, first_event_id, last_event_id, + state, assignee_id, assigned_at, resolved_at, regressed_at, + occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h, + quiet_minutes, resolution_threshold_minutes, + baseline_state, baseline_samples, + baseline_error_rate_mean, baseline_error_rate_stddev, baseline_updated_at, + is_ignored, ignored_until + FROM apis.errors + WHERE project_id = ? AND hash = ? AND environment = ? + |] + + +-- | Get active (non-resolved) errors +getActiveErrors :: DB es => Projects.ProjectId -> Eff es [Error] +getActiveErrors pid = PG.query q (pid,) + where + q = + [sql| + SELECT id, project_id, created_at, updated_at, + exception_type, message, stacktrace, hash, + environment, service, runtime, error_data, + representative_message, first_event_id, last_event_id, + state, assignee_id, assigned_at, resolved_at, regressed_at, + occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h, + quiet_minutes, resolution_threshold_minutes, + baseline_state, baseline_samples, + baseline_error_rate_mean, baseline_error_rate_stddev, baseline_updated_at, + is_ignored, ignored_until + FROM apis.errors + WHERE project_id = ? 
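+        -- resolved errors are filtered out here; if one recurs, the upsert
+        -- below flips it straight to 'regressed' rather than back to 'new'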
AND state != 'resolved' + ORDER BY updated_at DESC + |] + + +-- | Upsert an error (insert or update on conflict) +upsertError :: + DB es => + Projects.ProjectId -> + Text -> -- exception_type + Text -> -- message + Text -> -- stacktrace + Text -> -- hash + Text -> -- environment + Maybe Text -> -- service + Maybe Text -> -- runtime + Eff es ErrorId +upsertError pid exType msg stack hash env service runtime = do + results <- PG.query q (pid, exType, msg, stack, hash, env, service, runtime, msg) + case results of + [(Only eid)] -> return eid + _ -> error "upsertError: unexpected result" + where + q = + [sql| + INSERT INTO apis.errors ( + project_id, exception_type, message, stacktrace, hash, + environment, service, runtime, occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, 1, 1, 1, 1) + ON CONFLICT (project_id, hash, environment) DO UPDATE SET + updated_at = NOW(), + representative_message = EXCLUDED.message, + occurrences_1m = apis.errors.occurrences_1m + 1, + occurrences_5m = apis.errors.occurrences_5m + 1, + occurrences_1h = apis.errors.occurrences_1h + 1, + occurrences_24h = apis.errors.occurrences_24h + 1, + state = CASE + WHEN apis.errors.state = 'resolved' THEN 'regressed' + WHEN apis.errors.state = 'new' AND apis.errors.occurrences_1h > 10 THEN 'escalating' + ELSE apis.errors.state + END, + regressed_at = CASE + WHEN apis.errors.state = 'resolved' THEN NOW() + ELSE apis.errors.regressed_at + END + RETURNING id + |] + + +-- | Insert an error event +insertErrorEvent :: + DB es => + Projects.ProjectId -> + ErrorId -> + Text -> -- target_hash + Text -> -- exception_type + Text -> -- message + Text -> -- stack_trace + Text -> -- service_name + Maybe Text -> -- trace_id + Maybe Text -> -- span_id + Maybe Text -> -- user_id + Maybe Text -> -- user_email + Eff es ErrorEventId +insertErrorEvent pid errorId hash exType msg stack serviceName traceId spanId userId userEmail = do + results <- PG.query q (pid, errorId, hash, exType, msg, stack, serviceName, traceId, spanId, userId, userEmail) + case results of + [(Only eid)] -> return eid + _ -> error "insertErrorEvent: unexpected result" + where + q = + [sql| + INSERT INTO apis.error_events ( + project_id, error_id, target_hash, exception_type, message, + stack_trace, service_name, trace_id, span_id, user_id, user_email + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ RETURNING id + |] + + +-- | Update occurrence counts (called periodically to decay counts) +updateOccurrenceCounts :: DB es => Eff es Int64 +updateOccurrenceCounts = + PG.execute_ q + where + q = + [sql| + UPDATE apis.errors SET + occurrences_1m = 0, + occurrences_5m = CASE WHEN occurrences_5m > 0 THEN occurrences_5m - occurrences_1m ELSE 0 END, + occurrences_1h = CASE WHEN occurrences_1h > 0 THEN occurrences_1h - occurrences_5m ELSE 0 END, + occurrences_24h = CASE WHEN occurrences_24h > 0 THEN occurrences_24h - occurrences_1h ELSE 0 END, + quiet_minutes = quiet_minutes + 1, + state = CASE + WHEN state IN ('new', 'escalating', 'ongoing') AND quiet_minutes >= resolution_threshold_minutes THEN 'resolved' + ELSE state + END, + resolved_at = CASE + WHEN state IN ('new', 'escalating', 'ongoing') AND quiet_minutes >= resolution_threshold_minutes THEN NOW() + ELSE resolved_at + END + WHERE state != 'resolved' OR occurrences_24h > 0 + |] + + +-- | Update error state +updateErrorState :: DB es => ErrorId -> ErrorState -> Eff es Int64 +updateErrorState eid newState = + PG.execute q (errorStateToText newState, eid) + where + q = + [sql| + UPDATE apis.errors + SET state = ?, updated_at = NOW() + WHERE id = ? + |] + + +-- | Resolve an error manually +resolveError :: DB es => ErrorId -> Eff es Int64 +resolveError eid = + PG.execute q (eid,) + where + q = + [sql| + UPDATE apis.errors + SET state = 'resolved', resolved_at = NOW(), updated_at = NOW() + WHERE id = ? + |] + + +-- | Assign an error to a user +assignError :: DB es => ErrorId -> Users.UserId -> Eff es Int64 +assignError eid uid = + PG.execute q (uid, eid) + where + q = + [sql| + UPDATE apis.errors + SET assignee_id = ?, assigned_at = NOW(), updated_at = NOW() + WHERE id = ? + |] + + +-- | Update baseline data for an error +updateBaseline :: + DB es => + ErrorId -> + Text -> -- baseline_state ('learning' or 'established') + Double -> -- error_rate_mean + Double -> -- error_rate_stddev + Int -> -- samples + Eff es Int64 +updateBaseline eid bState rateMean rateStddev samples = + PG.execute q (bState, rateMean, rateStddev, samples, eid) + where + q = + [sql| + UPDATE apis.errors + SET baseline_state = ?, + baseline_error_rate_mean = ?, + baseline_error_rate_stddev = ?, + baseline_samples = ?, + baseline_updated_at = NOW() + WHERE id = ? + |] + + +-- ============================================================================ +-- Error Events Queries (for baseline calculation and spike detection) +-- ============================================================================ + +-- | Hourly bucket for error event aggregation +data HourlyBucket = HourlyBucket + { hourStart :: UTCTime + , eventCount :: Int + } + deriving stock (Generic, Show) + deriving anyclass (FromRow) + + +-- | Get hourly error counts for a specific error over a time range +-- Returns counts bucketed by hour for baseline calculation +getHourlyErrorCounts :: + DB es => + ErrorId -> + Int -> -- hours to look back + Eff es [HourlyBucket] +getHourlyErrorCounts eid hoursBack = + PG.query q (eid, hoursBack) + where + q = + [sql| + SELECT + date_trunc('hour', occurred_at) AS hour_start, + COUNT(*)::INT AS event_count + FROM apis.error_events + WHERE error_id = ? + AND occurred_at >= NOW() - INTERVAL '1 hour' * ? 
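+        -- INTERVAL '1 hour' * ? lets the integer lookback be bound as a
+        -- query parameter instead of being spliced into the SQL text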
+ GROUP BY date_trunc('hour', occurred_at) + ORDER BY hour_start + |] + + +-- | Get current hour error count for a specific error +getCurrentHourErrorCount :: DB es => ErrorId -> Eff es Int +getCurrentHourErrorCount eid = do + results <- PG.query q (eid,) + case results of + [Only count] -> return count + _ -> return 0 + where + q = + [sql| + SELECT COUNT(*)::INT + FROM apis.error_events + WHERE error_id = ? + AND occurred_at >= date_trunc('hour', NOW()) + |] + + +-- | Get error event stats for baseline calculation +-- Returns mean and stddev of hourly counts over the lookback period +data ErrorEventStats = ErrorEventStats + { hourlyMean :: Double + , hourlyStddev :: Double + , totalHours :: Int + , totalEvents :: Int + } + deriving stock (Generic, Show) + deriving anyclass (FromRow) + + +getErrorEventStats :: + DB es => + ErrorId -> + Int -> -- hours to look back + Eff es (Maybe ErrorEventStats) +getErrorEventStats eid hoursBack = do + results <- PG.query q (eid, hoursBack) + return $ listToMaybe results + where + q = + [sql| + WITH hourly_counts AS ( + SELECT + date_trunc('hour', occurred_at) AS hour_start, + COUNT(*) AS event_count + FROM apis.error_events + WHERE error_id = ? + AND occurred_at >= NOW() - INTERVAL '1 hour' * ? + GROUP BY date_trunc('hour', occurred_at) + ) + SELECT + COALESCE(AVG(event_count), 0)::FLOAT AS hourly_mean, + COALESCE(STDDEV(event_count), 0)::FLOAT AS hourly_stddev, + COUNT(*)::INT AS total_hours, + COALESCE(SUM(event_count), 0)::INT AS total_events + FROM hourly_counts + |] + + +-- | Check if an error is spiking compared to its baseline +-- Returns (isSpike, currentRate, zScore) if baseline is established +checkErrorSpike :: + DB es => + Error -> + Eff es (Maybe (Bool, Double, Double)) +checkErrorSpike err = do + case (err.baselineState, err.baselineErrorRateMean, err.baselineErrorRateStddev) of + ("established", Just mean, Just stddev) | stddev > 0 -> do + currentCount <- getCurrentHourErrorCount err.id + let currentRate = fromIntegral currentCount :: Double + zScore = (currentRate - mean) / stddev + -- Spike: > 3 std devs AND at least 5 more than mean (avoid noise on low-volume errors) + isSpike = zScore > 3.0 && currentRate > mean + 5 + return $ Just (isSpike, currentRate, zScore) + _ -> return Nothing + + +-- | Get all errors with their current hour counts (for batch spike detection) +data ErrorWithCurrentRate = ErrorWithCurrentRate + { errorId :: ErrorId + , projectId :: Projects.ProjectId + , exceptionType :: Text + , message :: Text + , service :: Maybe Text + , baselineState :: Text + , baselineMean :: Maybe Double + , baselineStddev :: Maybe Double + , currentHourCount :: Int + } + deriving stock (Generic, Show) + deriving anyclass (FromRow) + + +getErrorsWithCurrentRates :: DB es => Projects.ProjectId -> Eff es [ErrorWithCurrentRate] +getErrorsWithCurrentRates pid = + PG.query q (pid,) + where + q = + [sql| + SELECT + e.id, + e.project_id, + e.exception_type, + e.message, + e.service, + e.baseline_state, + e.baseline_error_rate_mean, + e.baseline_error_rate_stddev, + COALESCE(counts.current_count, 0)::INT AS current_hour_count + FROM apis.errors e + LEFT JOIN ( + SELECT error_id, COUNT(*) AS current_count + FROM apis.error_events + WHERE occurred_at >= date_trunc('hour', NOW()) + GROUP BY error_id + ) counts ON counts.error_id = e.id + WHERE e.project_id = ? 
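+        -- the LEFT JOIN plus COALESCE above keep errors with no events this
+        -- hour in the result set with a zero current_hour_count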
+ AND e.state != 'resolved' + AND e.is_ignored = false + |] diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index 9b534e4a2..1936339fa 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -39,6 +39,10 @@ module Models.Apis.Issues ( createAPIChangeIssue, createRuntimeExceptionIssue, createQueryAlertIssue, + createNewErrorIssue, + createErrorSpikeIssue, + createNewLogPatternIssue, + createLogPatternSpikeIssue, -- * Utilities issueIdText, @@ -81,6 +85,8 @@ import Effectful (Eff) import Effectful.PostgreSQL qualified as PG import Models.Apis.Anomalies (PayloadChange) import Models.Apis.Anomalies qualified as Anomalies +import Models.Apis.Errors qualified as Errors +import Models.Apis.LogPatterns qualified as LogPatterns import Models.Apis.RequestDumps qualified as RequestDumps import Models.Projects.Projects qualified as Projects import Models.Users.Users qualified as Users @@ -576,6 +582,219 @@ createQueryAlertIssue projectId queryId queryName queryExpr threshold actual thr } +-- | Create issue for a new error +createNewErrorIssue :: Projects.ProjectId -> Errors.Error -> IO Issue +createNewErrorIssue projectId err = do + issueId <- UUIDId <$> UUID4.nextRandom + now <- getCurrentTime + zonedNow <- utcToLocalZonedTime now + + let exceptionData = + RuntimeExceptionData + { errorType = err.exceptionType + , errorMessage = err.message + , stackTrace = err.stacktrace + , requestPath = Nothing + , requestMethod = Nothing + , occurrenceCount = 1 + , firstSeen = now + , lastSeen = now + } + + pure + Issue + { id = issueId + , createdAt = zonedNow + , updatedAt = zonedNow + , projectId = projectId + , issueType = RuntimeException + , endpointHash = err.hash + , acknowledgedAt = Nothing + , acknowledgedBy = Nothing + , archivedAt = Nothing + , title = "New Error: " <> err.exceptionType <> " - " <> T.take 80 err.message + , service = fromMaybe "unknown-service" err.service + , critical = True + , severity = "critical" + , affectedRequests = 1 + , affectedClients = 0 + , errorRate = Nothing + , recommendedAction = "Investigate the new error and implement a fix." 
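+      -- placeholder guidance; the EnhanceIssuesWithLLM jobs queued in
+      -- BackgroundJobs may refine this later (llmEnhancedAt below tracks that)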
+ , migrationComplexity = "n/a" + , issueData = Aeson $ AE.toJSON exceptionData + , requestPayloads = Aeson [] + , responsePayloads = Aeson [] + , llmEnhancedAt = Nothing + , llmEnhancementVersion = Nothing + } + + +-- | Create issue for an error spike +createErrorSpikeIssue :: + Projects.ProjectId -> + Errors.Error -> + Double -> -- current rate + Double -> -- baseline mean + Double -> -- baseline stddev + IO Issue +createErrorSpikeIssue projectId err currentRate baselineMean baselineStddev = do + issueId <- UUIDId <$> UUID4.nextRandom + now <- getCurrentTime + zonedNow <- utcToLocalZonedTime now + + let zScore = if baselineStddev > 0 then (currentRate - baselineMean) / baselineStddev else 0 + increasePercent = if baselineMean > 0 then ((currentRate / baselineMean) - 1) * 100 else 0 + exceptionData = + RuntimeExceptionData + { errorType = err.exceptionType + , errorMessage = err.message + , stackTrace = err.stacktrace + , requestPath = Nothing + , requestMethod = Nothing + , occurrenceCount = round currentRate + , firstSeen = now + , lastSeen = now + } + + pure + Issue + { id = issueId + , createdAt = zonedNow + , updatedAt = zonedNow + , projectId = projectId + , issueType = RuntimeException + , endpointHash = err.hash + , acknowledgedAt = Nothing + , acknowledgedBy = Nothing + , archivedAt = Nothing + , title = "Error Spike: " <> err.exceptionType <> " (" <> T.pack (show (round increasePercent :: Int)) <> "% increase)" + , service = fromMaybe "unknown-service" err.service + , critical = True + , severity = "critical" + , affectedRequests = round currentRate + , affectedClients = 0 + , errorRate = Just currentRate + , recommendedAction = "Error rate has spiked " <> T.pack (show (round zScore :: Int)) <> " standard deviations above baseline. Current: " <> T.pack (show (round currentRate :: Int)) <> "/hr, Baseline: " <> T.pack (show (round baselineMean :: Int)) <> "/hr. Investigate recent deployments or changes." + , migrationComplexity = "n/a" + , issueData = Aeson $ AE.toJSON exceptionData + , requestPayloads = Aeson [] + , responsePayloads = Aeson [] + , llmEnhancedAt = Nothing + , llmEnhancementVersion = Nothing + } + + +-- | Create an issue for a new log pattern +createNewLogPatternIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> IO Issue +createNewLogPatternIssue projectId lp = do + issueId <- UUIDId <$> UUID4.nextRandom + now <- getCurrentTime + zonedNow <- utcToLocalZonedTime now + + -- Use RuntimeExceptionData for log patterns (similar to errors) + let exceptionData = + RuntimeExceptionData + { errorType = "LogPattern" + , errorMessage = fromMaybe lp.pattern lp.sampleMessage + , stackTrace = "" + , requestPath = Nothing + , requestMethod = Nothing + , occurrenceCount = fromIntegral lp.occurrenceCount + , firstSeen = now + , lastSeen = now + } + + pure + Issue + { id = issueId + , createdAt = zonedNow + , updatedAt = zonedNow + , projectId = projectId + , issueType = RuntimeException + , endpointHash = lp.patternHash + , acknowledgedAt = Nothing + , acknowledgedBy = Nothing + , archivedAt = Nothing + , title = "New Log Pattern: " <> T.take 100 lp.pattern + , service = fromMaybe "unknown-service" lp.serviceName + , critical = False + , severity = case lp.logLevel of + Just "error" -> "high" + Just "warning" -> "medium" + _ -> "low" + , affectedRequests = fromIntegral lp.occurrenceCount + , affectedClients = 0 + , errorRate = Nothing + , recommendedAction = "A new log pattern has been detected. Review to ensure it's expected behavior. 
Pattern first seen at: " <> T.pack (show lp.firstSeenAt) + , migrationComplexity = "n/a" + , issueData = Aeson $ AE.toJSON exceptionData + , requestPayloads = Aeson [] + , responsePayloads = Aeson [] + , llmEnhancedAt = Nothing + , llmEnhancementVersion = Nothing + } + + +-- | Create an issue for a log pattern volume spike +createLogPatternSpikeIssue :: + Projects.ProjectId -> + LogPatterns.LogPattern -> + Double -> -- current rate (events/hour) + Double -> -- baseline mean + Double -> -- baseline stddev + IO Issue +createLogPatternSpikeIssue projectId lp currentRate baselineMean baselineStddev = do + issueId <- UUIDId <$> UUID4.nextRandom + now <- getCurrentTime + zonedNow <- utcToLocalZonedTime now + + let zScore = if baselineStddev > 0 then (currentRate - baselineMean) / baselineStddev else 0 + increasePercent = if baselineMean > 0 then ((currentRate / baselineMean) - 1) * 100 else 0 + exceptionData = + RuntimeExceptionData + { errorType = "LogPatternSpike" + , errorMessage = fromMaybe lp.pattern lp.sampleMessage + , stackTrace = "" + , requestPath = Nothing + , requestMethod = Nothing + , occurrenceCount = round currentRate + , firstSeen = now + , lastSeen = now + } + + pure + Issue + { id = issueId + , createdAt = zonedNow + , updatedAt = zonedNow + , projectId = projectId + , issueType = RuntimeException + , endpointHash = lp.patternHash + , acknowledgedAt = Nothing + , acknowledgedBy = Nothing + , archivedAt = Nothing + , title = "Log Pattern Spike: " <> T.take 60 lp.pattern <> " (" <> T.pack (show (round increasePercent :: Int)) <> "% increase)" + , service = fromMaybe "unknown-service" lp.serviceName + , critical = case lp.logLevel of + Just "error" -> True + _ -> False + , severity = case lp.logLevel of + Just "error" -> "critical" + Just "warning" -> "high" + _ -> "medium" + , affectedRequests = round currentRate + , affectedClients = 0 + , errorRate = Just currentRate + , recommendedAction = "Log pattern volume has spiked " <> T.pack (show (round zScore :: Int)) <> " standard deviations above baseline. Current: " <> T.pack (show (round currentRate :: Int)) <> "/hr, Baseline: " <> T.pack (show (round baselineMean :: Int)) <> "/hr. Investigate recent changes." + , migrationComplexity = "n/a" + , issueData = Aeson $ AE.toJSON exceptionData + , requestPayloads = Aeson [] + , responsePayloads = Aeson [] + , llmEnhancedAt = Nothing + , llmEnhancementVersion = Nothing + } + + -- | Conversation type for AI chats data ConversationType = CTAnomaly | CTTrace | CTLogExplorer | CTDashboard | CTSlackThread | CTDiscordThread deriving stock (Eq, Generic, Read, Show) diff --git a/src/Models/Apis/LogPatterns.hs b/src/Models/Apis/LogPatterns.hs index 9c4d14480..bf35b5a66 100644 --- a/src/Models/Apis/LogPatterns.hs +++ b/src/Models/Apis/LogPatterns.hs @@ -10,6 +10,14 @@ module Models.Apis.LogPatterns ( upsertLogPattern, updateLogPatternStats, updateBaseline, + -- Pattern stats from otel_logs_and_spans + PatternStats (..), + getPatternStats, + getCurrentHourPatternCount, + -- Pattern with current rate for spike detection + LogPatternWithRate (..), + getPatternsWithCurrentRates, + getLogPatternById, ) where @@ -249,3 +257,131 @@ updateBaseline pid patHash bState hourlyMean hourlyStddev samples = baseline_updated_at = NOW() WHERE project_id = ? AND pattern_hash = ? 
|] + + +-- ============================================================================ +-- Log Pattern Occurrence Queries (from otel_logs_and_spans) +-- ============================================================================ + +-- | Stats for a log pattern from otel_logs_and_spans +data PatternStats = PatternStats + { hourlyMean :: Double + , hourlyStddev :: Double + , totalHours :: Int + , totalEvents :: Int + } + deriving stock (Generic, Show) + deriving anyclass (FromRow) + + +-- | Get pattern stats from otel_logs_and_spans +getPatternStats :: + DB es => + Projects.ProjectId -> + Text -> -- pattern (log_pattern value) + Int -> -- hours to look back + Eff es (Maybe PatternStats) +getPatternStats pid pattern hoursBack = do + results <- PG.query q (pid, pattern, hoursBack) + return $ listToMaybe results + where + q = + [sql| + WITH hourly_counts AS ( + SELECT + date_trunc('hour', timestamp) AS hour_start, + COUNT(*) AS event_count + FROM otel_logs_and_spans + WHERE project_id = ?::text + AND log_pattern = ? + AND timestamp >= NOW() - INTERVAL '1 hour' * ? + GROUP BY date_trunc('hour', timestamp) + ) + SELECT + COALESCE(AVG(event_count), 0)::FLOAT AS hourly_mean, + COALESCE(STDDEV(event_count), 0)::FLOAT AS hourly_stddev, + COUNT(*)::INT AS total_hours, + COALESCE(SUM(event_count), 0)::INT AS total_events + FROM hourly_counts + |] + + +-- | Get current hour count for a pattern +getCurrentHourPatternCount :: DB es => Projects.ProjectId -> Text -> Eff es Int +getCurrentHourPatternCount pid pattern = do + results <- PG.query q (pid, pattern) + case results of + [Only count] -> return count + _ -> return 0 + where + q = + [sql| + SELECT COUNT(*)::INT + FROM otel_logs_and_spans + WHERE project_id = ?::text + AND log_pattern = ? + AND timestamp >= date_trunc('hour', NOW()) + |] + + +-- | Log pattern with current rate (for batch spike detection) +data LogPatternWithRate = LogPatternWithRate + { patternId :: LogPatternId + , projectId :: Projects.ProjectId + , pattern :: Text + , patternHash :: Text + , baselineState :: Text + , baselineMean :: Maybe Double + , baselineStddev :: Maybe Double + , currentHourCount :: Int + } + deriving stock (Generic, Show) + deriving anyclass (FromRow) + + +-- | Get all patterns with their current hour counts +getPatternsWithCurrentRates :: DB es => Projects.ProjectId -> Eff es [LogPatternWithRate] +getPatternsWithCurrentRates pid = + PG.query q (pid, pid) + where + q = + [sql| + SELECT + lp.id, + lp.project_id, + lp.pattern, + lp.pattern_hash, + lp.baseline_state, + lp.baseline_volume_hourly_mean, + lp.baseline_volume_hourly_stddev, + COALESCE(counts.current_count, 0)::INT AS current_hour_count + FROM apis.log_patterns lp + LEFT JOIN ( + SELECT log_pattern, COUNT(*) AS current_count + FROM otel_logs_and_spans + WHERE project_id = ?::text + AND timestamp >= date_trunc('hour', NOW()) + AND log_pattern IS NOT NULL + GROUP BY log_pattern + ) counts ON counts.log_pattern = lp.pattern + WHERE lp.project_id = ? 
+ AND lp.state != 'ignored' + |] + + +-- | Get a pattern by ID +getLogPatternById :: DB es => LogPatternId -> Eff es (Maybe LogPattern) +getLogPatternById lpid = do + results <- PG.query q (lpid,) + return $ listToMaybe results + where + q = + [sql| + SELECT id, project_id, created_at, updated_at, pattern, pattern_hash, + service_name, log_level, sample_message, first_seen_at, last_seen_at, + occurrence_count, state, acknowledged_by, acknowledged_at, + baseline_state, baseline_volume_hourly_mean, baseline_volume_hourly_stddev, + baseline_samples, baseline_updated_at + FROM apis.log_patterns + WHERE id = ? + |] diff --git a/static/migrations/0026_anomaly_detection_baselines.sql b/static/migrations/0026_anomaly_detection_baselines.sql index d71dba0fc..ff0992051 100644 --- a/static/migrations/0026_anomaly_detection_baselines.sql +++ b/static/migrations/0026_anomaly_detection_baselines.sql @@ -87,7 +87,6 @@ CREATE TABLE apis.errors ( baseline_state TEXT NOT NULL DEFAULT 'learning', baseline_samples INT NOT NULL DEFAULT 0, - baseline_updated_at TIMESTAMPTZ, baseline_error_rate_mean FLOAT, baseline_error_rate_stddev FLOAT, baseline_updated_at TIMESTAMPTZ, diff --git a/static/migrations/0027_log_pattern_monitoring.sql b/static/migrations/0027_log_pattern_monitoring.sql index ff49b0500..a18cc2ff2 100644 --- a/static/migrations/0027_log_pattern_monitoring.sql +++ b/static/migrations/0027_log_pattern_monitoring.sql @@ -41,48 +41,23 @@ CREATE INDEX IF NOT EXISTS idx_log_patterns_service ON apis.log_patterns(project CREATE OR REPLACE FUNCTION apis.new_log_pattern_proc() RETURNS trigger AS $$ -DECLARE - existing_job_id INT; - existing_pattern_hashes JSONB; BEGIN IF TG_WHEN <> 'AFTER' THEN RAISE EXCEPTION 'apis.new_log_pattern_proc() may only run as an AFTER trigger'; END IF; - -- Look for existing queued job to batch patterns together - SELECT id, payload->'patternHashes' - INTO existing_job_id, existing_pattern_hashes - FROM background_jobs - WHERE payload->>'tag' = 'NewLogPattern' - AND payload->>'projectId' = NEW.project_id::TEXT - AND status = 'queued' - ORDER BY run_at ASC - LIMIT 1; - - IF existing_job_id IS NOT NULL THEN - -- Append to existing job - UPDATE background_jobs SET payload = jsonb_build_object( - 'tag', 'NewLogPattern', - 'projectId', NEW.project_id, - 'createdAt', to_jsonb(NOW()), - 'patternHashes', existing_pattern_hashes || to_jsonb(NEW.pattern_hash), - 'serviceName', NEW.service_name - ) WHERE id = existing_job_id; - ELSE - -- Create new job - INSERT INTO background_jobs (run_at, status, payload) - VALUES ( - NOW(), - 'queued', - jsonb_build_object( - 'tag', 'NewLogPattern', - 'projectId', NEW.project_id, - 'createdAt', to_jsonb(NOW()), - 'patternHashes', jsonb_build_array(NEW.pattern_hash), - 'serviceName', NEW.service_name - ) - ); - END IF; + -- Create a job for the new pattern + -- JSON format matches Aeson's derived FromJSON for: + -- NewLogPatternDetected Projects.ProjectId Text + INSERT INTO background_jobs (run_at, status, payload) + VALUES ( + NOW(), + 'queued', + jsonb_build_object( + 'tag', 'NewLogPatternDetected', + 'contents', jsonb_build_array(NEW.project_id, NEW.pattern_hash) + ) + ); RETURN NULL; END; From 545a9ad54afdbe6e902cc430b81027f7483fce7d Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Wed, 7 Jan 2026 20:29:39 +0000 Subject: [PATCH 04/71] processErrors --- monoscope.cabal | 2 + src/BackgroundJobs.hs | 1 - src/Models/Apis/Errors.hs | 170 +++++------------- src/Models/Apis/RequestDumps.hs | 14 +- .../0026_anomaly_detection_baselines.sql | 24 +-- 
.../migrations/0028_rebuild_issues_table.sql | 65 +++++++ 6 files changed, 134 insertions(+), 142 deletions(-) create mode 100644 static/migrations/0028_rebuild_issues_table.sql diff --git a/monoscope.cabal b/monoscope.cabal index 6cdaca782..cefed77aa 100644 --- a/monoscope.cabal +++ b/monoscope.cabal @@ -80,6 +80,8 @@ library Models.Apis.Monitors Models.Apis.Reports Models.Apis.RequestDumps + Models.Apis.Errors + Models.Apis.LogPatterns Models.Apis.Shapes Models.Apis.Slack Models.Projects.Dashboards diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 449418fc8..981b8892f 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -1671,7 +1671,6 @@ calculateErrorBaselines pid = do forM_ errors \err -> do -- Get hourly stats from error_events over last 7 days (168 hours) statsM <- Errors.getErrorEventStats err.id 168 - case statsM of Nothing -> pass Just stats -> do diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index 3412a7f20..d0f93d651 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -97,7 +97,7 @@ instance FromField ErrorState where Just bs -> case parseErrorState bs of Just s -> pure s - Nothing -> returnError ConversionFailed f $ "Conversion error: Expected error state, got " <> decodeUtf8 bs <> " instead." + Nothing -> ESNew data Error = Error @@ -271,81 +271,6 @@ getActiveErrors pid = PG.query q (pid,) |] --- | Upsert an error (insert or update on conflict) -upsertError :: - DB es => - Projects.ProjectId -> - Text -> -- exception_type - Text -> -- message - Text -> -- stacktrace - Text -> -- hash - Text -> -- environment - Maybe Text -> -- service - Maybe Text -> -- runtime - Eff es ErrorId -upsertError pid exType msg stack hash env service runtime = do - results <- PG.query q (pid, exType, msg, stack, hash, env, service, runtime, msg) - case results of - [(Only eid)] -> return eid - _ -> error "upsertError: unexpected result" - where - q = - [sql| - INSERT INTO apis.errors ( - project_id, exception_type, message, stacktrace, hash, - environment, service, runtime, occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, 1, 1, 1, 1) - ON CONFLICT (project_id, hash, environment) DO UPDATE SET - updated_at = NOW(), - representative_message = EXCLUDED.message, - occurrences_1m = apis.errors.occurrences_1m + 1, - occurrences_5m = apis.errors.occurrences_5m + 1, - occurrences_1h = apis.errors.occurrences_1h + 1, - occurrences_24h = apis.errors.occurrences_24h + 1, - state = CASE - WHEN apis.errors.state = 'resolved' THEN 'regressed' - WHEN apis.errors.state = 'new' AND apis.errors.occurrences_1h > 10 THEN 'escalating' - ELSE apis.errors.state - END, - regressed_at = CASE - WHEN apis.errors.state = 'resolved' THEN NOW() - ELSE apis.errors.regressed_at - END - RETURNING id - |] - - --- | Insert an error event -insertErrorEvent :: - DB es => - Projects.ProjectId -> - ErrorId -> - Text -> -- target_hash - Text -> -- exception_type - Text -> -- message - Text -> -- stack_trace - Text -> -- service_name - Maybe Text -> -- trace_id - Maybe Text -> -- span_id - Maybe Text -> -- user_id - Maybe Text -> -- user_email - Eff es ErrorEventId -insertErrorEvent pid errorId hash exType msg stack serviceName traceId spanId userId userEmail = do - results <- PG.query q (pid, errorId, hash, exType, msg, stack, serviceName, traceId, spanId, userId, userEmail) - case results of - [(Only eid)] -> return eid - _ -> error "insertErrorEvent: unexpected result" - where - q = - [sql| - INSERT INTO 
apis.error_events ( - project_id, error_id, target_hash, exception_type, message, - stack_trace, service_name, trace_id, span_id, user_id, user_email - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) - RETURNING id - |] - - -- | Update occurrence counts (called periodically to decay counts) updateOccurrenceCounts :: DB es => Eff es Int64 updateOccurrenceCounts = @@ -371,54 +296,25 @@ updateOccurrenceCounts = |] --- | Update error state updateErrorState :: DB es => ErrorId -> ErrorState -> Eff es Int64 -updateErrorState eid newState = - PG.execute q (errorStateToText newState, eid) +updateErrorState eid newState = PG.execute q (errorStateToText newState, eid) where q = - [sql| - UPDATE apis.errors - SET state = ?, updated_at = NOW() - WHERE id = ? - |] + [sql| UPDATE apis.errors SET state = ?, updated_at = NOW() WHERE id = ? |] - --- | Resolve an error manually resolveError :: DB es => ErrorId -> Eff es Int64 -resolveError eid = - PG.execute q (eid,) +resolveError eid = PG.execute q (eid,) where - q = - [sql| - UPDATE apis.errors - SET state = 'resolved', resolved_at = NOW(), updated_at = NOW() - WHERE id = ? - |] - + q = [sql| UPDATE apis.errors SET state = 'resolved', resolved_at = NOW(), updated_at = NOW() WHERE id = ? |] --- | Assign an error to a user assignError :: DB es => ErrorId -> Users.UserId -> Eff es Int64 -assignError eid uid = - PG.execute q (uid, eid) +assignError eid uid = PG.execute q (uid, eid) where - q = - [sql| - UPDATE apis.errors - SET assignee_id = ?, assigned_at = NOW(), updated_at = NOW() - WHERE id = ? - |] + q = [sql| UPDATE apis.errors SET assignee_id = ?, assigned_at = NOW(), updated_at = NOW() WHERE id = ? |] -- | Update baseline data for an error -updateBaseline :: - DB es => - ErrorId -> - Text -> -- baseline_state ('learning' or 'established') - Double -> -- error_rate_mean - Double -> -- error_rate_stddev - Int -> -- samples - Eff es Int64 +updateBaseline :: DB es => ErrorId -> Text -> Double -> Double -> Int -> Eff es Int64 updateBaseline eid bState rateMean rateStddev samples = PG.execute q (bState, rateMean, rateStddev, samples, eid) where @@ -433,11 +329,6 @@ updateBaseline eid bState rateMean rateStddev samples = WHERE id = ? 
|]
-
--- ============================================================================
-- Error Events Queries (for baseline calculation and spike detection)
-- ============================================================================
-
 -- | Hourly bucket for error event aggregation
 data HourlyBucket = HourlyBucket
   { hourStart :: UTCTime
@@ -499,11 +390,7 @@ data ErrorEventStats = ErrorEventStats
   deriving anyclass (FromRow)
-getErrorEventStats ::
-  DB es =>
-  ErrorId ->
-  Int -> -- hours to look back
-  Eff es (Maybe ErrorEventStats)
+getErrorEventStats :: DB es => ErrorId -> Int -> Eff es (Maybe ErrorEventStats)
 getErrorEventStats eid hoursBack = do
   results <- PG.query q (eid, hoursBack)
   return $ listToMaybe results
@@ -589,3 +476,42 @@ getErrorsWithCurrentRates pid =
         AND e.state != 'resolved'
         AND e.is_ignored = false
     |]
+
+-- | Upsert an error (insert or update on conflict)
+upsertErrorQueryAndParam :: DB es => Projects.ProjectId -> (Query, [DBField])
+upsertErrorQueryAndParam pid err exType msg stack hash env service runtime = (q, params)
+  where
+    q =
+      [sql|
+      INSERT INTO apis.errors (
+        project_id, exception_type, message, stacktrace, hash,
+        environment, service, runtime, error_data, occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h
+      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 1, 1, 1, 1)
+      ON CONFLICT (hash) DO UPDATE SET
+        updated_at = NOW(),
+        representative_message = EXCLUDED.message,
+        occurrences_1m = apis.errors.occurrences_1m + 1,
+        occurrences_5m = apis.errors.occurrences_5m + 1,
+        occurrences_1h = apis.errors.occurrences_1h + 1,
+        occurrences_24h = apis.errors.occurrences_24h + 1,
+        state = CASE
+          WHEN apis.errors.state = 'resolved' THEN 'regressed'
+          WHEN apis.errors.state = 'new' AND apis.errors.occurrences_1h > 10 THEN 'escalating'
+          ELSE apis.errors.state
+        END,
+        regressed_at = CASE
+          WHEN apis.errors.state = 'resolved' THEN NOW()
+          ELSE apis.errors.regressed_at
+        END
+      |]
+    params =
+      [ MkDBField err.projectId
+      , MkDBField err.errorType
+      , MkDBField err.message
+      , MkDBField err.stackTrace
+      , MkDBField err.hash
+      , MkDBField err.environment
+      , MkDBField err.serviceName
+      , MkDBField err.runtime
+      , MkDBField err
+      ]
diff --git a/src/Models/Apis/RequestDumps.hs b/src/Models/Apis/RequestDumps.hs
index 2c8a61dcd..2fd06f781 100644
--- a/src/Models/Apis/RequestDumps.hs
+++ b/src/Models/Apis/RequestDumps.hs
@@ -178,6 +178,7 @@ removeQueryParams statusCode urlPath =
     (before, after) -> before -- Query parameters found, stripping them
 
+
 data ATError = ATError
   { projectId :: Maybe Projects.ProjectId
   , when :: UTCTime
@@ -190,10 +191,17 @@ data ATError = ATError
   , technology :: Maybe SDKTypes
   , requestMethod :: Maybe Text
   , requestPath :: Maybe Text
-  , spanId :: Maybe Text
-  , traceId :: Maybe Text
   , serviceName :: Maybe Text
-  , stack :: Maybe Text
+  , environment :: Maybe Text
+  , runtime :: Maybe Text,
+  , traceId :: Maybe Text,
+  , spanId :: Maybe Text
+  , parentSpanId :: Maybe Text
+  , endpointHash :: Maybe Text
+  , userId :: Maybe Text
+  , userEmail :: Maybe Text
+  , userIp :: Maybe Text
+  , sessionId :: Maybe Text
   }
   deriving stock (Generic, Show)
   deriving anyclass (Default, NFData)
diff --git a/static/migrations/0026_anomaly_detection_baselines.sql b/static/migrations/0026_anomaly_detection_baselines.sql
index ff0992051..336139411 100644
--- a/static/migrations/0026_anomaly_detection_baselines.sql
+++ b/static/migrations/0026_anomaly_detection_baselines.sql
@@ -53,6 +53,8 @@
 DROP TRIGGER IF EXISTS error_created_anomaly ON apis.errors;
 DROP TABLE IF EXISTS
apis.errors; + + CREATE TABLE apis.errors ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), project_id UUID NOT NULL REFERENCES projects.projects(id) ON DELETE CASCADE, @@ -68,7 +70,6 @@ CREATE TABLE apis.errors ( service TEXT, runtime TEXT, error_data JSONB NOT NULL DEFAULT '{}', - representative_message TEXT, first_event_id UUID, last_event_id UUID, state TEXT NOT NULL DEFAULT 'new', @@ -113,7 +114,6 @@ CREATE TRIGGER error_created_anomaly AFTER INSERT ON apis.errors FOR EACH ROW EX CREATE TABLE apis.error_events ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), project_id UUID NOT NULL REFERENCES projects.projects(id) ON DELETE CASCADE, - error_id UUID NOT NULL REFERENCES apis.errors(id) ON DELETE CASCADE, occurred_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), target_hash TEXT NOT NULL REFERENCES apis.errors(hash) ON DELETE CASCADE, exception_type TEXT NOT NULL, @@ -121,32 +121,24 @@ CREATE TABLE apis.error_events ( stack_trace TEXT NOT NULL, service_name TEXT NOT NULL, - release TEXT, - platform_version TEXT, - - request_method TEXT, - request_path TEXT, - endpoint_hash TEXT, - + release TEXT, + environment TEXT, + request_method TEXT, + request_path TEXT, + endpoint_hash TEXT, trace_id TEXT, span_id TEXT, parent_span_id TEXT, - user_id TEXT, user_email TEXT, user_ip INET, session_id TEXT, - sample_rate FLOAT NOT NULL DEFAULT 1.0, - ingestion_id UUID ); -- Indexes for efficient queries -CREATE INDEX idx_error_events_error ON apis.error_events (error_id, occurred_at DESC); CREATE INDEX idx_error_events_project ON apis.error_events (project_id, occurred_at DESC); -CREATE INDEX idx_error_events_trace ON apis.error_events (trace_id); +CREATE INDEX idx_error_event_error_hash ON apis.error_events (project_id, target_hash); CREATE INDEX idx_error_events_service ON apis.error_events (service_name); -CREATE INDEX idx_error_events_tags USING GIN (tags); - COMMIT; diff --git a/static/migrations/0028_rebuild_issues_table.sql b/static/migrations/0028_rebuild_issues_table.sql new file mode 100644 index 000000000..799c3542f --- /dev/null +++ b/static/migrations/0028_rebuild_issues_table.sql @@ -0,0 +1,65 @@ +BEGIN; + + +ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'new_error'; +ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'new_log_pattern'; +ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'api-change'; +ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'error_escalating'; +ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'error_regressed'; +ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'log_pattern_rate_change'; +ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'endpoint_latency_degradation'; +ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'endpoint_error_rate_spike'; +ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'endpoint_volume_rate_change'; +ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'alert-triggered'; + +DROP TABLE IF EXISTS apis.issues CASCADE; + +CREATE TABLE apis.issues ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + project_id UUID NOT NULL REFERENCES projects.projects(id) ON DELETE CASCADE, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + issue_type apis.issue_type NOT NULL, + source_type TEXT NOT NULL, -- 'error', 'log_pattern', 'endpoint' + target_hash TEXT NOT NULL, -- links to error.hash, log_pattern.pattern_hash, endpoint.hash + + title TEXT NOT NULL DEFAULT '', + service TEXT, + environment TEXT, + severity TEXT NOT NULL DEFAULT 'warning', -- 'critical', 'warning', 'info' + critical BOOLEAN NOT 
NULL DEFAULT FALSE, + + -- Lifecycle + first_seen_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + last_seen_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + occurrence_count INT NOT NULL DEFAULT 1, + acknowledged_at TIMESTAMPTZ, + acknowledged_by UUID REFERENCES users.users(id) ON DELETE SET NULL, + resolved_at TIMESTAMPTZ, + archived_at TIMESTAMPTZ, + + -- Flexible category-specific data + issue_data JSONB NOT NULL DEFAULT '{}' +); + +SELECT manage_updated_at('apis.issues'); + +-- Indexes +CREATE INDEX idx_issues_project_id ON apis.issues(project_id); +CREATE INDEX idx_issues_project_created ON apis.issues(project_id, created_at DESC); +CREATE INDEX idx_issues_issue_type ON apis.issues(project_id, issue_type); +CREATE INDEX idx_issues_source ON apis.issues(project_id, source_type, target_hash); +CREATE INDEX idx_issues_severity ON apis.issues(project_id, severity); +CREATE INDEX idx_issues_critical ON apis.issues(project_id, critical) WHERE critical = TRUE; +CREATE INDEX idx_issues_open ON apis.issues(project_id, acknowledged_at, archived_at) + WHERE acknowledged_at IS NULL AND archived_at IS NULL; +CREATE INDEX idx_issues_unresolved ON apis.issues(project_id, resolved_at) + WHERE resolved_at IS NULL AND archived_at IS NULL; + +-- Dedupe: one open issue per source per issue type +CREATE UNIQUE INDEX idx_issues_unique_open + ON apis.issues(project_id, issue_type, target_hash) + WHERE resolved_at IS NULL AND archived_at IS NULL; + +COMMIT; From 1a1bf26e37e6f118a34759a84b4ec65ee9ad301d Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Wed, 7 Jan 2026 22:38:46 +0000 Subject: [PATCH 05/71] make compile --- package.yaml | 1 - src/BackgroundJobs.hs | 4 +-- src/Models/Apis/Errors.hs | 21 ++++++------ src/Models/Apis/Issues.hs | 8 ++--- src/Models/Apis/LogPatterns.hs | 53 ++++++------------------------- src/Models/Apis/RequestDumps.hs | 4 +-- src/Models/Telemetry/Telemetry.hs | 2 +- src/Pages/Anomalies.hs | 2 +- src/Pkg/Mail.hs | 2 +- 9 files changed, 32 insertions(+), 65 deletions(-) diff --git a/package.yaml b/package.yaml index cb44df54d..61d686ab5 100644 --- a/package.yaml +++ b/package.yaml @@ -13,7 +13,6 @@ custom-setup: dependencies: - base - Cabal - - proto-lens-setup build-tools: - proto-lens-protoc:proto-lens-protoc diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 981b8892f..78190b3c5 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -1760,7 +1760,7 @@ calculateLogPatternBaselines pid = do forM_ patterns \lp -> do -- Get hourly stats from otel_logs_and_spans over last 7 days (168 hours) - statsM <- LogPatterns.getPatternStats pid lp.pattern 168 + statsM <- LogPatterns.getPatternStats pid lp.logPattern 168 case statsM of Nothing -> pass @@ -1795,7 +1795,7 @@ detectLogPatternSpikes pid authCtx = do isSpike = zScore > 3.0 && currentRate > mean + 10 Relude.when isSpike $ do - Log.logInfo "Log pattern spike detected" (lpRate.patternId, lpRate.pattern, currentRate, mean, zScore) + Log.logInfo "Log pattern spike detected" (lpRate.patternId, lpRate.logPattern, currentRate, mean, zScore) -- Get full pattern record for issue creation patternM <- LogPatterns.getLogPatternById lpRate.patternId diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index d0f93d651..3190c2467 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -9,8 +9,6 @@ module Models.Apis.Errors ( getErrorById, getErrorByHash, getActiveErrors, - upsertError, - insertErrorEvent, updateOccurrenceCounts, updateErrorState, updateBaseline, @@ -37,6 +35,7 @@ import 
Database.PostgreSQL.Simple (FromRow, Only (..), ToRow) import Database.PostgreSQL.Simple.FromField (FromField, ResultError (ConversionFailed, UnexpectedNull), fromField, returnError) import Database.PostgreSQL.Simple.SqlQQ (sql) import Database.PostgreSQL.Simple.ToField (Action (Escape), ToField, toField) +import Database.PostgreSQL.Simple.Types (Query (Query)) import Deriving.Aeson qualified as DAE import Effectful (Eff) import Effectful.PostgreSQL qualified as PG @@ -44,6 +43,8 @@ import Models.Projects.Projects qualified as Projects import Models.Users.Users qualified as Users import Relude hiding (id) import System.Types (DB) +import Utils (DBField (MkDBField)) +import Models.Apis.RequestDumps qualified as RequestDump newtype ErrorId = ErrorId {unErrorId :: UUID.UUID} @@ -97,7 +98,7 @@ instance FromField ErrorState where Just bs -> case parseErrorState bs of Just s -> pure s - Nothing -> ESNew + Nothing -> pure ESNew data Error = Error @@ -206,7 +207,7 @@ getErrors pid mstate limit offset = PG.query q (pid, maybe "%" errorStateToText -- | Get error by ID getErrorById :: DB es => ErrorId -> Eff es (Maybe Error) getErrorById eid = do - results <- PG.query q (eid,) + results <- PG.query q (Only eid) return $ listToMaybe results where q = @@ -251,7 +252,7 @@ getErrorByHash pid hash env = do -- | Get active (non-resolved) errors getActiveErrors :: DB es => Projects.ProjectId -> Eff es [Error] -getActiveErrors pid = PG.query q (pid,) +getActiveErrors pid = PG.query q (Only pid) where q = [sql| @@ -303,7 +304,7 @@ updateErrorState eid newState = PG.execute q (errorStateToText newState, eid) [sql| UPDATE apis.errors SET state = ?, updated_at = NOW() WHERE id = ? |] resolveError :: DB es => ErrorId -> Eff es Int64 -resolveError eid = PG.execute q (eid,) +resolveError eid = PG.execute q (Only eid) where q = [sql| UPDATE apis.errors SET state = 'resolved', resolved_at = NOW(), updated_at = NOW() WHERE id = ? 
|] @@ -364,7 +365,7 @@ getHourlyErrorCounts eid hoursBack = -- | Get current hour error count for a specific error getCurrentHourErrorCount :: DB es => ErrorId -> Eff es Int getCurrentHourErrorCount eid = do - results <- PG.query q (eid,) + results <- PG.query q (Only eid) case results of [Only count] -> return count _ -> return 0 @@ -451,7 +452,7 @@ data ErrorWithCurrentRate = ErrorWithCurrentRate getErrorsWithCurrentRates :: DB es => Projects.ProjectId -> Eff es [ErrorWithCurrentRate] getErrorsWithCurrentRates pid = - PG.query q (pid,) + PG.query q (pid) where q = [sql| @@ -478,8 +479,8 @@ getErrorsWithCurrentRates pid = |] -- | Upsert an error (insert or update on conflict) -upsertErrorQueryAndParam :: DB es => Projects.ProjectId -> (Query, [DBField]) -upsertErrorQueryAndParam pid err exType msg stack hash env service runtime = (q, params) +upsertErrorQueryAndParam :: DB es => Projects.ProjectId -> RequestDump.ATError -> (Query, [DBField]) +upsertErrorQueryAndParam pid err = (q, params) where q = [sql| diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index 1936339fa..a38991736 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -695,7 +695,7 @@ createNewLogPatternIssue projectId lp = do let exceptionData = RuntimeExceptionData { errorType = "LogPattern" - , errorMessage = fromMaybe lp.pattern lp.sampleMessage + , errorMessage = fromMaybe lp.logPattern lp.sampleMessage , stackTrace = "" , requestPath = Nothing , requestMethod = Nothing @@ -715,7 +715,7 @@ createNewLogPatternIssue projectId lp = do , acknowledgedAt = Nothing , acknowledgedBy = Nothing , archivedAt = Nothing - , title = "New Log Pattern: " <> T.take 100 lp.pattern + , title = "New Log Pattern: " <> T.take 100 lp.logPattern , service = fromMaybe "unknown-service" lp.serviceName , critical = False , severity = case lp.logLevel of @@ -753,7 +753,7 @@ createLogPatternSpikeIssue projectId lp currentRate baselineMean baselineStddev exceptionData = RuntimeExceptionData { errorType = "LogPatternSpike" - , errorMessage = fromMaybe lp.pattern lp.sampleMessage + , errorMessage = fromMaybe lp.logPattern lp.sampleMessage , stackTrace = "" , requestPath = Nothing , requestMethod = Nothing @@ -773,7 +773,7 @@ createLogPatternSpikeIssue projectId lp currentRate baselineMean baselineStddev , acknowledgedAt = Nothing , acknowledgedBy = Nothing , archivedAt = Nothing - , title = "Log Pattern Spike: " <> T.take 60 lp.pattern <> " (" <> T.pack (show (round increasePercent :: Int)) <> "% increase)" + , title = "Log Pattern Spike: " <> T.take 60 lp.logPattern <> " (" <> T.pack (show (round increasePercent :: Int)) <> "% increase)" , service = fromMaybe "unknown-service" lp.serviceName , critical = case lp.logLevel of Just "error" -> True diff --git a/src/Models/Apis/LogPatterns.hs b/src/Models/Apis/LogPatterns.hs index bf35b5a66..8a86e18e2 100644 --- a/src/Models/Apis/LogPatterns.hs +++ b/src/Models/Apis/LogPatterns.hs @@ -4,9 +4,7 @@ module Models.Apis.LogPatterns ( LogPatternState (..), getLogPatterns, getLogPatternByHash, - getNewLogPatterns, acknowledgeLogPatterns, - ignoreLogPatterns, upsertLogPattern, updateLogPatternStats, updateBaseline, @@ -27,7 +25,7 @@ import Data.Time import Data.UUID qualified as UUID import Data.Vector qualified as V import Database.PostgreSQL.Entity.Types (CamelToSnake, Entity, FieldModifiers, GenericEntity, PrimaryKey, Schema, TableName) -import Database.PostgreSQL.Simple (FromRow, ToRow) +import Database.PostgreSQL.Simple (FromRow, Only (Only),ToRow) import 
Database.PostgreSQL.Simple.FromField (FromField, ResultError (ConversionFailed, UnexpectedNull), fromField, returnError) import Database.PostgreSQL.Simple.SqlQQ (sql) import Database.PostgreSQL.Simple.ToField (Action (Escape), ToField, toField) @@ -88,7 +86,7 @@ data LogPattern = LogPattern , projectId :: Projects.ProjectId , createdAt :: ZonedTime , updatedAt :: ZonedTime - , pattern :: Text + , logPattern :: Text , patternHash :: Text , serviceName :: Maybe Text , logLevel :: Maybe Text @@ -151,22 +149,6 @@ getLogPatternByHash pid hash = do |] --- | Get new (unacknowledged) log patterns for a project -getNewLogPatterns :: DB es => Projects.ProjectId -> Eff es [LogPattern] -getNewLogPatterns pid = PG.query q (pid,) - where - q = - [sql| - SELECT id, project_id, created_at, updated_at, pattern, pattern_hash, - service_name, log_level, sample_message, first_seen_at, last_seen_at, - occurrence_count, state, acknowledged_by, acknowledged_at, - baseline_state, baseline_volume_hourly_mean, baseline_volume_hourly_stddev, - baseline_samples, baseline_updated_at - FROM apis.log_patterns - WHERE project_id = ? AND state = 'new' - ORDER BY first_seen_at DESC - |] - -- | Acknowledge log patterns acknowledgeLogPatterns :: DB es => Users.UserId -> V.Vector Text -> Eff es Int64 @@ -181,21 +163,6 @@ acknowledgeLogPatterns uid patternHashes WHERE pattern_hash = ANY(?) |] - --- | Ignore log patterns (won't alert on them) -ignoreLogPatterns :: DB es => V.Vector Text -> Eff es Int64 -ignoreLogPatterns patternHashes - | V.null patternHashes = pure 0 - | otherwise = PG.execute q (patternHashes,) - where - q = - [sql| - UPDATE apis.log_patterns - SET state = 'ignored' - WHERE pattern_hash = ANY(?) - |] - - -- | Upsert a log pattern (insert or update occurrence count) upsertLogPattern :: DB es => @@ -281,8 +248,8 @@ getPatternStats :: Text -> -- pattern (log_pattern value) Int -> -- hours to look back Eff es (Maybe PatternStats) -getPatternStats pid pattern hoursBack = do - results <- PG.query q (pid, pattern, hoursBack) +getPatternStats pid pattern' hoursBack = do + results <- PG.query q (pid, pattern', hoursBack) return $ listToMaybe results where q = @@ -308,8 +275,8 @@ getPatternStats pid pattern hoursBack = do -- | Get current hour count for a pattern getCurrentHourPatternCount :: DB es => Projects.ProjectId -> Text -> Eff es Int -getCurrentHourPatternCount pid pattern = do - results <- PG.query q (pid, pattern) +getCurrentHourPatternCount pid pattern' = do + results <- PG.query q (pid, pattern') case results of [Only count] -> return count _ -> return 0 @@ -328,7 +295,7 @@ getCurrentHourPatternCount pid pattern = do data LogPatternWithRate = LogPatternWithRate { patternId :: LogPatternId , projectId :: Projects.ProjectId - , pattern :: Text + , logPattern :: Text , patternHash :: Text , baselineState :: Text , baselineMean :: Maybe Double @@ -349,7 +316,7 @@ getPatternsWithCurrentRates pid = SELECT lp.id, lp.project_id, - lp.pattern, + lp.log_pattern, lp.pattern_hash, lp.baseline_state, lp.baseline_volume_hourly_mean, @@ -363,7 +330,7 @@ getPatternsWithCurrentRates pid = AND timestamp >= date_trunc('hour', NOW()) AND log_pattern IS NOT NULL GROUP BY log_pattern - ) counts ON counts.log_pattern = lp.pattern + ) counts ON counts.log_pattern = lp.log_pattern WHERE lp.project_id = ? 
AND lp.state != 'ignored' |] @@ -372,7 +339,7 @@ getPatternsWithCurrentRates pid = -- | Get a pattern by ID getLogPatternById :: DB es => LogPatternId -> Eff es (Maybe LogPattern) getLogPatternById lpid = do - results <- PG.query q (lpid,) + results <- PG.query q (Only lpid) return $ listToMaybe results where q = diff --git a/src/Models/Apis/RequestDumps.hs b/src/Models/Apis/RequestDumps.hs index 2fd06f781..fb641cf1b 100644 --- a/src/Models/Apis/RequestDumps.hs +++ b/src/Models/Apis/RequestDumps.hs @@ -193,8 +193,8 @@ data ATError = ATError , requestPath :: Maybe Text , serviceName :: Maybe Text , environment :: Maybe Text - , runtime :: Maybe Text, - , traceId :: Maybe Text, + , runtime :: Maybe Text + , traceId :: Maybe Text , spanId :: Maybe Text , parentSpanId :: Maybe Text , endpointHash :: Maybe Text diff --git a/src/Models/Telemetry/Telemetry.hs b/src/Models/Telemetry/Telemetry.hs index 6073df0dd..71d33bd4f 100644 --- a/src/Models/Telemetry/Telemetry.hs +++ b/src/Models/Telemetry/Telemetry.hs @@ -1087,7 +1087,7 @@ extractATError spanObj (AE.Object o) = do , requestPath = urlPath , spanId = spanId , traceId = trId - , stack = tech + , runtime = tech } extractATError _ _ = Nothing diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index ddcaac3e1..95680fcb7 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -345,7 +345,7 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do faSprite_ "code" "regular" "w-4 h-4" div_ [] do span_ [class_ "text-sm text-textWeak"] "Stack:" - span_ [class_ "ml-2 text-sm"] $ toHtml $ fromMaybe "Unknown stack" err.errorData.stack + span_ [class_ "ml-2 text-sm"] $ toHtml $ fromMaybe "Unknown stack" err.errorData.runtime div_ [class_ "flex items-center gap-2"] do faSprite_ "server" "regular" "w-3 h-3" diff --git a/src/Pkg/Mail.hs b/src/Pkg/Mail.hs index d5cadefd7..b13187854 100644 --- a/src/Pkg/Mail.hs +++ b/src/Pkg/Mail.hs @@ -209,7 +209,7 @@ slackErrorAlert err project channelId projectUrl = , AE.object ["type" AE..= "section", "text" AE..= AE.object ["type" AE..= "mrkdwn", "text" AE..= ("```" <> err.message <> "\n```")]] , AE.object [ "type" AE..= "context" - , "elements" AE..= AE.Array (V.fromList $ AE.object ["type" AE..= "mrkdwn", "text" AE..= ("*Stack:* `" <> fromMaybe "" err.stack <> "`")] : [AE.object ["type" AE..= "mrkdwn", "text" AE..= ("*Endpoint:* " <> enp)]]) + , "elements" AE..= AE.Array (V.fromList $ AE.object ["type" AE..= "mrkdwn", "text" AE..= ("*Stack:* `" <> fromMaybe "" err.runtime <> "`")] : [AE.object ["type" AE..= "mrkdwn", "text" AE..= ("*Endpoint:* " <> enp)]]) ] , AE.object [ "type" AE..= "context" From cb527f886c67036caadecff099979823a4487f08 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Wed, 7 Jan 2026 22:39:17 +0000 Subject: [PATCH 06/71] Auto-format code with fourmolu --- src/BackgroundJobs.hs | 6 +--- src/Models/Apis/Errors.hs | 34 +++++++++++---------- src/Models/Apis/Issues.hs | 28 +++++++++--------- src/Models/Apis/LogPatterns.hs | 52 ++++++++++++++++----------------- src/Models/Apis/RequestDumps.hs | 1 - src/Pages/Anomalies.hs | 4 +-- 6 files changed, 62 insertions(+), 63 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 78190b3c5..158a35b0f 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -39,11 +39,11 @@ import Log qualified as LogLegacy import Models.Apis.Anomalies qualified as Anomalies import Models.Apis.Endpoints qualified as Endpoints import Models.Apis.Errors qualified as Errors -import Models.Apis.LogPatterns qualified as 
LogPatterns import Models.Apis.Fields.Facets qualified as Facets import Models.Apis.Fields.Types qualified as Fields import Models.Apis.Issues qualified as Issues import Models.Apis.Issues.Enhancement qualified as Enhancement +import Models.Apis.LogPatterns qualified as LogPatterns import Models.Apis.Monitors qualified as Monitors import Models.Apis.Reports qualified as Reports import Models.Apis.RequestDumps (ATError (..)) @@ -1715,9 +1715,7 @@ detectErrorSpikes pid authCtx = do void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) Log.logInfo "Created issue for error spike" (pid, err.id, issue.id) - _ -> pass -- Skip errors without established baseline - Log.logInfo "Finished error spike detection" pid @@ -1806,9 +1804,7 @@ detectLogPatternSpikes pid authCtx = do void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) Log.logInfo "Created issue for log pattern spike" (pid, lp.id, issue.id) - _ -> pass -- Skip patterns without established baseline - Log.logInfo "Finished log pattern spike detection" pid diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index 3190c2467..c0a95aea3 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -39,12 +39,12 @@ import Database.PostgreSQL.Simple.Types (Query (Query)) import Deriving.Aeson qualified as DAE import Effectful (Eff) import Effectful.PostgreSQL qualified as PG +import Models.Apis.RequestDumps qualified as RequestDump import Models.Projects.Projects qualified as Projects import Models.Users.Users qualified as Users import Relude hiding (id) import System.Types (DB) import Utils (DBField (MkDBField)) -import Models.Apis.RequestDumps qualified as RequestDump newtype ErrorId = ErrorId {unErrorId :: UUID.UUID} @@ -252,7 +252,7 @@ getErrorByHash pid hash env = do -- | Get active (non-resolved) errors getActiveErrors :: DB es => Projects.ProjectId -> Eff es [Error] -getActiveErrors pid = PG.query q (Only pid) +getActiveErrors pid = PG.query q (Only pid) where q = [sql| @@ -303,19 +303,21 @@ updateErrorState eid newState = PG.execute q (errorStateToText newState, eid) q = [sql| UPDATE apis.errors SET state = ?, updated_at = NOW() WHERE id = ? |] + resolveError :: DB es => ErrorId -> Eff es Int64 -resolveError eid = PG.execute q (Only eid) +resolveError eid = PG.execute q (Only eid) where q = [sql| UPDATE apis.errors SET state = 'resolved', resolved_at = NOW(), updated_at = NOW() WHERE id = ? |] + assignError :: DB es => ErrorId -> Users.UserId -> Eff es Int64 assignError eid uid = PG.execute q (uid, eid) where - q = [sql| UPDATE apis.errors SET assignee_id = ?, assigned_at = NOW(), updated_at = NOW() WHERE id = ? |] + q = [sql| UPDATE apis.errors SET assignee_id = ?, assigned_at = NOW(), updated_at = NOW() WHERE id = ? |] -- | Update baseline data for an error -updateBaseline :: DB es => ErrorId -> Text -> Double -> Double -> Int -> Eff es Int64 +updateBaseline :: DB es => ErrorId -> Text -> Double -> Double -> Int -> Eff es Int64 updateBaseline eid bState rateMean rateStddev samples = PG.execute q (bState, rateMean, rateStddev, samples, eid) where @@ -330,6 +332,7 @@ updateBaseline eid bState rateMean rateStddev samples = WHERE id = ? 
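       -- the baseline_* stats written here feed checkErrorSpike's z-score test,
       -- e.g. a mean of 4 errs/hr with stddev 2 and a current rate of 12/hr
       -- gives z = (12 - 4) / 2 = 4.0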
|] + -- | Hourly bucket for error event aggregation data HourlyBucket = HourlyBucket { hourStart :: UTCTime @@ -341,11 +344,11 @@ data HourlyBucket = HourlyBucket -- | Get hourly error counts for a specific error over a time range -- Returns counts bucketed by hour for baseline calculation -getHourlyErrorCounts :: - DB es => - ErrorId -> - Int -> -- hours to look back - Eff es [HourlyBucket] +getHourlyErrorCounts + :: DB es + => ErrorId + -> Int -- hours to look back + -> Eff es [HourlyBucket] getHourlyErrorCounts eid hoursBack = PG.query q (eid, hoursBack) where @@ -391,7 +394,7 @@ data ErrorEventStats = ErrorEventStats deriving anyclass (FromRow) -getErrorEventStats :: DB es => ErrorId -> Int -> Eff es (Maybe ErrorEventStats) +getErrorEventStats :: DB es => ErrorId -> Int -> Eff es (Maybe ErrorEventStats) getErrorEventStats eid hoursBack = do results <- PG.query q (eid, hoursBack) return $ listToMaybe results @@ -418,10 +421,10 @@ getErrorEventStats eid hoursBack = do -- | Check if an error is spiking compared to its baseline -- Returns (isSpike, currentRate, zScore) if baseline is established -checkErrorSpike :: - DB es => - Error -> - Eff es (Maybe (Bool, Double, Double)) +checkErrorSpike + :: DB es + => Error + -> Eff es (Maybe (Bool, Double, Double)) checkErrorSpike err = do case (err.baselineState, err.baselineErrorRateMean, err.baselineErrorRateStddev) of ("established", Just mean, Just stddev) | stddev > 0 -> do @@ -478,6 +481,7 @@ getErrorsWithCurrentRates pid = AND e.is_ignored = false |] + -- | Upsert an error (insert or update on conflict) upsertErrorQueryAndParam :: DB es => Projects.ProjectId -> RequestDump.ATError -> (Query, [DBField]) upsertErrorQueryAndParam pid err = (q, params) diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index a38991736..29382853e 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -630,13 +630,13 @@ createNewErrorIssue projectId err = do -- | Create issue for an error spike -createErrorSpikeIssue :: - Projects.ProjectId -> - Errors.Error -> - Double -> -- current rate - Double -> -- baseline mean - Double -> -- baseline stddev - IO Issue +createErrorSpikeIssue + :: Projects.ProjectId + -> Errors.Error + -> Double -- current rate + -> Double -- baseline mean + -> Double -- baseline stddev + -> IO Issue createErrorSpikeIssue projectId err currentRate baselineMean baselineStddev = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime @@ -736,13 +736,13 @@ createNewLogPatternIssue projectId lp = do -- | Create an issue for a log pattern volume spike -createLogPatternSpikeIssue :: - Projects.ProjectId -> - LogPatterns.LogPattern -> - Double -> -- current rate (events/hour) - Double -> -- baseline mean - Double -> -- baseline stddev - IO Issue +createLogPatternSpikeIssue + :: Projects.ProjectId + -> LogPatterns.LogPattern + -> Double -- current rate (events/hour) + -> Double -- baseline mean + -> Double -- baseline stddev + -> IO Issue createLogPatternSpikeIssue projectId lp currentRate baselineMean baselineStddev = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime diff --git a/src/Models/Apis/LogPatterns.hs b/src/Models/Apis/LogPatterns.hs index 8a86e18e2..fca7c35c2 100644 --- a/src/Models/Apis/LogPatterns.hs +++ b/src/Models/Apis/LogPatterns.hs @@ -25,7 +25,7 @@ import Data.Time import Data.UUID qualified as UUID import Data.Vector qualified as V import Database.PostgreSQL.Entity.Types (CamelToSnake, Entity, FieldModifiers, GenericEntity, PrimaryKey, Schema, TableName) -import 
Database.PostgreSQL.Simple (FromRow, Only (Only),ToRow) +import Database.PostgreSQL.Simple (FromRow, Only (Only), ToRow) import Database.PostgreSQL.Simple.FromField (FromField, ResultError (ConversionFailed, UnexpectedNull), fromField, returnError) import Database.PostgreSQL.Simple.SqlQQ (sql) import Database.PostgreSQL.Simple.ToField (Action (Escape), ToField, toField) @@ -149,7 +149,6 @@ getLogPatternByHash pid hash = do |] - -- | Acknowledge log patterns acknowledgeLogPatterns :: DB es => Users.UserId -> V.Vector Text -> Eff es Int64 acknowledgeLogPatterns uid patternHashes @@ -163,16 +162,17 @@ acknowledgeLogPatterns uid patternHashes WHERE pattern_hash = ANY(?) |] + -- | Upsert a log pattern (insert or update occurrence count) -upsertLogPattern :: - DB es => - Projects.ProjectId -> - Text -> -- pattern - Text -> -- pattern_hash - Maybe Text -> -- service_name - Maybe Text -> -- log_level - Maybe Text -> -- sample_message - Eff es Int64 +upsertLogPattern + :: DB es + => Projects.ProjectId + -> Text -- pattern + -> Text -- pattern_hash + -> Maybe Text -- service_name + -> Maybe Text -- log_level + -> Maybe Text -- sample_message + -> Eff es Int64 upsertLogPattern pid pat patHash serviceName logLevel sampleMsg = PG.execute q (pid, pat, patHash, serviceName, logLevel, sampleMsg) where @@ -202,15 +202,15 @@ updateLogPatternStats pid patHash additionalCount = -- | Update baseline data for a log pattern -updateBaseline :: - DB es => - Projects.ProjectId -> - Text -> -- pattern_hash - Text -> -- baseline_state ('learning' or 'established') - Double -> -- hourly_mean - Double -> -- hourly_stddev - Int -> -- samples - Eff es Int64 +updateBaseline + :: DB es + => Projects.ProjectId + -> Text -- pattern_hash + -> Text -- baseline_state ('learning' or 'established') + -> Double -- hourly_mean + -> Double -- hourly_stddev + -> Int -- samples + -> Eff es Int64 updateBaseline pid patHash bState hourlyMean hourlyStddev samples = PG.execute q (bState, hourlyMean, hourlyStddev, samples, pid, patHash) where @@ -242,12 +242,12 @@ data PatternStats = PatternStats -- | Get pattern stats from otel_logs_and_spans -getPatternStats :: - DB es => - Projects.ProjectId -> - Text -> -- pattern (log_pattern value) - Int -> -- hours to look back - Eff es (Maybe PatternStats) +getPatternStats + :: DB es + => Projects.ProjectId + -> Text -- pattern (log_pattern value) + -> Int -- hours to look back + -> Eff es (Maybe PatternStats) getPatternStats pid pattern' hoursBack = do results <- PG.query q (pid, pattern', hoursBack) return $ listToMaybe results diff --git a/src/Models/Apis/RequestDumps.hs b/src/Models/Apis/RequestDumps.hs index fb641cf1b..cd9cefbad 100644 --- a/src/Models/Apis/RequestDumps.hs +++ b/src/Models/Apis/RequestDumps.hs @@ -178,7 +178,6 @@ removeQueryParams statusCode urlPath = (before, after) -> before -- Query parameters found, stripping them - data ATError = ATError { projectId :: Maybe Projects.ProjectId , when :: UTCTime diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index 95680fcb7..af769b8e1 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -1232,7 +1232,7 @@ anomalyAcknowledgeButton pid aid acked host = do a_ [ class_ $ "inline-flex items-center gap-2 cursor-pointer py-2 px-3 rounded-xl " - <> (if acked then "bg-fillSuccess-weak text-textSuccess" else "btn-primary") + <> (if acked then "bg-fillSuccess-weak text-textSuccess" else "btn-primary") , term "data-tippy-content" "acknowledge issue" , hxGet_ acknowledgeAnomalyEndpoint , hxSwap_ "outerHTML" @@ -1248,7 
+1248,7 @@ anomalyArchiveButton pid aid archived = do a_ [ class_ $ "inline-flex items-center gap-2 cursor-pointer py-2 px-3 rounded-xl " - <> (if archived then " bg-fillSuccess-weak text-textSuccess" else "btn-primary") + <> (if archived then " bg-fillSuccess-weak text-textSuccess" else "btn-primary") , term "data-tippy-content" $ if archived then "unarchive" else "archive" , hxGet_ archiveAnomalyEndpoint , hxSwap_ "outerHTML" From 72f7652a09c3c73ddf3a0db6ecb9376ee031bcfc Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Thu, 8 Jan 2026 07:19:49 +0000 Subject: [PATCH 07/71] new error and log pattern monitoring triggers --- .../0026_anomaly_detection_baselines.sql | 24 ++++++++++++++++--- .../0027_log_pattern_monitoring.sql | 19 ++------------- 2 files changed, 23 insertions(+), 20 deletions(-) diff --git a/static/migrations/0026_anomaly_detection_baselines.sql b/static/migrations/0026_anomaly_detection_baselines.sql index 336139411..9442eb520 100644 --- a/static/migrations/0026_anomaly_detection_baselines.sql +++ b/static/migrations/0026_anomaly_detection_baselines.sql @@ -52,8 +52,26 @@ WHERE state = 'established'; DROP TRIGGER IF EXISTS error_created_anomaly ON apis.errors; DROP TABLE IF EXISTS apis.errors; - - +CREATE OR REPLACE FUNCTION apis.new_error_proc() RETURNS trigger AS $$ +BEGIN + IF TG_WHEN <> 'AFTER' THEN + RAISE EXCEPTION 'apis.new_error_proc() may only run as an AFTER trigger'; + END IF; + -- Create a job for the new error + -- JSON format matches Aeson's derived FromJSON for: + -- NewErrorDetected Projects.ProjectId Text + INSERT INTO background_jobs (run_at, status, payload) + VALUES ( + NOW(), + 'queued', + jsonb_build_object( + 'tag', 'NewErrorDetected', + 'contents', jsonb_build_array(NEW.project_id, NEW.hash) + ) + ); + RETURN NULL; +END; +$$ LANGUAGE plpgsql; CREATE TABLE apis.errors ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), @@ -105,7 +123,7 @@ CREATE UNIQUE INDEX idx_apis_errors_project_id_hash ON apis.errors(project_id, h CREATE INDEX idx_apis_errors_project_id ON apis.errors(project_id); CREATE INDEX idx_errors_active ON apis.errors(project_id, state, last_seen_at DESC) WHERE state != 'resolved'; CREATE INDEX idx_errors_state ON apis.errors(project_id, state); -CREATE TRIGGER error_created_anomaly AFTER INSERT ON apis.errors FOR EACH ROW EXECUTE PROCEDURE apis.new_anomaly_proc('runtime_exception', 'created', 'skip_anomaly_record'); +CREATE TRIGGER error_created_anomaly AFTER INSERT ON apis.errors FOR EACH ROW EXECUTE PROCEDURE apis.new_error_proc('runtime_exception', 'created', 'skip_anomaly_record'); -- all data here that is not in errors (top level) go into error_data jsonb -- Whenever the same error occurs, we update the error_data with latest values for these fields diff --git a/static/migrations/0027_log_pattern_monitoring.sql b/static/migrations/0027_log_pattern_monitoring.sql index a18cc2ff2..fab4dfaac 100644 --- a/static/migrations/0027_log_pattern_monitoring.sql +++ b/static/migrations/0027_log_pattern_monitoring.sql @@ -6,7 +6,7 @@ CREATE TABLE IF NOT EXISTS apis.log_patterns ( created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), - pattern TEXT NOT NULL, + log_pattern TEXT NOT NULL, pattern_hash TEXT NOT NULL, service_name TEXT, @@ -33,22 +33,16 @@ CREATE TABLE IF NOT EXISTS apis.log_patterns ( SELECT manage_updated_at('apis.log_patterns'); --- Indexes for efficient queries CREATE INDEX IF NOT EXISTS idx_log_patterns_project ON apis.log_patterns(project_id); CREATE INDEX IF NOT EXISTS 
idx_log_patterns_project_state ON apis.log_patterns(project_id, state); CREATE INDEX IF NOT EXISTS idx_log_patterns_last_seen ON apis.log_patterns(project_id, last_seen_at DESC); CREATE INDEX IF NOT EXISTS idx_log_patterns_service ON apis.log_patterns(project_id, service_name); - CREATE OR REPLACE FUNCTION apis.new_log_pattern_proc() RETURNS trigger AS $$ BEGIN IF TG_WHEN <> 'AFTER' THEN RAISE EXCEPTION 'apis.new_log_pattern_proc() may only run as an AFTER trigger'; END IF; - - -- Create a job for the new pattern - -- JSON format matches Aeson's derived FromJSON for: - -- NewLogPatternDetected Projects.ProjectId Text INSERT INTO background_jobs (run_at, status, payload) VALUES ( NOW(), @@ -58,18 +52,11 @@ BEGIN 'contents', jsonb_build_array(NEW.project_id, NEW.pattern_hash) ) ); - RETURN NULL; END; $$ LANGUAGE plpgsql; -CREATE OR REPLACE TRIGGER log_pattern_created_notify - AFTER INSERT ON apis.log_patterns - FOR EACH ROW - EXECUTE PROCEDURE apis.new_log_pattern_proc(); - -ALTER TABLE projects.projects -ADD COLUMN IF NOT EXISTS log_pattern_alerts BOOLEAN NOT NULL DEFAULT false; +CREATE OR REPLACE TRIGGER log_pattern_created_notify AFTER INSERT ON apis.log_patterns FOR EACH ROW EXECUTE PROCEDURE apis.new_log_pattern_proc(); ALTER TABLE apis.endpoints @@ -85,6 +72,4 @@ ADD COLUMN IF NOT EXISTS baseline_latency_p99 FLOAT, ADD COLUMN IF NOT EXISTS baseline_volume_hourly_mean FLOAT, ADD COLUMN IF NOT EXISTS baseline_volume_hourly_stddev FLOAT; - - COMMIT; From e6bcb2aca4e40e9b5c3b523e7890e109b7e7efae Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Thu, 8 Jan 2026 07:50:05 +0000 Subject: [PATCH 08/71] get existing pattern from log patterns table --- src/BackgroundJobs.hs | 5 +---- src/Models/Apis/LogPatterns.hs | 37 +++++++++++----------------------- 2 files changed, 13 insertions(+), 29 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 158a35b0f..451b824ec 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -524,7 +524,7 @@ processPatterns :: Text -> Text -> V.Vector (Text, Text) -> Projects.ProjectId - processPatterns kind fieldName events pid scheduledTime since = do Relude.when (not $ V.null events) $ do let qq = [text| select $fieldName from otel_logs_and_spans where project_id= ? 
AND timestamp >= now() - interval '1 hour' and $fieldName is not null GROUP BY $fieldName ORDER BY count(*) desc limit 20|] - existingPatterns <- coerce @[Only Text] @[Text] <$> PG.query (Query $ encodeUtf8 qq) pid + existingPatterns <- if kind == "summary" then coerce @[Only Text] @[Text] <$> PG.query (Query $ encodeUtf8 qq) pid else LogPattern.getLogPatternTexts pid let known = V.fromList $ map ("",) existingPatterns combined = known <> events drainTree = processBatch (kind == "summary") combined scheduledTime Drain.emptyDrainTree @@ -539,14 +539,11 @@ processPatterns kind fieldName events pid scheduledTime since = do -- Update otel_logs_and_spans with pattern void $ PG.execute (Query $ encodeUtf8 q) (patternTxt, pid, since, V.filter (/= "") ids) - -- Also store in apis.log_patterns table (only for log patterns, not summaries) Relude.when (kind == "log" && not (T.null patternTxt)) $ do let patternHash = toXXHash patternTxt - -- Get a sample message from the first non-empty id sampleMsg = case V.find (/= "") (V.map snd events) of Just msg -> Just (T.take 500 msg) Nothing -> Nothing - -- Upsert the pattern (increments count if exists, inserts if new) void $ LogPatterns.upsertLogPattern pid patternTxt patternHash Nothing Nothing sampleMsg diff --git a/src/Models/Apis/LogPatterns.hs b/src/Models/Apis/LogPatterns.hs index fca7c35c2..61dbcda31 100644 --- a/src/Models/Apis/LogPatterns.hs +++ b/src/Models/Apis/LogPatterns.hs @@ -3,6 +3,7 @@ module Models.Apis.LogPatterns ( LogPatternId, LogPatternState (..), getLogPatterns, + getLogPatternTexts, getLogPatternByHash, acknowledgeLogPatterns, upsertLogPattern, @@ -119,7 +120,7 @@ getLogPatterns pid mstate limit offset = PG.query q (pid, maybe "%" logPatternSt where q = [sql| - SELECT id, project_id, created_at, updated_at, pattern, pattern_hash, + SELECT id, project_id, created_at, updated_at, log_pattern, pattern_hash, service_name, log_level, sample_message, first_seen_at, last_seen_at, occurrence_count, state, acknowledged_by, acknowledged_at, baseline_state, baseline_volume_hourly_mean, baseline_volume_hourly_stddev, @@ -130,7 +131,11 @@ getLogPatterns pid mstate limit offset = PG.query q (pid, maybe "%" logPatternSt LIMIT ? OFFSET ? 
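       -- when mstate is Nothing the Haskell side binds '%' for the state filter,
       -- so patterns in every state are listed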
|] - +getLogPatternTexts :: DB es => Projects.ProjectId -> Eff es [Text] +getLogPatternTexts pid = PG.query q (Only pid) + where + q = [sql| SELECT log_pattern FROM apis.log_patterns WHERE project_id = ?|] + -- | Get log pattern by hash getLogPatternByHash :: DB es => Projects.ProjectId -> Text -> Eff es (Maybe LogPattern) getLogPatternByHash pid hash = do @@ -139,7 +144,7 @@ getLogPatternByHash pid hash = do where q = [sql| - SELECT id, project_id, created_at, updated_at, pattern, pattern_hash, + SELECT id, project_id, created_at, updated_at, log_pattern, pattern_hash, service_name, log_level, sample_message, first_seen_at, last_seen_at, occurrence_count, state, acknowledged_by, acknowledged_at, baseline_state, baseline_volume_hourly_mean, baseline_volume_hourly_stddev, @@ -163,22 +168,13 @@ acknowledgeLogPatterns uid patternHashes |] --- | Upsert a log pattern (insert or update occurrence count) -upsertLogPattern - :: DB es - => Projects.ProjectId - -> Text -- pattern - -> Text -- pattern_hash - -> Maybe Text -- service_name - -> Maybe Text -- log_level - -> Maybe Text -- sample_message - -> Eff es Int64 +upsertLogPattern :: DB es => Projects.ProjectId -> Text -> Text -> Maybe Text -> Maybe Text -> Maybe Text -> Eff es Int64 upsertLogPattern pid pat patHash serviceName logLevel sampleMsg = PG.execute q (pid, pat, patHash, serviceName, logLevel, sampleMsg) where q = [sql| - INSERT INTO apis.log_patterns (project_id, pattern, pattern_hash, service_name, log_level, sample_message) + INSERT INTO apis.log_patterns (project_id, log_pattern, pattern_hash, service_name, log_level, sample_message) VALUES (?, ?, ?, ?, ?, ?) ON CONFLICT (project_id, pattern_hash) DO UPDATE SET last_seen_at = NOW(), @@ -226,10 +222,6 @@ updateBaseline pid patHash bState hourlyMean hourlyStddev samples = |] --- ============================================================================ --- Log Pattern Occurrence Queries (from otel_logs_and_spans) --- ============================================================================ - -- | Stats for a log pattern from otel_logs_and_spans data PatternStats = PatternStats { hourlyMean :: Double @@ -242,12 +234,7 @@ data PatternStats = PatternStats -- | Get pattern stats from otel_logs_and_spans -getPatternStats - :: DB es - => Projects.ProjectId - -> Text -- pattern (log_pattern value) - -> Int -- hours to look back - -> Eff es (Maybe PatternStats) +getPatternStats :: DB es => Projects.ProjectId -> Text -> Int -> Eff es (Maybe PatternStats) getPatternStats pid pattern' hoursBack = do results <- PG.query q (pid, pattern', hoursBack) return $ listToMaybe results @@ -344,7 +331,7 @@ getLogPatternById lpid = do where q = [sql| - SELECT id, project_id, created_at, updated_at, pattern, pattern_hash, + SELECT id, project_id, created_at, updated_at, log_pattern, pattern_hash, service_name, log_level, sample_message, first_seen_at, last_seen_at, occurrence_count, state, acknowledged_by, acknowledged_at, baseline_state, baseline_volume_hourly_mean, baseline_volume_hourly_stddev, From c5ede379f3ac965cf2d034fb07d9216401468697 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 8 Jan 2026 07:50:45 +0000 Subject: [PATCH 09/71] Auto-format code with fourmolu --- src/BackgroundJobs.hs | 2 +- src/Models/Apis/LogPatterns.hs | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 451b824ec..f1e82a8ae 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -524,7 +524,7 @@ processPatterns :: Text -> Text 
-> V.Vector (Text, Text) -> Projects.ProjectId - processPatterns kind fieldName events pid scheduledTime since = do Relude.when (not $ V.null events) $ do let qq = [text| select $fieldName from otel_logs_and_spans where project_id= ? AND timestamp >= now() - interval '1 hour' and $fieldName is not null GROUP BY $fieldName ORDER BY count(*) desc limit 20|] - existingPatterns <- if kind == "summary" then coerce @[Only Text] @[Text] <$> PG.query (Query $ encodeUtf8 qq) pid else LogPattern.getLogPatternTexts pid + existingPatterns <- if kind == "summary" then coerce @[Only Text] @[Text] <$> PG.query (Query $ encodeUtf8 qq) pid else LogPattern.getLogPatternTexts pid let known = V.fromList $ map ("",) existingPatterns combined = known <> events drainTree = processBatch (kind == "summary") combined scheduledTime Drain.emptyDrainTree diff --git a/src/Models/Apis/LogPatterns.hs b/src/Models/Apis/LogPatterns.hs index 61dbcda31..8ea04cc6e 100644 --- a/src/Models/Apis/LogPatterns.hs +++ b/src/Models/Apis/LogPatterns.hs @@ -131,11 +131,13 @@ getLogPatterns pid mstate limit offset = PG.query q (pid, maybe "%" logPatternSt LIMIT ? OFFSET ? |] + getLogPatternTexts :: DB es => Projects.ProjectId -> Eff es [Text] getLogPatternTexts pid = PG.query q (Only pid) where q = [sql| SELECT log_pattern FROM apis.log_patterns WHERE project_id = ?|] - + + -- | Get log pattern by hash getLogPatternByHash :: DB es => Projects.ProjectId -> Text -> Eff es (Maybe LogPattern) getLogPatternByHash pid hash = do @@ -168,7 +170,7 @@ acknowledgeLogPatterns uid patternHashes |] -upsertLogPattern :: DB es => Projects.ProjectId -> Text -> Text -> Maybe Text -> Maybe Text -> Maybe Text -> Eff es Int64 +upsertLogPattern :: DB es => Projects.ProjectId -> Text -> Text -> Maybe Text -> Maybe Text -> Maybe Text -> Eff es Int64 upsertLogPattern pid pat patHash serviceName logLevel sampleMsg = PG.execute q (pid, pat, patHash, serviceName, logLevel, sampleMsg) where From faaeeb1eaab535f46fa5a69e617ee31f91a20a40 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Thu, 8 Jan 2026 09:36:07 +0000 Subject: [PATCH 10/71] Update cabal.project to include proto-lens-setup version constraint --- cabal.project | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index 361bb2cf5..bc378f34e 100644 --- a/cabal.project +++ b/cabal.project @@ -109,5 +109,6 @@ source-repository-package constraints: streamly ^>=0.10.0, - effectful-postgresql +enable-otel + effectful-postgresql +enable-otel, + proto-lens-setup ==0.4.0.9 allow-newer: all From 68158cfbc9c5448fdf79589e346da23339a7b723 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Thu, 8 Jan 2026 14:17:25 +0000 Subject: [PATCH 11/71] build foundation for error and log pattern issues --- cabal.project | 1 - src/Models/Apis/Anomalies.hs | 36 ---------- src/Models/Apis/Issues.hs | 68 ++++++++----------- src/Models/Apis/LogPatterns.hs | 4 +- src/Pages/Anomalies.hs | 10 +-- .../migrations/0028_rebuild_issues_table.sql | 28 +++++--- 6 files changed, 55 insertions(+), 92 deletions(-) diff --git a/cabal.project b/cabal.project index bc378f34e..d2c63590b 100644 --- a/cabal.project +++ b/cabal.project @@ -109,6 +109,5 @@ source-repository-package constraints: streamly ^>=0.10.0, - effectful-postgresql +enable-otel, proto-lens-setup ==0.4.0.9 allow-newer: all diff --git a/src/Models/Apis/Anomalies.hs b/src/Models/Apis/Anomalies.hs index ada7531b4..09fe77ede 100644 --- a/src/Models/Apis/Anomalies.hs +++ b/src/Models/Apis/Anomalies.hs @@ -1,7 +1,6 @@ module 
Models.Apis.Anomalies ( AnomalyVM (..), AnomalyActions (..), - Issue (..), IssueL (..), IssueEventAgg (..), AnomalyTypes (..), @@ -350,41 +349,6 @@ instance Default IssuesData where def = IDEmpty -data Issue = Issue - { id :: AnomalyId - , createdAt :: ZonedTime - , updatedAt :: ZonedTime - , projectId :: Projects.ProjectId - , acknowlegedAt :: Maybe ZonedTime - , anomalyType :: AnomalyTypes - , targetHash :: Text - , issueData :: IssuesData - , endpointId :: Maybe Endpoints.EndpointId - , acknowlegedBy :: Maybe Users.UserId - , archivedAt :: Maybe ZonedTime - , -- Enhanced UI fields - title :: Text - , service :: Text - , critical :: Bool - , breakingChanges :: Int - , incrementalChanges :: Int - , affectedRequests :: Int - , affectedClients :: Int - , estimatedRequests :: Text - , migrationComplexity :: Text - , recommendedAction :: Text - , requestPayloads :: Aeson [PayloadChange] - , responsePayloads :: Aeson [PayloadChange] - , -- New fields for anomaly grouping - anomalyHashes :: V.Vector Text - , endpointHash :: Text - } - deriving stock (Generic, Show) - deriving anyclass (Default, FromRow, NFData, ToRow) - deriving - (Entity) - via (GenericEntity '[Schema "apis", TableName "issues", PrimaryKey "id", FieldModifiers '[CamelToSnake]] Issue) - data IssueEventAgg = IssueEventAgg { count :: Int diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index 29382853e..77929a885 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -110,6 +110,13 @@ data IssueType = APIChange | RuntimeException | QueryAlert + | LogPattern + | ErrorEscalating + | ErrorRegressed + | LogPatternRateChange + | EndpointLatencyDegradation + | EndpointErrorRateSpike + | EndpointVolumeRateChange deriving stock (Eq, Generic, Show) deriving anyclass (NFData) deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.ConstructorTagModifier '[DAE.CamelToSnake]] IssueType @@ -123,13 +130,26 @@ issueTypeToText :: IssueType -> Text issueTypeToText APIChange = "api_change" -- Maps to anomaly_type 'shape' in DB issueTypeToText RuntimeException = "runtime_exception" issueTypeToText QueryAlert = "query_alert" - +issueTypeToText LogPattern = "log_pattern" +issueTypeToText ErrorEscalating = "error_escalating" +issueTypeToText ErrorRegressed = "error_regressed" +issueTypeToText LogPatternRateChange = "log_pattern_rate_change" +issueTypeToText EndpointLatencyDegradation = "endpoint_latency_degradation" +issueTypeToText EndpointErrorRateSpike = "endpoint_error_rate_spike" +issueTypeToText EndpointVolumeRateChange = "endpoint_volume_rate_change" parseIssueType :: Text -> Maybe IssueType parseIssueType "api_change" = Just APIChange parseIssueType "shape" = Just APIChange -- Handle DB anomaly_type parseIssueType "runtime_exception" = Just RuntimeException parseIssueType "query_alert" = Just QueryAlert +parseIssueType "log_pattern" = Just LogPattern +parseIssueType "error_escalating" = Just ErrorEscalating +parseIssueType "error_regressed" = Just ErrorRegressed +parseIssueType "log_pattern_rate_change" = Just LogPatternRateChange +parseIssueType "endpoint_latency_degradation" = Just EndpointLatencyDegradation +parseIssueType "endpoint_error_rate_spike" = Just EndpointErrorRateSpike +parseIssueType "endpoint_volume_rate_change" = Just EndpointVolumeRateChange parseIssueType _ = Nothing @@ -144,7 +164,6 @@ instance FromField IssueType where Just t -> pure t Nothing -> returnError ConversionFailed f $ "Unknown issue type: " <> decodeUtf8 bs - -- | API Change issue data data APIChangeData = APIChangeData { 
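    -- Usage sketch (not part of this patch): the widened taxonomy round-trips
    -- through its wire form, e.g.
    --   parseIssueType (issueTypeToText EndpointErrorRateSpike) == Just EndpointErrorRateSpike
    -- and the legacy DB value "shape" still parses as APIChange.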
endpointMethod :: Text
@@ -203,30 +222,21 @@ data Issue = Issue
   , updatedAt :: ZonedTime
   , projectId :: Projects.ProjectId
   , issueType :: IssueType
-  , endpointHash :: Text -- For API changes, empty for others
-  -- Status fields
+  , targetHash :: Text -- links to error.hash, log_pattern.pattern_hash, endpoint.hash
+  , endpointHash :: Text -- For API changes, empty for others
   , acknowledgedAt :: Maybe ZonedTime
   , acknowledgedBy :: Maybe Users.UserId
   , archivedAt :: Maybe ZonedTime
-  , -- Issue details
-    title :: Text
+  , title :: Text
   , service :: Text
   , critical :: Bool
   , severity :: Text -- "critical", "warning", "info"
-  -- Impact metrics
-  , affectedRequests :: Int
-  , affectedClients :: Int
-  , errorRate :: Maybe Double
-  , -- Actions
-    recommendedAction :: Text
+  , recommendedAction :: Text
   , migrationComplexity :: Text -- "low", "medium", "high", "n/a"
-  -- Data payload (polymorphic based on issueType)
   , issueData :: Aeson AE.Value
-  , -- Payload changes tracking (for API changes)
-    requestPayloads :: Aeson [PayloadChange]
+  , requestPayloads :: Aeson [PayloadChange]
   , responsePayloads :: Aeson [PayloadChange]
-  , -- LLM enhancement tracking
-    llmEnhancedAt :: Maybe UTCTime
+  , llmEnhancedAt :: Maybe UTCTime
   , llmEnhancementVersion :: Maybe Int
   }
   deriving stock (Generic, Show)
@@ -250,9 +260,6 @@ instance Default Issue where
      , service = ""
      , critical = False
      , severity = "info"
-      , affectedRequests = 0
-      , affectedClients = 0
-      , errorRate = Nothing
      , recommendedAction = ""
      , migrationComplexity = "low"
      , issueData = Aeson AE.Null
@@ -477,9 +484,6 @@ createAPIChangeIssue projectId endpointHash anomalies = do
      , service = Anomalies.detectService Nothing firstAnomaly.endpointUrlPath
      , critical = isCritical
      , severity = if isCritical then "critical" else "warning"
-      , affectedRequests = 0
-      , affectedClients = 0
-      , errorRate = Nothing
      , recommendedAction = "Review the API changes and update your integration accordingly."
      , migrationComplexity = if breakingChanges > 5 then "high" else if breakingChanges > 0 then "medium" else "low"
      , issueData = Aeson $ AE.toJSON apiChangeData
@@ -523,9 +527,6 @@ createRuntimeExceptionIssue projectId atError = do
      , service = fromMaybe "unknown-service" atError.serviceName
      , critical = True
      , severity = "critical"
-      , affectedRequests = 1
-      , affectedClients = 0
-      , errorRate = Nothing
      , recommendedAction = "Investigate the error and implement a fix."
      , migrationComplexity = "n/a"
      , issueData = Aeson $ AE.toJSON exceptionData
@@ -569,9 +570,6 @@ createQueryAlertIssue projectId queryId queryName queryExpr threshold actual thr
      , service = "Monitoring"
      , critical = True
      , severity = "warning"
-      , affectedRequests = 0
-      , affectedClients = 0
-      , errorRate = Nothing
      , recommendedAction = "Review the query results and take appropriate action."
      , migrationComplexity = "n/a"
      , issueData = Aeson $ AE.toJSON alertData
@@ -616,9 +614,6 @@ createNewErrorIssue projectId err = do
      , service = fromMaybe "unknown-service" err.service
      , critical = True
      , severity = "critical"
-      , affectedRequests = 1
-      , affectedClients = 0
-      , errorRate = Nothing
      , recommendedAction = "Investigate the new error and implement a fix."
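      -- the affectedRequests / affectedClients / errorRate fields removed above
      -- now travel inside the per-type issue_data payloads instead of dedicated columns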
, migrationComplexity = "n/a" , issueData = Aeson $ AE.toJSON exceptionData @@ -671,9 +666,6 @@ createErrorSpikeIssue projectId err currentRate baselineMean baselineStddev = do , service = fromMaybe "unknown-service" err.service , critical = True , severity = "critical" - , affectedRequests = round currentRate - , affectedClients = 0 - , errorRate = Just currentRate , recommendedAction = "Error rate has spiked " <> T.pack (show (round zScore :: Int)) <> " standard deviations above baseline. Current: " <> T.pack (show (round currentRate :: Int)) <> "/hr, Baseline: " <> T.pack (show (round baselineMean :: Int)) <> "/hr. Investigate recent deployments or changes." , migrationComplexity = "n/a" , issueData = Aeson $ AE.toJSON exceptionData @@ -722,9 +714,6 @@ createNewLogPatternIssue projectId lp = do Just "error" -> "high" Just "warning" -> "medium" _ -> "low" - , affectedRequests = fromIntegral lp.occurrenceCount - , affectedClients = 0 - , errorRate = Nothing , recommendedAction = "A new log pattern has been detected. Review to ensure it's expected behavior. Pattern first seen at: " <> T.pack (show lp.firstSeenAt) , migrationComplexity = "n/a" , issueData = Aeson $ AE.toJSON exceptionData @@ -782,9 +771,6 @@ createLogPatternSpikeIssue projectId lp currentRate baselineMean baselineStddev Just "error" -> "critical" Just "warning" -> "high" _ -> "medium" - , affectedRequests = round currentRate - , affectedClients = 0 - , errorRate = Just currentRate , recommendedAction = "Log pattern volume has spiked " <> T.pack (show (round zScore :: Int)) <> " standard deviations above baseline. Current: " <> T.pack (show (round currentRate :: Int)) <> "/hr, Baseline: " <> T.pack (show (round baselineMean :: Int)) <> "/hr. Investigate recent changes." , migrationComplexity = "n/a" , issueData = Aeson $ AE.toJSON exceptionData diff --git a/src/Models/Apis/LogPatterns.hs b/src/Models/Apis/LogPatterns.hs index 8ea04cc6e..b4fc4dc61 100644 --- a/src/Models/Apis/LogPatterns.hs +++ b/src/Models/Apis/LogPatterns.hs @@ -133,7 +133,9 @@ getLogPatterns pid mstate limit offset = PG.query q (pid, maybe "%" logPatternSt getLogPatternTexts :: DB es => Projects.ProjectId -> Eff es [Text] -getLogPatternTexts pid = PG.query q (Only pid) +getLogPatternTexts pid = do + result <- PG.query q (Only pid) + return $ fmap (\(Only pat) -> pat) result where q = [sql| SELECT log_pattern FROM apis.log_patterns WHERE project_id = ?|] diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index af769b8e1..f29994923 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -300,8 +300,8 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do AE.Success (exceptionData :: Issues.RuntimeExceptionData) -> do div_ [class_ "grid grid-cols-4 lg:grid-cols-8 gap-4"] do -- Stats (1 column each) - statBox_ (Just pid) Nothing "Affected Requests" "" (show issue.affectedRequests) Nothing Nothing - statBox_ (Just pid) Nothing "Affected Clients" "" (show issue.affectedClients) Nothing Nothing + statBox_ (Just pid) Nothing "Affected Requests" "" "0" Nothing Nothing + statBox_ (Just pid) Nothing "Affected Clients" "" "0" Nothing Nothing whenJust errM $ \err -> do timeStatBox_ "First Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.createdAt timeStatBox_ "Last Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.updatedAt @@ -357,7 +357,7 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do case AE.fromJSON (getAeson issue.issueData) of AE.Success (changeData :: Issues.APIChangeData) -> do div_ [class_ "grid grid-cols-4 
lg:grid-cols-8 gap-4"] do - statBox_ (Just pid) Nothing "Affected Requests" "" (show issue.affectedRequests) Nothing Nothing + statBox_ (Just pid) Nothing "Affected Requests" "" "0" Nothing Nothing statBox_ (Just pid) Nothing "New fields" "" (show $ V.length changeData.newFields) Nothing Nothing statBox_ (Just pid) Nothing "Modified fields" "" (show $ V.length changeData.modifiedFields) Nothing Nothing statBox_ (Just pid) Nothing "Deleted fields" "" (show $ V.length changeData.deletedFields) Nothing Nothing @@ -454,8 +454,8 @@ buildAIContext issue errM trDataM spans = , Just $ "- **Type**: " <> show issue.issueType , Just $ "- **Severity**: " <> issue.severity , Just $ "- **Service**: " <> issue.service - , Just $ "- **Affected Requests**: " <> show issue.affectedRequests - , Just $ "- **Affected Clients**: " <> show issue.affectedClients + , Just $ "- **Affected Requests**: 0" + , Just $ "- **Affected Clients**: 0" , Just $ "- **Recommended Action**: " <> issue.recommendedAction , errM >>= \err -> Just diff --git a/static/migrations/0028_rebuild_issues_table.sql b/static/migrations/0028_rebuild_issues_table.sql index 799c3542f..b33194208 100644 --- a/static/migrations/0028_rebuild_issues_table.sql +++ b/static/migrations/0028_rebuild_issues_table.sql @@ -1,16 +1,13 @@ BEGIN; -ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'new_error'; -ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'new_log_pattern'; -ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'api-change'; +ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'log_pattern'; ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'error_escalating'; ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'error_regressed'; ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'log_pattern_rate_change'; ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'endpoint_latency_degradation'; ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'endpoint_error_rate_spike'; ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'endpoint_volume_rate_change'; -ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'alert-triggered'; DROP TABLE IF EXISTS apis.issues CASCADE; @@ -21,7 +18,7 @@ CREATE TABLE apis.issues ( updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), issue_type apis.issue_type NOT NULL, - source_type TEXT NOT NULL, -- 'error', 'log_pattern', 'endpoint' + source_type TEXT NOT NULL, -- 'error', 'log_pattern', 'endpoint', 'shape' target_hash TEXT NOT NULL, -- links to error.hash, log_pattern.pattern_hash, endpoint.hash title TEXT NOT NULL DEFAULT '', @@ -29,20 +26,35 @@ CREATE TABLE apis.issues ( environment TEXT, severity TEXT NOT NULL DEFAULT 'warning', -- 'critical', 'warning', 'info' critical BOOLEAN NOT NULL DEFAULT FALSE, - -- Lifecycle first_seen_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), last_seen_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), - occurrence_count INT NOT NULL DEFAULT 1, acknowledged_at TIMESTAMPTZ, acknowledged_by UUID REFERENCES users.users(id) ON DELETE SET NULL, resolved_at TIMESTAMPTZ, + recommended_action TEXT NOT NULL DEFAULT '', + migration_complexity TEXT NOT NULL DEFAULT 'n/a', -- 'low', 'medium', 'high', 'n/a' + request_payloads JSONB NOT NULL DEFAULT '[]'::jsonb, + response_payloads JSONB NOT NULL DEFAULT '[]'::jsonb, archived_at TIMESTAMPTZ, - + llm_enhanced_at TIMESTAMPTZ, + llm_enhancement_version INT, -- Flexible category-specific data issue_data JSONB NOT NULL DEFAULT '{}' ); + + , -- Actions + recommendedAction :: Text + , migrationComplexity :: Text -- "low", "medium", "high", "n/a" + -- Data payload (polymorphic 
based on issueType) + , issueData :: Aeson AE.Value + , -- Payload changes tracking (for API changes) + requestPayloads :: Aeson [PayloadChange] + , responsePayloads :: Aeson [PayloadChange] + , -- LLM enhancement tracking + llmEnhancedAt :: Maybe UTCTime + , llmEnhancementVersion :: Maybe Int SELECT manage_updated_at('apis.issues'); -- Indexes From fc717f2bea68b50af63f2f029038269722762b4c Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Thu, 8 Jan 2026 15:34:09 +0000 Subject: [PATCH 12/71] different issue types creation and bj handlers --- src/BackgroundJobs.hs | 138 +++++++++- src/Models/Apis/Endpoints.hs | 196 ++++++++++++++ src/Models/Apis/Issues.hs | 505 +++++++++++++++++++++++++++++++++++ 3 files changed, 837 insertions(+), 2 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index f1e82a8ae..631b6891d 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -124,6 +124,11 @@ data BgJobs LogPatternBaselineCalculation Projects.ProjectId -- Calculate baselines for log patterns | LogPatternSpikeDetection Projects.ProjectId -- Detect log pattern volume spikes | NewLogPatternDetected Projects.ProjectId Text -- projectId, pattern hash - notify about new pattern + | -- Endpoint anomaly detection jobs + EndpointBaselineCalculation Projects.ProjectId -- Calculate baselines for endpoints (latency, error rate, volume) + | EndpointLatencyDegradationDetection Projects.ProjectId -- Detect endpoint latency degradation + | EndpointErrorRateSpikeDetection Projects.ProjectId -- Detect endpoint error rate spikes + | EndpointVolumeRateChangeDetection Projects.ProjectId -- Detect endpoint volume changes (spike/drop) deriving stock (Generic, Show) deriving anyclass (AE.FromJSON, AE.ToJSON) @@ -371,6 +376,11 @@ processBackgroundJob authCtx bgJob = LogPatternBaselineCalculation pid -> calculateLogPatternBaselines pid LogPatternSpikeDetection pid -> detectLogPatternSpikes pid authCtx NewLogPatternDetected pid patternHash -> processNewLogPattern pid patternHash authCtx + -- Endpoint anomaly detection jobs + EndpointBaselineCalculation pid -> calculateEndpointBaselines pid + EndpointLatencyDegradationDetection pid -> detectEndpointLatencyDegradation pid authCtx + EndpointErrorRateSpikeDetection pid -> detectEndpointErrorRateSpike pid authCtx + EndpointVolumeRateChangeDetection pid -> detectEndpointVolumeRateChange pid authCtx -- | Run hourly scheduled tasks for all projects @@ -409,6 +419,11 @@ runHourlyJob scheduledTime hour = do -- Log pattern baseline and spike detection createJob conn "background_jobs" $ LogPatternBaselineCalculation pid createJob conn "background_jobs" $ LogPatternSpikeDetection pid + -- Endpoint baseline and anomaly detection + createJob conn "background_jobs" $ EndpointBaselineCalculation pid + createJob conn "background_jobs" $ EndpointLatencyDegradationDetection pid + createJob conn "background_jobs" $ EndpointErrorRateSpikeDetection pid + createJob conn "background_jobs" $ EndpointVolumeRateChangeDetection pid -- Cleanup expired query cache entries deletedCount <- QueryCache.cleanupExpiredCache @@ -1795,7 +1810,7 @@ detectLogPatternSpikes pid authCtx = do -- Get full pattern record for issue creation patternM <- LogPatterns.getLogPatternById lpRate.patternId whenJust patternM \lp -> do - issue <- liftIO $ Issues.createLogPatternSpikeIssue pid lp currentRate mean stddev + issue <- liftIO $ Issues.createLogPatternRateChangeIssue pid lp currentRate mean stddev "spike" Issues.insertIssue issue liftIO $ withResource authCtx.jobsPool \conn -> void $ 
createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) @@ -1819,7 +1834,7 @@ processNewLogPattern pid patternHash authCtx = do -- Only create issue for truly new patterns (state = 'new') Relude.when (lp.state == LogPatterns.LPSNew) $ do -- Create a new log pattern issue - issue <- liftIO $ Issues.createNewLogPatternIssue pid lp + issue <- liftIO $ Issues.createLogPatternIssue pid lp Issues.insertIssue issue -- Queue LLM enhancement @@ -1827,3 +1842,122 @@ processNewLogPattern pid patternHash authCtx = do void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) Log.logInfo "Created issue for new log pattern" (pid, lp.id, issue.id) + + +-- ============================================================================ +-- Endpoint Anomaly Detection Jobs +-- ============================================================================ + +-- | Calculate baselines for endpoints (latency, error rate, volume) +-- Uses hourly stats from otel_logs_and_spans over the last 7 days +calculateEndpointBaselines :: Projects.ProjectId -> ATBackgroundCtx () +calculateEndpointBaselines pid = do + Log.logInfo "Calculating endpoint baselines" pid + endpoints <- Endpoints.getActiveEndpoints pid + + forM_ endpoints \ep -> do + -- Get hourly stats over last 7 days (168 hours) + statsM <- Endpoints.getEndpointStats pid ep.hash 168 + case statsM of + Nothing -> pass + Just stats -> do + let newSamples = stats.totalHours + -- Establish baseline after 24 hours of data + newState = if newSamples >= 24 then "established" else "learning" + Endpoints.updateEndpointBaseline + ep.id + newState + stats.hourlyMeanErrors + stats.hourlyStddevErrors + stats.meanLatency + stats.stddevLatency + stats.p95Latency + stats.p99Latency + stats.hourlyMeanRequests + stats.hourlyStddevRequests + newSamples + + Log.logInfo "Finished calculating endpoint baselines" (pid, length endpoints) + + +-- | Detect endpoint latency degradation and create issues +detectEndpointLatencyDegradation :: Projects.ProjectId -> Config.AuthContext -> ATBackgroundCtx () +detectEndpointLatencyDegradation pid authCtx = do + Log.logInfo "Detecting endpoint latency degradation" pid + + endpointsWithRates <- Endpoints.getEndpointsWithCurrentRates pid + + forM_ endpointsWithRates \epRate -> do + -- Check P95 latency degradation + case (epRate.baselineLatencyP95, epRate.baselineLatencyStddev, epRate.currentHourLatencyP95) of + (Just baselineP95, Just stddev, Just currentP95) | stddev > 0 -> do + let zScore = (currentP95 - baselineP95) / stddev + -- Degradation: >3 std devs AND at least 50% increase + isDegraded = zScore > 3.0 && currentP95 > baselineP95 * 1.5 + Relude.when isDegraded $ do + Log.logInfo "Endpoint latency degradation detected" (epRate.endpointHash, currentP95, baselineP95, zScore) + issue <- liftIO $ Issues.createEndpointLatencyDegradationIssue pid epRate.endpointHash epRate.method epRate.urlPath (Just epRate.host) currentP95 baselineP95 stddev "p95" V.empty + Issues.insertIssue issue + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + Log.logInfo "Created issue for endpoint latency degradation" (pid, epRate.endpointHash, issue.id) + _ -> pass + + Log.logInfo "Finished endpoint latency degradation detection" pid + + +-- | Detect endpoint error rate spikes and create issues +detectEndpointErrorRateSpike :: Projects.ProjectId -> Config.AuthContext -> ATBackgroundCtx () +detectEndpointErrorRateSpike pid 
authCtx = do + Log.logInfo "Detecting endpoint error rate spikes" pid + + endpointsWithRates <- Endpoints.getEndpointsWithCurrentRates pid + + forM_ endpointsWithRates \epRate -> do + case (epRate.baselineErrorRateMean, epRate.baselineErrorRateStddev) of + (Just baselineMean, Just stddev) | stddev > 0 && epRate.currentHourRequests > 0 -> do + let currentErrorRate = fromIntegral epRate.currentHourErrors / fromIntegral epRate.currentHourRequests + zScore = (currentErrorRate - baselineMean) / stddev + -- Spike: >3 std devs AND error rate > 5% AND at least 5 errors + isSpike = zScore > 3.0 && currentErrorRate > 0.05 && epRate.currentHourErrors >= 5 + + Relude.when isSpike $ do + Log.logInfo "Endpoint error rate spike detected" (epRate.endpointHash, currentErrorRate, baselineMean, zScore) + issue <- liftIO $ Issues.createEndpointErrorRateSpikeIssue + pid epRate.endpointHash epRate.method epRate.urlPath (Just epRate.host) currentErrorRate baselineMean stddev epRate.currentHourErrors epRate.currentHourRequests V.empty + Issues.insertIssue issue + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + Log.logInfo "Created issue for endpoint error rate spike" (pid, epRate.endpointHash, issue.id) + _ -> pass + + Log.logInfo "Finished endpoint error rate spike detection" pid + + +-- | Detect endpoint volume rate changes (spike or drop) and create issues +detectEndpointVolumeRateChange :: Projects.ProjectId -> Config.AuthContext -> ATBackgroundCtx () +detectEndpointVolumeRateChange pid authCtx = do + Log.logInfo "Detecting endpoint volume rate changes" pid + endpointsWithRates <- Endpoints.getEndpointsWithCurrentRates pid + forM_ endpointsWithRates \epRate -> do + case (epRate.baselineVolumeHourlyMean, epRate.baselineVolumeHourlyStddev) of + (Just baselineMean, Just stddev) | stddev > 0 && baselineMean > 10 -> do + let currentRate = fromIntegral epRate.currentHourRequests + zScore = (currentRate - baselineMean) / stddev + absZScore = abs zScore + -- Significant change: >3 std devs in either direction + isSignificantChange = absZScore > 3.0 + direction = if currentRate > baselineMean then "spike" else "drop" + Relude.when isSignificantChange $ do + Log.logInfo "Endpoint volume rate change detected" (epRate.endpointHash, currentRate, baselineMean, zScore, direction) + issue <- liftIO $ Issues.createEndpointVolumeRateChangeIssue + pid epRate.endpointHash epRate.method epRate.urlPath (Just epRate.host) currentRate baselineMean stddev direction + + Issues.insertIssue issue + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + + Log.logInfo "Created issue for endpoint volume rate change" (pid, epRate.endpointHash, issue.id, direction) + _ -> pass + + Log.logInfo "Finished endpoint volume rate change detection" pid diff --git a/src/Models/Apis/Endpoints.hs b/src/Models/Apis/Endpoints.hs index 368c563dc..23849569a 100644 --- a/src/Models/Apis/Endpoints.hs +++ b/src/Models/Apis/Endpoints.hs @@ -8,10 +8,17 @@ module Models.Apis.Endpoints ( EndpointRequestStats (..), Host (..), HostEvents (..), + EndpointWithCurrentRates (..), + EndpointStats (..), bulkInsertEndpoints, dependenciesAndEventsCount, endpointRequestStatsByProject, countEndpointInbox, + getEndpointsWithCurrentRates, + getEndpointStats, + updateEndpointBaseline, + getEndpointByHash, + getActiveEndpoints, ) where @@ -233,3 +240,192 @@ countEndpointInbox pid host requestType = do 
AND ann.acknowledged_at IS NULL AND host = ? |] + + +-- | Endpoint with current hourly rates for anomaly detection +data EndpointWithCurrentRates = EndpointWithCurrentRates + { endpointId :: EndpointId + , endpointHash :: Text + , method :: Text + , urlPath :: Text + , host :: Text + , baselineState :: Text + , baselineErrorRateMean :: Maybe Double + , baselineErrorRateStddev :: Maybe Double + , baselineLatencyMean :: Maybe Double + , baselineLatencyStddev :: Maybe Double + , baselineLatencyP95 :: Maybe Double + , baselineLatencyP99 :: Maybe Double + , baselineVolumeHourlyMean :: Maybe Double + , baselineVolumeHourlyStddev :: Maybe Double + , currentHourRequests :: Int + , currentHourErrors :: Int + , currentHourLatencyP50 :: Maybe Double + , currentHourLatencyP95 :: Maybe Double + , currentHourLatencyP99 :: Maybe Double + } + deriving stock (Generic, Show) + deriving anyclass (Default, FromRow, NFData) + + +-- | Endpoint stats for baseline calculation +data EndpointStats = EndpointStats + { totalHours :: Int + , hourlyMeanRequests :: Double + , hourlyStddevRequests :: Double + , hourlyMeanErrors :: Double + , hourlyStddevErrors :: Double + , meanLatency :: Double + , stddevLatency :: Double + , p95Latency :: Double + , p99Latency :: Double + } + deriving stock (Generic, Show) + deriving anyclass (Default, FromRow, NFData) + + +-- | Get endpoints with their current hourly rates for anomaly detection +getEndpointsWithCurrentRates :: DB es => Projects.ProjectId -> Eff es [EndpointWithCurrentRates] +getEndpointsWithCurrentRates pid = PG.query q (pid, pid) + where + q = + [sql| + WITH current_hour_stats AS ( + SELECT + attributes->'http'->>'route' AS url_path, + attributes___http___request___method AS method, + COUNT(*) AS request_count, + COUNT(*) FILTER (WHERE status_code >= 500 OR status_code = 0) AS error_count, + PERCENTILE_CONT(0.50) WITHIN GROUP (ORDER BY duration) AS p50_latency, + PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY duration) AS p95_latency, + PERCENTILE_CONT(0.99) WITHIN GROUP (ORDER BY duration) AS p99_latency + FROM otel_logs_and_spans + WHERE project_id = ? + AND name = 'monoscope.http' + AND timestamp >= NOW() - INTERVAL '1 hour' + GROUP BY url_path, method + ) + SELECT + e.id AS endpoint_id, + e.hash AS endpoint_hash, + e.method, + e.url_path, + e.host, + e.baseline_state, + e.baseline_error_rate_mean, + e.baseline_error_rate_stddev, + e.baseline_latency_mean, + e.baseline_latency_stddev, + e.baseline_latency_p95, + e.baseline_latency_p99, + e.baseline_volume_hourly_mean, + e.baseline_volume_hourly_stddev, + COALESCE(chs.request_count, 0)::int AS current_hour_requests, + COALESCE(chs.error_count, 0)::int AS current_hour_errors, + chs.p50_latency AS current_hour_latency_p50, + chs.p95_latency AS current_hour_latency_p95, + chs.p99_latency AS current_hour_latency_p99 + FROM apis.endpoints e + LEFT JOIN current_hour_stats chs + ON e.url_path = chs.url_path AND e.method = chs.method + WHERE e.project_id = ? 
+ AND e.baseline_state = 'established' + |] + + +-- | Get endpoint stats for baseline calculation over N hours +getEndpointStats :: DB es => Projects.ProjectId -> Text -> Int -> Eff es (Maybe EndpointStats) +getEndpointStats pid endpointHash hours = listToMaybe <$> PG.query q (pid, endpointHash, hours) + where + q = + [sql| + WITH hourly_stats AS ( + SELECT + DATE_TRUNC('hour', timestamp) AS hour, + COUNT(*) AS request_count, + COUNT(*) FILTER (WHERE status_code >= 500 OR status_code = 0) AS error_count, + AVG(duration) AS avg_latency, + PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY duration) AS p95_latency, + PERCENTILE_CONT(0.99) WITHIN GROUP (ORDER BY duration) AS p99_latency + FROM otel_logs_and_spans ols + JOIN apis.endpoints e ON e.url_path = (ols.attributes->'http'->>'route') + AND e.method = ols.attributes___http___request___method + WHERE ols.project_id = ? + AND e.hash = ? + AND ols.name = 'monoscope.http' + AND ols.timestamp >= NOW() - MAKE_INTERVAL(hours => ?) + GROUP BY DATE_TRUNC('hour', timestamp) + ) + SELECT + COUNT(*)::int AS total_hours, + COALESCE(AVG(request_count), 0) AS hourly_mean_requests, + COALESCE(STDDEV(request_count), 0) AS hourly_stddev_requests, + COALESCE(AVG(error_count::float / NULLIF(request_count, 0)), 0) AS hourly_mean_errors, + COALESCE(STDDEV(error_count::float / NULLIF(request_count, 0)), 0) AS hourly_stddev_errors, + COALESCE(AVG(avg_latency), 0) AS mean_latency, + COALESCE(STDDEV(avg_latency), 0) AS stddev_latency, + COALESCE(AVG(p95_latency), 0) AS p95_latency, + COALESCE(AVG(p99_latency), 0) AS p99_latency + FROM hourly_stats + HAVING COUNT(*) > 0 + |] + + +-- | Update endpoint baseline values +updateEndpointBaseline + :: DB es + => EndpointId + -> Text -- baseline state + -> Double -- error rate mean + -> Double -- error rate stddev + -> Double -- latency mean + -> Double -- latency stddev + -> Double -- latency p95 + -> Double -- latency p99 + -> Double -- volume mean + -> Double -- volume stddev + -> Int -- samples + -> Eff es () +updateEndpointBaseline eid state errMean errStddev latMean latStddev latP95 latP99 volMean volStddev samples = + void $ PG.execute q (state, errMean, errStddev, latMean, latStddev, latP95, latP99, volMean, volStddev, samples, eid) + where + q = + [sql| + UPDATE apis.endpoints + SET baseline_state = ?, + baseline_error_rate_mean = ?, + baseline_error_rate_stddev = ?, + baseline_latency_mean = ?, + baseline_latency_stddev = ?, + baseline_latency_p95 = ?, + baseline_latency_p99 = ?, + baseline_volume_hourly_mean = ?, + baseline_volume_hourly_stddev = ?, + baseline_samples = ?, + baseline_updated_at = NOW() + WHERE id = ? + |] + + +-- | Get endpoint by hash +getEndpointByHash :: DB es => Projects.ProjectId -> Text -> Eff es (Maybe Endpoint) +getEndpointByHash pid hash = listToMaybe <$> PG.query q (pid, hash) + where + q = + [sql| + SELECT id, created_at, updated_at, project_id, url_path, url_params, method, host, hash, outgoing, description + FROM apis.endpoints + WHERE project_id = ? AND hash = ? + |] + + +-- | Get all active endpoints for a project (for baseline calculation) +getActiveEndpoints :: DB es => Projects.ProjectId -> Eff es [Endpoint] +getActiveEndpoints pid = PG.query q (Only pid) + where + q = + [sql| + SELECT id, created_at, updated_at, project_id, url_path, url_params, method, host, hash, outgoing, description + FROM apis.endpoints + WHERE project_id = ? 
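+        -- note: no baseline_state filter here; endpoints still in 'learning' must
+        -- keep accumulating samples until calculateEndpointBaselines promotes them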
+ |] diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index 77929a885..00d54fdcf 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -23,6 +23,13 @@ module Models.Apis.Issues ( APIChangeData (..), RuntimeExceptionData (..), QueryAlertData (..), + LogPatternData (..), + ErrorEscalatingData (..), + ErrorRegressedData (..), + LogPatternRateChangeData (..), + EndpointLatencyDegradationData (..), + EndpointErrorRateSpikeData (..), + EndpointVolumeRateChangeData (..), -- * Database Operations insertIssue, @@ -43,6 +50,13 @@ module Models.Apis.Issues ( createErrorSpikeIssue, createNewLogPatternIssue, createLogPatternSpikeIssue, + createLogPatternIssue, + createErrorEscalatingIssue, + createErrorRegressedIssue, + createLogPatternRateChangeIssue, + createEndpointLatencyDegradationIssue, + createEndpointErrorRateSpikeIssue, + createEndpointVolumeRateChangeIssue, -- * Utilities issueIdText, @@ -215,6 +229,145 @@ data QueryAlertData = QueryAlertData deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] QueryAlertData +-- | Log Pattern issue data (new pattern detected) +data LogPatternData = LogPatternData + { patternHash :: Text + , logPattern :: Text + , sampleMessage :: Maybe Text + , logLevel :: Maybe Text + , serviceName :: Maybe Text + , firstSeenAt :: UTCTime + , occurrenceCount :: Int + } + deriving stock (Generic, Show) + deriving anyclass (NFData) + deriving (FromField, ToField) via Aeson LogPatternData + deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] LogPatternData + + +-- | Error Escalating issue data (error rate increasing over time) +data ErrorEscalatingData = ErrorEscalatingData + { errorHash :: Text + , exceptionType :: Text + , errorMessage :: Text + , serviceName :: Maybe Text + , currentState :: Text -- "escalating" + , previousState :: Text -- "new" or "ongoing" + , occurrences1h :: Int + , occurrences24h :: Int + , escalationRate :: Double -- rate of increase (e.g., 2.5x) + , escalationWindow :: Text -- "1h", "6h", "24h" + , firstSeenAt :: UTCTime + , lastSeenAt :: UTCTime + } + deriving stock (Generic, Show) + deriving anyclass (NFData) + deriving (FromField, ToField) via Aeson ErrorEscalatingData + deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] ErrorEscalatingData + + +-- | Error Regressed issue data (previously resolved error returned) +data ErrorRegressedData = ErrorRegressedData + { errorHash :: Text + , exceptionType :: Text + , errorMessage :: Text + , serviceName :: Maybe Text + , resolvedAt :: UTCTime -- when it was previously resolved + , regressedAt :: UTCTime -- when it came back + , quietPeriodMinutes :: Int -- how long it was quiet + , previousOccurrences :: Int -- count before resolution + , newOccurrences :: Int -- count since regression + } + deriving stock (Generic, Show) + deriving anyclass (NFData) + deriving (FromField, ToField) via Aeson ErrorRegressedData + deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] ErrorRegressedData + + +-- | Log Pattern Rate Change issue data (volume spike/drop) +data LogPatternRateChangeData = LogPatternRateChangeData + { patternHash :: Text + , logPattern :: Text + , sampleMessage :: Maybe Text + , logLevel :: Maybe Text + , serviceName :: Maybe Text + , currentRatePerHour :: Double + , baselineMean :: 
Double + , baselineStddev :: Double + , zScore :: Double -- standard deviations from baseline + , changePercent :: Double -- percentage change from baseline + , changeDirection :: Text -- "spike" or "drop" + , detectedAt :: UTCTime + } + deriving stock (Generic, Show) + deriving anyclass (NFData) + deriving (FromField, ToField) via Aeson LogPatternRateChangeData + deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] LogPatternRateChangeData + + +-- | Endpoint Latency Degradation issue data +data EndpointLatencyDegradationData = EndpointLatencyDegradationData + { endpointHash :: Text + , endpointMethod :: Text + , endpointPath :: Text + , serviceName :: Maybe Text + , currentLatencyMs :: Double -- current p50/p95/p99 + , baselineLatencyMs :: Double -- baseline p50/p95/p99 + , baselineStddev :: Double + , zScore :: Double + , degradationPercent :: Double -- percentage increase + , percentile :: Text -- "p50", "p95", "p99" + , sampleTraceIds :: V.Vector Text -- example traces for investigation + , detectedAt :: UTCTime + } + deriving stock (Generic, Show) + deriving anyclass (NFData) + deriving (FromField, ToField) via Aeson EndpointLatencyDegradationData + deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] EndpointLatencyDegradationData + + +-- | Endpoint Error Rate Spike issue data +data EndpointErrorRateSpikeData = EndpointErrorRateSpikeData + { endpointHash :: Text + , endpointMethod :: Text + , endpointPath :: Text + , serviceName :: Maybe Text + , currentErrorRate :: Double -- current error rate (0.0 - 1.0) + , baselineErrorRate :: Double -- baseline error rate + , baselineStddev :: Double + , zScore :: Double + , spikePercent :: Double -- percentage increase + , errorCount :: Int -- number of errors in window + , totalRequests :: Int -- total requests in window + , topErrorTypes :: V.Vector Text -- most common error types + , detectedAt :: UTCTime + } + deriving stock (Generic, Show) + deriving anyclass (NFData) + deriving (FromField, ToField) via Aeson EndpointErrorRateSpikeData + deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] EndpointErrorRateSpikeData + + +-- | Endpoint Volume Rate Change issue data (traffic spike or drop) +data EndpointVolumeRateChangeData = EndpointVolumeRateChangeData + { endpointHash :: Text + , endpointMethod :: Text + , endpointPath :: Text + , serviceName :: Maybe Text + , currentRatePerHour :: Double -- current requests/hour + , baselineRatePerHour :: Double -- baseline requests/hour + , baselineStddev :: Double + , zScore :: Double + , changePercent :: Double -- percentage change + , changeDirection :: Text -- "spike" or "drop" + , detectedAt :: UTCTime + } + deriving stock (Generic, Show) + deriving anyclass (NFData) + deriving (FromField, ToField) via Aeson EndpointVolumeRateChangeData + deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] EndpointVolumeRateChangeData + + -- | Main Issue type data Issue = Issue { id :: IssueId @@ -781,6 +934,358 @@ createLogPatternSpikeIssue projectId lp currentRate baselineMean baselineStddev } +-- | Create an issue for a new log pattern (using LogPatternData) +createLogPatternIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> IO Issue +createLogPatternIssue projectId lp = do + issueId <- UUIDId <$> UUID4.nextRandom + now <- 
getCurrentTime + zonedNow <- utcToLocalZonedTime now + + let logPatternData = + LogPatternData + { patternHash = lp.patternHash + , logPattern = lp.logPattern + , sampleMessage = lp.sampleMessage + , logLevel = lp.logLevel + , serviceName = lp.serviceName + , firstSeenAt = now + , occurrenceCount = fromIntegral lp.occurrenceCount + } + + pure + Issue + { id = issueId + , createdAt = zonedNow + , updatedAt = zonedNow + , projectId = projectId + , issueType = LogPattern + , endpointHash = lp.patternHash + , acknowledgedAt = Nothing + , acknowledgedBy = Nothing + , archivedAt = Nothing + , title = "New Log Pattern: " <> T.take 100 lp.logPattern + , service = fromMaybe "unknown-service" lp.serviceName + , critical = lp.logLevel == Just "error" + , severity = case lp.logLevel of + Just "error" -> "critical" + Just "warning" -> "warning" + _ -> "info" + , recommendedAction = "A new log pattern has been detected. Review to ensure it's expected behavior." + , migrationComplexity = "n/a" + , issueData = Aeson $ AE.toJSON logPatternData + , requestPayloads = Aeson [] + , responsePayloads = Aeson [] + , llmEnhancedAt = Nothing + , llmEnhancementVersion = Nothing + } + + +-- | Create an issue for an escalating error +createErrorEscalatingIssue + :: Projects.ProjectId + -> Errors.Error + -> Text -- previous state + -> Double -- escalation rate + -> Text -- escalation window + -> IO Issue +createErrorEscalatingIssue projectId err prevState escalationRate escalationWindow = do + issueId <- UUIDId <$> UUID4.nextRandom + now <- getCurrentTime + zonedNow <- utcToLocalZonedTime now + + let escalatingData = + ErrorEscalatingData + { errorHash = err.hash + , exceptionType = err.exceptionType + , errorMessage = err.message + , serviceName = err.service + , currentState = "escalating" + , previousState = prevState + , occurrences1h = err.occurrences1h + , occurrences24h = err.occurrences24h + , escalationRate = escalationRate + , escalationWindow = escalationWindow + , firstSeenAt = now + , lastSeenAt = now + } + + pure + Issue + { id = issueId + , createdAt = zonedNow + , updatedAt = zonedNow + , projectId = projectId + , issueType = ErrorEscalating + , endpointHash = err.hash + , acknowledgedAt = Nothing + , acknowledgedBy = Nothing + , archivedAt = Nothing + , title = "Error Escalating: " <> err.exceptionType <> " (" <> T.pack (show (round (escalationRate * 100) :: Int)) <> "% increase)" + , service = fromMaybe "unknown-service" err.service + , critical = True + , severity = "critical" + , recommendedAction = "Error rate is escalating (" <> T.pack (show escalationRate) <> "x over " <> escalationWindow <> "). Investigate immediately." 
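+      -- Illustrative rendering (assumed inputs, not from production data):
+      -- with escalationRate = 2.5 and escalationWindow = "1h", the title
+      -- above reads "Error Escalating: <exceptionType> (250% increase)" and
+      -- the recommended action reads "Error rate is escalating (2.5x over 1h). Investigate immediately."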
+ , migrationComplexity = "n/a" + , issueData = Aeson $ AE.toJSON escalatingData + , requestPayloads = Aeson [] + , responsePayloads = Aeson [] + , llmEnhancedAt = Nothing + , llmEnhancementVersion = Nothing + } + + +-- | Create an issue for a regressed error +createErrorRegressedIssue :: Projects.ProjectId -> Errors.Error -> UTCTime -> Int -> Int -> IO Issue +createErrorRegressedIssue projectId err resolvedAtTime quietMins prevOccurrences = do + issueId <- UUIDId <$> UUID4.nextRandom + now <- getCurrentTime + zonedNow <- utcToLocalZonedTime now + + let regressedData = + ErrorRegressedData + { errorHash = err.hash + , exceptionType = err.exceptionType + , errorMessage = err.message + , serviceName = err.service + , resolvedAt = resolvedAtTime + , regressedAt = now + , quietPeriodMinutes = quietMins + , previousOccurrences = prevOccurrences + , newOccurrences = 1 + } + + pure + Issue + { id = issueId + , createdAt = zonedNow + , updatedAt = zonedNow + , projectId = projectId + , issueType = ErrorRegressed + , endpointHash = err.hash + , acknowledgedAt = Nothing + , acknowledgedBy = Nothing + , archivedAt = Nothing + , title = "Error Regressed: " <> err.exceptionType <> " (after " <> T.pack (show quietMins) <> " min quiet)" + , service = fromMaybe "unknown-service" err.service + , critical = True + , severity = "critical" + , recommendedAction = "Previously resolved error has returned after " <> T.pack (show quietMins) <> " minutes. The original fix may be incomplete." + , migrationComplexity = "n/a" + , issueData = Aeson $ AE.toJSON regressedData + , requestPayloads = Aeson [] + , responsePayloads = Aeson [] + , llmEnhancedAt = Nothing + , llmEnhancementVersion = Nothing + } + + +-- | Create an issue for a log pattern rate change +createLogPatternRateChangeIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> Double -> Double -> Double -> Text -> IO Issue +createLogPatternRateChangeIssue projectId lp currentRate baselineMean baselineStddev direction = do + issueId <- UUIDId <$> UUID4.nextRandom + now <- getCurrentTime + zonedNow <- utcToLocalZonedTime now + + let zScoreVal = if baselineStddev > 0 then abs (currentRate - baselineMean) / baselineStddev else 0 + changePercentVal = if baselineMean > 0 then abs ((currentRate / baselineMean) - 1) * 100 else 0 + rateChangeData = + LogPatternRateChangeData + { patternHash = lp.patternHash + , logPattern = lp.logPattern + , sampleMessage = lp.sampleMessage + , logLevel = lp.logLevel + , serviceName = lp.serviceName + , currentRatePerHour = currentRate + , baselineMean = baselineMean + , baselineStddev = baselineStddev + , zScore = zScoreVal + , changePercent = changePercentVal + , changeDirection = direction + , detectedAt = now + } + + pure + Issue + { id = issueId + , createdAt = zonedNow + , updatedAt = zonedNow + , projectId = projectId + , issueType = LogPatternRateChange + , endpointHash = lp.patternHash + , acknowledgedAt = Nothing + , acknowledgedBy = Nothing + , archivedAt = Nothing + , title = "Log Pattern " <> T.toTitle direction <> ": " <> T.take 60 lp.logPattern <> " (" <> T.pack (show (round changePercentVal :: Int)) <> "%)" + , service = fromMaybe "unknown-service" lp.serviceName + , critical = direction == "spike" && lp.logLevel == Just "error" + , severity = case (direction, lp.logLevel) of + ("spike", Just "error") -> "critical" + ("spike", _) -> "warning" + ("drop", _) -> "info" + _ -> "info" + , recommendedAction = "Log pattern volume " <> direction <> " detected. 
Current: " <> T.pack (show (round currentRate :: Int)) <> "/hr, Baseline: " <> T.pack (show (round baselineMean :: Int)) <> "/hr (" <> T.pack (show (round zScoreVal :: Int)) <> " std devs)." + , migrationComplexity = "n/a" + , issueData = Aeson $ AE.toJSON rateChangeData + , requestPayloads = Aeson [] + , responsePayloads = Aeson [] + , llmEnhancedAt = Nothing + , llmEnhancementVersion = Nothing + } + + +-- | Create an issue for endpoint latency degradation +createEndpointLatencyDegradationIssue :: Projects.ProjectId -> Text -> Text -> Text -> Maybe Text -> Double -> Double -> Double -> Text -> V.Vector Text -> IO Issue +createEndpointLatencyDegradationIssue projectId epHash method path serviceName currentLatency baselineLatency baselineStddev percentile traceIds = do + issueId <- UUIDId <$> UUID4.nextRandom + now <- getCurrentTime + zonedNow <- utcToLocalZonedTime now + + let zScoreVal = if baselineStddev > 0 then (currentLatency - baselineLatency) / baselineStddev else 0 + degradationPct = if baselineLatency > 0 then ((currentLatency / baselineLatency) - 1) * 100 else 0 + latencyData = + EndpointLatencyDegradationData + { endpointHash = epHash + , endpointMethod = method + , endpointPath = path + , serviceName = serviceName + , currentLatencyMs = currentLatency + , baselineLatencyMs = baselineLatency + , baselineStddev = baselineStddev + , zScore = zScoreVal + , degradationPercent = degradationPct + , percentile = percentile + , sampleTraceIds = traceIds + , detectedAt = now + } + + pure + Issue + { id = issueId + , createdAt = zonedNow + , updatedAt = zonedNow + , projectId = projectId + , issueType = EndpointLatencyDegradation + , endpointHash = epHash + , acknowledgedAt = Nothing + , acknowledgedBy = Nothing + , archivedAt = Nothing + , title = "Latency Degradation: " <> method <> " " <> path <> " (" <> percentile <> " +" <> T.pack (show (round degradationPct :: Int)) <> "%)" + , service = fromMaybe "unknown-service" serviceName + , critical = degradationPct > 100 + , severity = if degradationPct > 100 then "critical" else if degradationPct > 50 then "warning" else "info" + , recommendedAction = "Endpoint " <> percentile <> " latency increased from " <> T.pack (show (round baselineLatency :: Int)) <> "ms to " <> T.pack (show (round currentLatency :: Int)) <> "ms. Check recent deployments and dependencies." 
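+      -- Worked example (illustrative numbers): with baselineLatency = 200ms,
+      -- baselineStddev = 40ms and currentLatency = 350ms,
+      -- zScore = (350 - 200) / 40 = 3.75 and
+      -- degradationPercent = ((350 / 200) - 1) * 100 = 75,
+      -- so critical = False and severity resolves to "warning" (50 < 75 <= 100).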
+ , migrationComplexity = "n/a" + , issueData = Aeson $ AE.toJSON latencyData + , requestPayloads = Aeson [] + , responsePayloads = Aeson [] + , llmEnhancedAt = Nothing + , llmEnhancementVersion = Nothing + } + + +-- | Create an issue for endpoint error rate spike +createEndpointErrorRateSpikeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Maybe Text -> Double -> Double -> Double -> Int -> Int -> V.Vector Text -> IO Issue +createEndpointErrorRateSpikeIssue projectId epHash method path serviceName currentRate baselineRate baselineStddev errorCount totalReqs topErrors = do + issueId <- UUIDId <$> UUID4.nextRandom + now <- getCurrentTime + zonedNow <- utcToLocalZonedTime now + + let zScoreVal = if baselineStddev > 0 then (currentRate - baselineRate) / baselineStddev else 0 + spikePct = if baselineRate > 0 then ((currentRate / baselineRate) - 1) * 100 else currentRate * 100 + errorRateData = + EndpointErrorRateSpikeData + { endpointHash = epHash + , endpointMethod = method + , endpointPath = path + , serviceName = serviceName + , currentErrorRate = currentRate + , baselineErrorRate = baselineRate + , baselineStddev = baselineStddev + , zScore = zScoreVal + , spikePercent = spikePct + , errorCount = errorCount + , totalRequests = totalReqs + , topErrorTypes = topErrors + , detectedAt = now + } + + pure + Issue + { id = issueId + , createdAt = zonedNow + , updatedAt = zonedNow + , projectId = projectId + , issueType = EndpointErrorRateSpike + , endpointHash = epHash + , acknowledgedAt = Nothing + , acknowledgedBy = Nothing + , archivedAt = Nothing + , title = "Error Rate Spike: " <> method <> " " <> path <> " (" <> T.pack (show (round (currentRate * 100) :: Int)) <> "% errors)" + , service = fromMaybe "unknown-service" serviceName + , critical = currentRate > 0.1 + , severity = if currentRate > 0.1 then "critical" else if currentRate > 0.05 then "warning" else "info" + , recommendedAction = "Error rate spiked from " <> T.pack (show (round (baselineRate * 100) :: Int)) <> "% to " <> T.pack (show (round (currentRate * 100) :: Int)) <> "% (" <> T.pack (show errorCount) <> "/" <> T.pack (show totalReqs) <> " requests failed)." 
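+      -- Worked example (illustrative numbers): with baselineRate = 0.02,
+      -- baselineStddev = 0.005 and currentRate = 0.12,
+      -- zScore = (0.12 - 0.02) / 0.005 = 20 and
+      -- spikePercent = ((0.12 / 0.02) - 1) * 100 = 500;
+      -- currentRate > 0.1 marks the issue critical, and the title shows "12% errors".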
+ , migrationComplexity = "n/a" + , issueData = Aeson $ AE.toJSON errorRateData + , requestPayloads = Aeson [] + , responsePayloads = Aeson [] + , llmEnhancedAt = Nothing + , llmEnhancementVersion = Nothing + } + + +createEndpointVolumeRateChangeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Maybe Text -> Double -> Double -> Double -> Text -> IO Issue +createEndpointVolumeRateChangeIssue projectId epHash method path serviceName currentRate baselineRate baselineStddev direction = do + issueId <- UUIDId <$> UUID4.nextRandom + now <- getCurrentTime + zonedNow <- utcToLocalZonedTime now + + let zScoreVal = if baselineStddev > 0 then abs (currentRate - baselineRate) / baselineStddev else 0 + changePct = if baselineRate > 0 then abs ((currentRate / baselineRate) - 1) * 100 else 0 + volumeData = + EndpointVolumeRateChangeData + { endpointHash = epHash + , endpointMethod = method + , endpointPath = path + , serviceName = serviceName + , currentRatePerHour = currentRate + , baselineRatePerHour = baselineRate + , baselineStddev = baselineStddev + , zScore = zScoreVal + , changePercent = changePct + , changeDirection = direction + , detectedAt = now + } + + pure + Issue + { id = issueId + , createdAt = zonedNow + , updatedAt = zonedNow + , projectId = projectId + , issueType = EndpointVolumeRateChange + , endpointHash = epHash + , acknowledgedAt = Nothing + , acknowledgedBy = Nothing + , archivedAt = Nothing + , title = "Traffic " <> T.toTitle direction <> ": " <> method <> " " <> path <> " (" <> T.pack (show (round changePct :: Int)) <> "%)" + , service = fromMaybe "unknown-service" serviceName + , critical = direction == "drop" && changePct > 80 + , severity = case (direction, changePct) of + ("drop", pct) | pct > 80 -> "critical" + ("drop", pct) | pct > 50 -> "warning" + ("spike", pct) | pct > 200 -> "warning" + _ -> "info" + , recommendedAction = "Endpoint traffic " <> direction <> " detected. Current: " <> T.pack (show (round currentRate :: Int)) <> " req/hr, Baseline: " <> T.pack (show (round baselineRate :: Int)) <> " req/hr." 
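+      -- Worked example (illustrative numbers): with baselineRate = 1000 req/hr,
+      -- baselineStddev = 100 and currentRate = 150 (so callers pass direction = "drop"),
+      -- zScore = abs (150 - 1000) / 100 = 8.5 and
+      -- changePercent = abs ((150 / 1000) - 1) * 100 = 85,
+      -- making the issue critical: a "drop" with a change above 80%.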
+ , migrationComplexity = "n/a" + , issueData = Aeson $ AE.toJSON volumeData + , requestPayloads = Aeson [] + , responsePayloads = Aeson [] + , llmEnhancedAt = Nothing + , llmEnhancementVersion = Nothing + } + + -- | Conversation type for AI chats data ConversationType = CTAnomaly | CTTrace | CTLogExplorer | CTDashboard | CTSlackThread | CTDiscordThread deriving stock (Eq, Generic, Read, Show) From 924af724bf415a39d57f8879e0f87822d3e724c1 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 8 Jan 2026 15:36:02 +0000 Subject: [PATCH 13/71] Auto-format code with fourmolu --- src/BackgroundJobs.hs | 30 ++++++++++++++++++++++++++---- src/Models/Apis/Anomalies.hs | 1 - src/Models/Apis/Issues.hs | 12 +++++++----- src/Pages/Anomalies.hs | 4 ++-- 4 files changed, 35 insertions(+), 12 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 631b6891d..dd4d5f44a 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -1923,8 +1923,20 @@ detectEndpointErrorRateSpike pid authCtx = do Relude.when isSpike $ do Log.logInfo "Endpoint error rate spike detected" (epRate.endpointHash, currentErrorRate, baselineMean, zScore) - issue <- liftIO $ Issues.createEndpointErrorRateSpikeIssue - pid epRate.endpointHash epRate.method epRate.urlPath (Just epRate.host) currentErrorRate baselineMean stddev epRate.currentHourErrors epRate.currentHourRequests V.empty + issue <- + liftIO + $ Issues.createEndpointErrorRateSpikeIssue + pid + epRate.endpointHash + epRate.method + epRate.urlPath + (Just epRate.host) + currentErrorRate + baselineMean + stddev + epRate.currentHourErrors + epRate.currentHourRequests + V.empty Issues.insertIssue issue liftIO $ withResource authCtx.jobsPool \conn -> void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) @@ -1950,8 +1962,18 @@ detectEndpointVolumeRateChange pid authCtx = do direction = if currentRate > baselineMean then "spike" else "drop" Relude.when isSignificantChange $ do Log.logInfo "Endpoint volume rate change detected" (epRate.endpointHash, currentRate, baselineMean, zScore, direction) - issue <- liftIO $ Issues.createEndpointVolumeRateChangeIssue - pid epRate.endpointHash epRate.method epRate.urlPath (Just epRate.host) currentRate baselineMean stddev direction + issue <- + liftIO + $ Issues.createEndpointVolumeRateChangeIssue + pid + epRate.endpointHash + epRate.method + epRate.urlPath + (Just epRate.host) + currentRate + baselineMean + stddev + direction Issues.insertIssue issue liftIO $ withResource authCtx.jobsPool \conn -> diff --git a/src/Models/Apis/Anomalies.hs b/src/Models/Apis/Anomalies.hs index 09fe77ede..ef1d456f7 100644 --- a/src/Models/Apis/Anomalies.hs +++ b/src/Models/Apis/Anomalies.hs @@ -349,7 +349,6 @@ instance Default IssuesData where def = IDEmpty - data IssueEventAgg = IssueEventAgg { count :: Int , lastSeen :: UTCTime diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index 00d54fdcf..bcfb049c0 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -152,6 +152,7 @@ issueTypeToText EndpointLatencyDegradation = "endpoint_latency_degradation" issueTypeToText EndpointErrorRateSpike = "endpoint_error_rate_spike" issueTypeToText EndpointVolumeRateChange = "endpoint_volume_rate_change" + parseIssueType :: Text -> Maybe IssueType parseIssueType "api_change" = Just APIChange parseIssueType "shape" = Just APIChange -- Handle DB anomaly_type @@ -178,6 +179,7 @@ instance FromField IssueType where Just t -> pure t Nothing -> returnError ConversionFailed f $ 
"Unknown issue type: " <> decodeUtf8 bs + -- | API Change issue data data APIChangeData = APIChangeData { endpointMethod :: Text @@ -1035,7 +1037,7 @@ createErrorEscalatingIssue projectId err prevState escalationRate escalationWind -- | Create an issue for a regressed error -createErrorRegressedIssue :: Projects.ProjectId -> Errors.Error -> UTCTime -> Int -> Int -> IO Issue +createErrorRegressedIssue :: Projects.ProjectId -> Errors.Error -> UTCTime -> Int -> Int -> IO Issue createErrorRegressedIssue projectId err resolvedAtTime quietMins prevOccurrences = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime @@ -1080,7 +1082,7 @@ createErrorRegressedIssue projectId err resolvedAtTime quietMins prevOccurrences -- | Create an issue for a log pattern rate change -createLogPatternRateChangeIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> Double -> Double -> Double -> Text -> IO Issue +createLogPatternRateChangeIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> Double -> Double -> Double -> Text -> IO Issue createLogPatternRateChangeIssue projectId lp currentRate baselineMean baselineStddev direction = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime @@ -1134,7 +1136,7 @@ createLogPatternRateChangeIssue projectId lp currentRate baselineMean baselineSt -- | Create an issue for endpoint latency degradation -createEndpointLatencyDegradationIssue :: Projects.ProjectId -> Text -> Text -> Text -> Maybe Text -> Double -> Double -> Double -> Text -> V.Vector Text -> IO Issue +createEndpointLatencyDegradationIssue :: Projects.ProjectId -> Text -> Text -> Text -> Maybe Text -> Double -> Double -> Double -> Text -> V.Vector Text -> IO Issue createEndpointLatencyDegradationIssue projectId epHash method path serviceName currentLatency baselineLatency baselineStddev percentile traceIds = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime @@ -1184,7 +1186,7 @@ createEndpointLatencyDegradationIssue projectId epHash method path serviceName c -- | Create an issue for endpoint error rate spike -createEndpointErrorRateSpikeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Maybe Text -> Double -> Double -> Double -> Int -> Int -> V.Vector Text -> IO Issue +createEndpointErrorRateSpikeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Maybe Text -> Double -> Double -> Double -> Int -> Int -> V.Vector Text -> IO Issue createEndpointErrorRateSpikeIssue projectId epHash method path serviceName currentRate baselineRate baselineStddev errorCount totalReqs topErrors = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime @@ -1234,7 +1236,7 @@ createEndpointErrorRateSpikeIssue projectId epHash method path serviceName curre } -createEndpointVolumeRateChangeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Maybe Text -> Double -> Double -> Double -> Text -> IO Issue +createEndpointVolumeRateChangeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Maybe Text -> Double -> Double -> Double -> Text -> IO Issue createEndpointVolumeRateChangeIssue projectId epHash method path serviceName currentRate baselineRate baselineStddev direction = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index f29994923..3d5c18f80 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -300,7 +300,7 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do AE.Success (exceptionData :: Issues.RuntimeExceptionData) -> do div_ [class_ "grid grid-cols-4 lg:grid-cols-8 gap-4"] do 
-- Stats (1 column each) - statBox_ (Just pid) Nothing "Affected Requests" "" "0" Nothing Nothing + statBox_ (Just pid) Nothing "Affected Requests" "" "0" Nothing Nothing statBox_ (Just pid) Nothing "Affected Clients" "" "0" Nothing Nothing whenJust errM $ \err -> do timeStatBox_ "First Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.createdAt @@ -454,7 +454,7 @@ buildAIContext issue errM trDataM spans = , Just $ "- **Type**: " <> show issue.issueType , Just $ "- **Severity**: " <> issue.severity , Just $ "- **Service**: " <> issue.service - , Just $ "- **Affected Requests**: 0" + , Just $ "- **Affected Requests**: 0" , Just $ "- **Affected Clients**: 0" , Just $ "- **Recommended Action**: " <> issue.recommendedAction , errM >>= \err -> From 338f2bd2a93d4a1bbf4021f66b6349dd4b2e47cf Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Thu, 8 Jan 2026 15:54:41 +0000 Subject: [PATCH 14/71] fix LogPatterns typo --- src/BackgroundJobs.hs | 2 +- src/Models/Apis/Endpoints.hs | 15 +-------------- src/Models/Apis/Errors.hs | 11 ++--------- src/Models/Apis/LogPatterns.hs | 11 +---------- 4 files changed, 5 insertions(+), 34 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index dd4d5f44a..0cb6fe697 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -539,7 +539,7 @@ processPatterns :: Text -> Text -> V.Vector (Text, Text) -> Projects.ProjectId - processPatterns kind fieldName events pid scheduledTime since = do Relude.when (not $ V.null events) $ do let qq = [text| select $fieldName from otel_logs_and_spans where project_id= ? AND timestamp >= now() - interval '1 hour' and $fieldName is not null GROUP BY $fieldName ORDER BY count(*) desc limit 20|] - existingPatterns <- if kind == "summary" then coerce @[Only Text] @[Text] <$> PG.query (Query $ encodeUtf8 qq) pid else LogPattern.getLogPatternTexts pid + existingPatterns <- if kind == "summary" then coerce @[Only Text] @[Text] <$> PG.query (Query $ encodeUtf8 qq) pid else LogPatterns.getLogPatternTexts pid let known = V.fromList $ map ("",) existingPatterns combined = known <> events drainTree = processBatch (kind == "summary") combined scheduledTime Drain.emptyDrainTree diff --git a/src/Models/Apis/Endpoints.hs b/src/Models/Apis/Endpoints.hs index 23849569a..b47d6663c 100644 --- a/src/Models/Apis/Endpoints.hs +++ b/src/Models/Apis/Endpoints.hs @@ -372,20 +372,7 @@ getEndpointStats pid endpointHash hours = listToMaybe <$> PG.query q (pid, endpo -- | Update endpoint baseline values -updateEndpointBaseline - :: DB es - => EndpointId - -> Text -- baseline state - -> Double -- error rate mean - -> Double -- error rate stddev - -> Double -- latency mean - -> Double -- latency stddev - -> Double -- latency p95 - -> Double -- latency p99 - -> Double -- volume mean - -> Double -- volume stddev - -> Int -- samples - -> Eff es () +updateEndpointBaseline :: DB es => EndpointId -> Text -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Int -> Eff es () updateEndpointBaseline eid state errMean errStddev latMean latStddev latP95 latP99 volMean volStddev samples = void $ PG.execute q (state, errMean, errStddev, latMean, latStddev, latP95, latP99, volMean, volStddev, samples, eid) where diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index c0a95aea3..a3e298db7 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -344,11 +344,7 @@ data HourlyBucket = HourlyBucket -- | Get hourly error counts for a specific error over a time range -- Returns counts bucketed by 
hour for baseline calculation -getHourlyErrorCounts - :: DB es - => ErrorId - -> Int -- hours to look back - -> Eff es [HourlyBucket] +getHourlyErrorCounts :: DB es => ErrorId -> Int -> Eff es [HourlyBucket] getHourlyErrorCounts eid hoursBack = PG.query q (eid, hoursBack) where @@ -421,10 +417,7 @@ getErrorEventStats eid hoursBack = do -- | Check if an error is spiking compared to its baseline -- Returns (isSpike, currentRate, zScore) if baseline is established -checkErrorSpike - :: DB es - => Error - -> Eff es (Maybe (Bool, Double, Double)) +checkErrorSpike :: DB es => Error -> Eff es (Maybe (Bool, Double, Double)) checkErrorSpike err = do case (err.baselineState, err.baselineErrorRateMean, err.baselineErrorRateStddev) of ("established", Just mean, Just stddev) | stddev > 0 -> do diff --git a/src/Models/Apis/LogPatterns.hs b/src/Models/Apis/LogPatterns.hs index b4fc4dc61..567352f9b 100644 --- a/src/Models/Apis/LogPatterns.hs +++ b/src/Models/Apis/LogPatterns.hs @@ -201,16 +201,7 @@ updateLogPatternStats pid patHash additionalCount = |] --- | Update baseline data for a log pattern -updateBaseline - :: DB es - => Projects.ProjectId - -> Text -- pattern_hash - -> Text -- baseline_state ('learning' or 'established') - -> Double -- hourly_mean - -> Double -- hourly_stddev - -> Int -- samples - -> Eff es Int64 +updateBaseline :: DB es => Projects.ProjectId -> Text -> Text -> Double -> Double -> Int -> Eff es Int64 updateBaseline pid patHash bState hourlyMean hourlyStddev samples = PG.execute q (bState, hourlyMean, hourlyStddev, samples, pid, patHash) where From 117aa81e37ea7cc693056d90aab150662c914c05 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 8 Jan 2026 15:55:31 +0000 Subject: [PATCH 15/71] Auto-format code with fourmolu --- src/Models/Apis/Endpoints.hs | 2 +- src/Models/Apis/LogPatterns.hs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Models/Apis/Endpoints.hs b/src/Models/Apis/Endpoints.hs index b47d6663c..e8816f100 100644 --- a/src/Models/Apis/Endpoints.hs +++ b/src/Models/Apis/Endpoints.hs @@ -372,7 +372,7 @@ getEndpointStats pid endpointHash hours = listToMaybe <$> PG.query q (pid, endpo -- | Update endpoint baseline values -updateEndpointBaseline :: DB es => EndpointId -> Text -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Int -> Eff es () +updateEndpointBaseline :: DB es => EndpointId -> Text -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Int -> Eff es () updateEndpointBaseline eid state errMean errStddev latMean latStddev latP95 latP99 volMean volStddev samples = void $ PG.execute q (state, errMean, errStddev, latMean, latStddev, latP95, latP99, volMean, volStddev, samples, eid) where diff --git a/src/Models/Apis/LogPatterns.hs b/src/Models/Apis/LogPatterns.hs index 567352f9b..e29f793a5 100644 --- a/src/Models/Apis/LogPatterns.hs +++ b/src/Models/Apis/LogPatterns.hs @@ -201,7 +201,7 @@ updateLogPatternStats pid patHash additionalCount = |] -updateBaseline :: DB es => Projects.ProjectId -> Text -> Text -> Double -> Double -> Int -> Eff es Int64 +updateBaseline :: DB es => Projects.ProjectId -> Text -> Text -> Double -> Double -> Int -> Eff es Int64 updateBaseline pid patHash bState hourlyMean hourlyStddev samples = PG.execute q (bState, hourlyMean, hourlyStddev, samples, pid, patHash) where From 11a10d7debd33705e02e1c1e2d1f386b5f683a33 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Thu, 8 Jan 2026 21:12:53 +0000 Subject: [PATCH 16/71] towards api change issues 
presentation --- src/Models/Apis/Issues.hs | 271 ++++++++++++++++-- .../0029_shapes_example_payloads.sql | 13 + 2 files changed, 257 insertions(+), 27 deletions(-) create mode 100644 static/migrations/0029_shapes_example_payloads.sql diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index bcfb049c0..9d1272e1d 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -30,6 +30,9 @@ module Models.Apis.Issues ( EndpointLatencyDegradationData (..), EndpointErrorRateSpikeData (..), EndpointVolumeRateChangeData (..), + NewEndpointData (..), + NewShapeData (..), + FieldChangeData (..), -- * Database Operations insertIssue, @@ -57,6 +60,9 @@ module Models.Apis.Issues ( createEndpointLatencyDegradationIssue, createEndpointErrorRateSpikeIssue, createEndpointVolumeRateChangeIssue, + createNewEndpointIssue, + createNewShapeIssue, + createFieldChangeIssue, -- * Utilities issueIdText, @@ -122,6 +128,9 @@ issueIdText = idToText -- | Issue types data IssueType = APIChange + | NewEndpoint + | NewShape + | FieldChange | RuntimeException | QueryAlert | LogPattern @@ -141,7 +150,10 @@ instance Default IssueType where issueTypeToText :: IssueType -> Text -issueTypeToText APIChange = "api_change" -- Maps to anomaly_type 'shape' in DB +issueTypeToText APIChange = "api_change" +issueTypeToText NewEndpoint = "new_endpoint" +issueTypeToText NewShape = "new_shape" +issueTypeToText FieldChange = "field_change" issueTypeToText RuntimeException = "runtime_exception" issueTypeToText QueryAlert = "query_alert" issueTypeToText LogPattern = "log_pattern" @@ -155,7 +167,12 @@ issueTypeToText EndpointVolumeRateChange = "endpoint_volume_rate_change" parseIssueType :: Text -> Maybe IssueType parseIssueType "api_change" = Just APIChange -parseIssueType "shape" = Just APIChange -- Handle DB anomaly_type +parseIssueType "new_endpoint" = Just NewEndpoint +parseIssueType "new_shape" = Just NewShape +parseIssueType "field_change" = Just FieldChange +parseIssueType "shape" = Just NewShape -- Handle DB anomaly_type +parseIssueType "endpoint" = Just NewEndpoint -- Handle DB anomaly_type +parseIssueType "field" = Just FieldChange -- Handle DB anomaly_type parseIssueType "runtime_exception" = Just RuntimeException parseIssueType "query_alert" = Just QueryAlert parseIssueType "log_pattern" = Just LogPattern @@ -370,6 +387,60 @@ data EndpointVolumeRateChangeData = EndpointVolumeRateChangeData deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] EndpointVolumeRateChangeData +-- | New Endpoint issue data +data NewEndpointData = NewEndpointData + { endpointHash :: Text + , endpointMethod :: Text + , endpointPath :: Text + , endpointHost :: Text + , firstSeenAt :: UTCTime + , initialShapes :: V.Vector Text + } + deriving stock (Generic, Show) + deriving anyclass (NFData) + deriving (FromField, ToField) via Aeson NewEndpointData + deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] NewEndpointData + + +-- | New Shape issue data +data NewShapeData = NewShapeData + { shapeHash :: Text + , endpointHash :: Text + , endpointMethod :: Text + , endpointPath :: Text + , statusCode :: Int + , exampleRequestPayload :: AE.Value + , exampleResponsePayload :: AE.Value + , newFields :: V.Vector Text + , deletedFields :: V.Vector Text + , modifiedFields :: V.Vector Text + , fieldHashes :: V.Vector Text + , firstSeenAt :: UTCTime + } + deriving stock (Generic, Show) + deriving anyclass 
(NFData) + deriving (FromField, ToField) via Aeson NewShapeData + deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] NewShapeData + + +-- | Field Change issue data (for individual field changes) +data FieldChangeData = FieldChangeData + { fieldHash :: Text + , endpointHash :: Text + , endpointMethod :: Text + , endpointPath :: Text + , keyPath :: Text + , fieldCategory :: Text + , previousType :: Maybe Text + , newType :: Text + , changeType :: Text + } + deriving stock (Generic, Show) + deriving anyclass (NFData) + deriving (FromField, ToField) via Aeson FieldChangeData + deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] FieldChangeData + + -- | Main Issue type data Issue = Issue { id :: IssueId @@ -377,8 +448,8 @@ data Issue = Issue , updatedAt :: ZonedTime , projectId :: Projects.ProjectId , issueType :: IssueType - , target_hash :: Text -- links to error.hash, log_pattern.pattern_hash, endpoint.hash - , endpointHash :: Text -- For API changes, empty for othersxl + , target_hash :: Text + , endpointHash :: Text , acknowledgedAt :: Maybe ZonedTime , acknowledgedBy :: Maybe Users.UserId , archivedAt :: Maybe ZonedTime @@ -407,6 +478,7 @@ instance Default Issue where , updatedAt = error "updatedAt must be set" , projectId = def , issueType = def + , target_hash = "" , endpointHash = "" , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -631,6 +703,7 @@ createAPIChangeIssue projectId endpointHash anomalies = do , updatedAt = firstAnomaly.updatedAt , projectId = projectId , issueType = APIChange + , target_hash = endpointHash , endpointHash = endpointHash , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -674,11 +747,12 @@ createRuntimeExceptionIssue projectId atError = do , updatedAt = errorZonedTime , projectId = projectId , issueType = RuntimeException - , endpointHash = fromMaybe "" atError.hash + , target_hash = fromMaybe "" atError.hash + , endpointHash = "" , acknowledgedAt = Nothing , acknowledgedBy = Nothing , archivedAt = Nothing - , title = atError.rootErrorType <> ": " <> T.take 100 atError.message + , title = "New Exception: " <> atError.errorType <> " - " <> T.take 80 atError.message , service = fromMaybe "unknown-service" atError.serviceName , critical = True , severity = "critical" @@ -717,6 +791,7 @@ createQueryAlertIssue projectId queryId queryName queryExpr threshold actual thr , updatedAt = zonedNow , projectId = projectId , issueType = QueryAlert + , target_hash = "" , endpointHash = "" , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -761,6 +836,7 @@ createNewErrorIssue projectId err = do , updatedAt = zonedNow , projectId = projectId , issueType = RuntimeException + , target_hash = err.hash , endpointHash = err.hash , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -780,13 +856,7 @@ createNewErrorIssue projectId err = do -- | Create issue for an error spike -createErrorSpikeIssue - :: Projects.ProjectId - -> Errors.Error - -> Double -- current rate - -> Double -- baseline mean - -> Double -- baseline stddev - -> IO Issue +createErrorSpikeIssue :: Projects.ProjectId -> Errors.Error -> Double -> Double -> Double -> IO Issue createErrorSpikeIssue projectId err currentRate baselineMean baselineStddev = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime @@ -813,6 +883,7 @@ createErrorSpikeIssue projectId err currentRate baselineMean baselineStddev = do , updatedAt = zonedNow , projectId = 
projectId , issueType = RuntimeException + , target_hash = err.hash , endpointHash = err.hash , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -858,6 +929,7 @@ createNewLogPatternIssue projectId lp = do , updatedAt = zonedNow , projectId = projectId , issueType = RuntimeException + , target_hash = lp.patternHash , endpointHash = lp.patternHash , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -880,13 +952,7 @@ createNewLogPatternIssue projectId lp = do -- | Create an issue for a log pattern volume spike -createLogPatternSpikeIssue - :: Projects.ProjectId - -> LogPatterns.LogPattern - -> Double -- current rate (events/hour) - -> Double -- baseline mean - -> Double -- baseline stddev - -> IO Issue +createLogPatternSpikeIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> Double -> Double -> Double -> IO Issue createLogPatternSpikeIssue projectId lp currentRate baselineMean baselineStddev = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime @@ -913,6 +979,7 @@ createLogPatternSpikeIssue projectId lp currentRate baselineMean baselineStddev , updatedAt = zonedNow , projectId = projectId , issueType = RuntimeException + , target_hash = lp.patternHash , endpointHash = lp.patternHash , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -961,6 +1028,7 @@ createLogPatternIssue projectId lp = do , updatedAt = zonedNow , projectId = projectId , issueType = LogPattern + , target_hash = lp.patternHash , endpointHash = lp.patternHash , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -983,13 +1051,7 @@ createLogPatternIssue projectId lp = do -- | Create an issue for an escalating error -createErrorEscalatingIssue - :: Projects.ProjectId - -> Errors.Error - -> Text -- previous state - -> Double -- escalation rate - -> Text -- escalation window - -> IO Issue +createErrorEscalatingIssue :: Projects.ProjectId -> Errors.Error -> Text -> Double -> Text -> IO Issue createErrorEscalatingIssue projectId err prevState escalationRate escalationWindow = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime @@ -1018,6 +1080,7 @@ createErrorEscalatingIssue projectId err prevState escalationRate escalationWind , updatedAt = zonedNow , projectId = projectId , issueType = ErrorEscalating + , target_hash = err.hash , endpointHash = err.hash , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -1063,6 +1126,7 @@ createErrorRegressedIssue projectId err resolvedAtTime quietMins prevOccurrences , updatedAt = zonedNow , projectId = projectId , issueType = ErrorRegressed + , target_hash = err.hash , endpointHash = err.hash , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -1113,6 +1177,7 @@ createLogPatternRateChangeIssue projectId lp currentRate baselineMean baselineSt , updatedAt = zonedNow , projectId = projectId , issueType = LogPatternRateChange + , target_hash = lp.patternHash , endpointHash = lp.patternHash , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -1167,6 +1232,7 @@ createEndpointLatencyDegradationIssue projectId epHash method path serviceName c , updatedAt = zonedNow , projectId = projectId , issueType = EndpointLatencyDegradation + , target_hash = epHash , endpointHash = epHash , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -1218,6 +1284,7 @@ createEndpointErrorRateSpikeIssue projectId epHash method path serviceName curre , updatedAt = zonedNow , projectId = projectId , issueType = EndpointErrorRateSpike + , target_hash = epHash , endpointHash = epHash , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -1266,6 +1333,7 @@ 
createEndpointVolumeRateChangeIssue projectId epHash method path serviceName cur , updatedAt = zonedNow , projectId = projectId , issueType = EndpointVolumeRateChange + , target_hash = epHash , endpointHash = epHash , acknowledgedAt = Nothing , acknowledgedBy = Nothing @@ -1288,6 +1356,155 @@ createEndpointVolumeRateChangeIssue projectId epHash method path serviceName cur } +-- | Create an issue for a new endpoint +createNewEndpointIssue :: Projects.ProjectId -> Text -> Text -> Text -> Text -> IO Issue +createNewEndpointIssue projectId epHash method path host = do + issueId <- UUIDId <$> UUID4.nextRandom + now <- getCurrentTime + zonedNow <- utcToLocalZonedTime now + let endpointData = + NewEndpointData + { endpointHash = epHash + , endpointMethod = method + , endpointPath = path + , endpointHost = host + , firstSeenAt = now + , initialShapes = V.empty + } + pure + Issue + { id = issueId + , createdAt = zonedNow + , updatedAt = zonedNow + , projectId = projectId + , issueType = NewEndpoint + , target_hash = epHash + , endpointHash = epHash + , acknowledgedAt = Nothing + , acknowledgedBy = Nothing + , archivedAt = Nothing + , title = "New Endpoint detected" + , service = host + , critical = False + , severity = "info" + , recommendedAction = "A new API endpoint has been detected. Review to ensure it matches your API specification." + , migrationComplexity = "n/a" + , issueData = Aeson $ AE.toJSON endpointData + , requestPayloads = Aeson [] + , responsePayloads = Aeson [] + , llmEnhancedAt = Nothing + , llmEnhancementVersion = Nothing + } + + +-- | Create an issue for a new shape +createNewShapeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Text -> Int -> AE.Value -> AE.Value -> V.Vector Text -> V.Vector Text -> V.Vector Text -> V.Vector Text -> IO Issue +createNewShapeIssue projectId shHash epHash method path statusCode reqPayload respPayload newFlds deletedFlds modifiedFlds fldHashes = do + issueId <- UUIDId <$> UUID4.nextRandom + now <- getCurrentTime + zonedNow <- utcToLocalZonedTime now + + let shapeData = + NewShapeData + { shapeHash = shHash + , endpointHash = epHash + , endpointMethod = method + , endpointPath = path + , statusCode = statusCode + , exampleRequestPayload = reqPayload + , exampleResponsePayload = respPayload + , newFields = newFlds + , deletedFields = deletedFlds + , modifiedFields = modifiedFlds + , fieldHashes = fldHashes + , firstSeenAt = now + } + + hasBreakingChanges = not (V.null deletedFlds) || not (V.null modifiedFlds) + changeCount = V.length newFlds + V.length deletedFlds + V.length modifiedFlds + + pure + Issue + { id = issueId + , createdAt = zonedNow + , updatedAt = zonedNow + , projectId = projectId + , issueType = NewShape + , target_hash = shHash + , endpointHash = epHash + , acknowledgedAt = Nothing + , acknowledgedBy = Nothing + , archivedAt = Nothing + , title = "New Shape: " <> method <> " " <> path <> " (" <> T.pack (show statusCode) <> ") - " <> T.pack (show changeCount) <> " field changes" + , service = "" + , critical = hasBreakingChanges + , severity = if hasBreakingChanges then "critical" else "warning" + , recommendedAction = if hasBreakingChanges + then "Breaking API changes detected: " <> T.pack (show (V.length deletedFlds)) <> " deleted, " <> T.pack (show (V.length modifiedFlds)) <> " modified fields. Update clients immediately." + else "New API shape detected with " <> T.pack (show (V.length newFlds)) <> " new fields. Review for compatibility." 
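+      -- Worked example (illustrative counts): with 2 new, 1 deleted and
+      -- 3 modified fields, hasBreakingChanges = True and changeCount = 6,
+      -- so the title ends in "6 field changes", severity is "critical",
+      -- and migrationComplexity below resolves to "medium" (1 deleted field <= 5).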
+      , migrationComplexity = if V.length deletedFlds > 5 then "high" else if hasBreakingChanges then "medium" else "low"
+      , issueData = Aeson $ AE.toJSON shapeData
+      , requestPayloads = Aeson []
+      , responsePayloads = Aeson []
+      , llmEnhancedAt = Nothing
+      , llmEnhancementVersion = Nothing
+      }
+
+
+-- | Create an issue for a field change
+createFieldChangeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Text -> Text -> Text -> Maybe Text -> Text -> Text -> IO Issue
+createFieldChangeIssue projectId fldHash epHash method path keyPath category prevType newType changeType = do
+  issueId <- UUIDId <$> UUID4.nextRandom
+  now <- getCurrentTime
+  zonedNow <- utcToLocalZonedTime now
+
+  let fieldData =
+        FieldChangeData
+          { fieldHash = fldHash
+          , endpointHash = epHash
+          , endpointMethod = method
+          , endpointPath = path
+          , keyPath = keyPath
+          , fieldCategory = category
+          , previousType = prevType
+          , newType = newType
+          , changeType = changeType
+          }
+
+      isBreaking = changeType `elem` ["removed", "type_changed"]
+      titlePrefix = case changeType of
+        "added" -> "New Field"
+        "removed" -> "Removed Field"
+        "type_changed" -> "Field Type Changed"
+        "format_changed" -> "Field Format Changed"
+        _ -> "Field Changed"
+
+  pure
+    Issue
+      { id = issueId
+      , createdAt = zonedNow
+      , updatedAt = zonedNow
+      , projectId = projectId
+      , issueType = FieldChange
+      , target_hash = fldHash
+      , endpointHash = epHash
+      , acknowledgedAt = Nothing
+      , acknowledgedBy = Nothing
+      , archivedAt = Nothing
+      , title = titlePrefix <> ": " <> keyPath <> " in " <> method <> " " <> path
+      , service = ""
+      , critical = isBreaking
+      , severity = if isBreaking then "critical" else "info"
+      , recommendedAction = ""
+      , migrationComplexity = if isBreaking then "medium" else "low"
+      , issueData = Aeson $ AE.toJSON fieldData
+      , requestPayloads = Aeson []
+      , responsePayloads = Aeson []
+      , llmEnhancedAt = Nothing
+      , llmEnhancementVersion = Nothing
+      }
+
+
 -- | Conversation type for AI chats
 data ConversationType = CTAnomaly | CTTrace | CTLogExplorer | CTDashboard | CTSlackThread | CTDiscordThread
   deriving stock (Eq, Generic, Read, Show)
diff --git a/static/migrations/0029_shapes_example_payloads.sql b/static/migrations/0029_shapes_example_payloads.sql
new file mode 100644
index 000000000..e9ebbf494
--- /dev/null
+++ b/static/migrations/0029_shapes_example_payloads.sql
@@ -0,0 +1,13 @@
+BEGIN;
+
+-- Add example payloads to shapes table for displaying context in issues
+ALTER TABLE apis.shapes
+ADD COLUMN IF NOT EXISTS example_request_payload JSONB NOT NULL DEFAULT '{}'::jsonb,
+ADD COLUMN IF NOT EXISTS example_response_payload JSONB NOT NULL DEFAULT '{}'::jsonb;
+
+-- Add new issue types for API changes
+ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'new_endpoint';
+ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'new_shape';
+ALTER TYPE apis.issue_type ADD VALUE IF NOT EXISTS 'field_change';
+
+COMMIT;

From 1f717b5e4713b925f21b1989fcb9a7fa81c4d645 Mon Sep 17 00:00:00 2001
From: Yussif Mohammed
Date: Thu, 8 Jan 2026 21:13:45 +0000
Subject: [PATCH 17/71] improve runtime error handling and notification formatting

---
 src/BackgroundJobs.hs     | 121 ++++++++++++----------------------
 src/Models/Apis/Errors.hs |  46 +++++++++++++--
 src/Pkg/Mail.hs           |   7 ++-
 3 files changed, 81 insertions(+), 93 deletions(-)

diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs
index 0cb6fe697..7a270ae95 100644
--- a/src/BackgroundJobs.hs
+++ b/src/BackgroundJobs.hs
@@ -688,7 +688,6 @@ processProjectErrors pid errors = do
     Left (e :: 
SomePostgreSqlException) -> Log.logAttention "Failed to insert errors" ("error", AE.toJSON $ show e) Right _ -> - -- Only log if errors were actually inserted (reduces noise in tests) Relude.when (V.length errors > 0) $ Log.logInfo "Successfully inserted errors for project" $ AE.object [("project_id", AE.toJSON pid.toText), ("error_count", AE.toJSON $ V.length errors)] @@ -1145,60 +1144,8 @@ newAnomalyJob pid createdAt anomalyTypesT anomalyActionsT targetHashes = do Anomalies.ATFormat -> processAPIChangeAnomalies pid targetHashes -- Runtime exceptions get individual issues -- Each unique error pattern gets its own issue for tracking - Anomalies.ATRuntimeException -> do - errors <- Anomalies.errorsByHashes pid targetHashes - - -- Create one issue per error - forM_ errors \err -> do - issue <- liftIO $ Issues.createRuntimeExceptionIssue pid err.errorData - Issues.insertIssue issue - -- Queue enhancement job - _ <- liftIO $ withResource authCtx.jobsPool \conn -> - createJob conn "background_jobs" $ BackgroundJobs.EnhanceIssuesWithLLM pid (V.singleton issue.id) - -- Send notifications only if project exists and has alerts enabled - projectM <- Projects.projectById pid - whenJust projectM \project -> Relude.when project.errorAlerts do - users <- Projects.usersByProjectId pid - let issueId = UUID.toText issue.id.unUUIDId - forM_ project.notificationsChannel \case - Projects.NSlack -> - forM_ errors \err' -> sendSlackAlert (RuntimeErrorAlert{issueId = issueId, errorData = err'.errorData}) pid project.title Nothing - Projects.NDiscord -> - forM_ errors \err' -> sendDiscordAlert (RuntimeErrorAlert{issueId = issueId, errorData = err'.errorData}) pid project.title Nothing - Projects.NPhone -> - forM_ errors \err' -> - sendWhatsAppAlert (RuntimeErrorAlert{issueId = issueId, errorData = err'.errorData}) pid project.title project.whatsappNumbers - Projects.NEmail -> - forM_ users \u -> do - let errosJ = - ( \ee -> - let e = ee.errorData - in AE.object - [ "root_error_message" AE..= e.rootErrorMessage - , "error_type" AE..= e.errorType - , "error_message" AE..= e.message - , "stack_trace" AE..= e.stackTrace - , "when" AE..= formatTime defaultTimeLocale "%b %-e, %Y, %-l:%M:%S %p" e.when - , "hash" AE..= e.hash - , "tech" AE..= e.technology - , "request_info" AE..= (fromMaybe "" e.requestMethod <> " " <> fromMaybe "" e.requestPath) - , "root_error_type" AE..= e.rootErrorType - ] - ) - <$> errors - title = project.title - errors_url = authCtx.env.hostUrl <> "p/" <> pid.toText <> "/issues/" - templateVars = - [aesonQQ|{ - "project_name": #{title}, - "errors_url": #{errors_url}, - "errors": #{errosJ} - }|] - sendPostmarkEmail (CI.original u.email) (Just ("runtime-errors", templateVars)) Nothing - -- Ignore other anomaly types _ -> pass - -- | Process API change anomalies (endpoint, shape, format) into unified APIChange issues -- This function groups related anomalies by endpoint to prevent notification spam. 
-- For example, if a new endpoint is added with 5 fields and 2 formats, instead of @@ -1735,10 +1682,7 @@ detectErrorSpikes pid authCtx = do processNewError :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBackgroundCtx () processNewError pid errorHash authCtx = do Log.logInfo "Processing new error" (pid, errorHash) - - -- Get the error by hash - errorM <- Errors.getErrorByHash pid errorHash "production" - + errorM <- Errors.getErrorByHash pid errorHash case errorM of Nothing -> Log.logAttention "Error not found for new error processing" (pid, errorHash) Just err -> do @@ -1747,10 +1691,41 @@ processNewError pid errorHash authCtx = do -- Create a runtime exception issue issue <- liftIO $ Issues.createNewErrorIssue pid err Issues.insertIssue issue - -- Queue LLM enhancement liftIO $ withResource authCtx.jobsPool \conn -> void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + -- Send notifications only if project exists and has alerts enabled + projectM <- Projects.projectById pid + whenJust projectM \project -> Relude.when project.errorAlerts do + users <- Projects.usersByProjectId pid + let issueId = UUID.toText issue.id.unUUIDId + forM_ project.notificationsChannel \case + Projects.NSlack -> sendSlackAlert (RuntimeErrorAlert{issueId = issueId, errorData = err.errorData}) pid project.title Nothing + Projects.NDiscord -> sendDiscordAlert (RuntimeErrorAlert{issueId = issueId, errorData = err.errorData}) pid project.title Nothing + Projects.NPhone -> sendWhatsAppAlert (RuntimeErrorAlert{issueId = issueId, errorData = err.errorData}) pid project.title project.whatsappNumbers + Projects.NEmail -> + forM_ users \u -> do + let e = err.errorData + errorsJ = V.singleton $ AE.object + [ "root_error_message" AE..= e.rootErrorMessage + , "error_type" AE..= e.errorType + , "error_message" AE..= e.message + , "stack_trace" AE..= e.stackTrace + , "when" AE..= formatTime defaultTimeLocale "%b %-e, %Y, %-l:%M:%S %p" e.when + , "hash" AE..= e.hash + , "tech" AE..= e.technology + , "request_info" AE..= (fromMaybe "" e.requestMethod <> " " <> fromMaybe "" e.requestPath) + , "root_error_type" AE..= e.rootErrorType + ] + title = project.title + errors_url = authCtx.env.hostUrl <> "p/" <> pid.toText <> "/issues/" + templateVars = + [aesonQQ|{ + "project_name": #{title}, + "errors_url": #{errors_url}, + "errors": #{errorsJ} + }|] + sendPostmarkEmail (CI.original u.email) (Just ("runtime-errors", templateVars)) Nothing Log.logInfo "Created issue for new error" (pid, err.id, issue.id) @@ -1923,20 +1898,8 @@ detectEndpointErrorRateSpike pid authCtx = do Relude.when isSpike $ do Log.logInfo "Endpoint error rate spike detected" (epRate.endpointHash, currentErrorRate, baselineMean, zScore) - issue <- - liftIO - $ Issues.createEndpointErrorRateSpikeIssue - pid - epRate.endpointHash - epRate.method - epRate.urlPath - (Just epRate.host) - currentErrorRate - baselineMean - stddev - epRate.currentHourErrors - epRate.currentHourRequests - V.empty + issue <- liftIO $ Issues.createEndpointErrorRateSpikeIssue + pid epRate.endpointHash epRate.method epRate.urlPath (Just epRate.host) currentErrorRate baselineMean stddev epRate.currentHourErrors epRate.currentHourRequests V.empty Issues.insertIssue issue liftIO $ withResource authCtx.jobsPool \conn -> void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) @@ -1962,18 +1925,8 @@ detectEndpointVolumeRateChange pid authCtx = do direction = if currentRate > baselineMean then "spike" else "drop" Relude.when 
isSignificantChange $ do Log.logInfo "Endpoint volume rate change detected" (epRate.endpointHash, currentRate, baselineMean, zScore, direction) - issue <- - liftIO - $ Issues.createEndpointVolumeRateChangeIssue - pid - epRate.endpointHash - epRate.method - epRate.urlPath - (Just epRate.host) - currentRate - baselineMean - stddev - direction + issue <- liftIO $ Issues.createEndpointVolumeRateChangeIssue + pid epRate.endpointHash epRate.method epRate.urlPath (Just epRate.host) currentRate baselineMean stddev direction Issues.insertIssue issue liftIO $ withResource authCtx.jobsPool \conn -> diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index a3e298db7..8aff09503 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -4,6 +4,7 @@ module Models.Apis.Errors ( ErrorState (..), ErrorEvent (..), ErrorEventId, + ATError (..), -- Queries getErrors, getErrorById, @@ -45,6 +46,9 @@ import Models.Users.Users qualified as Users import Relude hiding (id) import System.Types (DB) import Utils (DBField (MkDBField)) +import Models.Apis.RequestDumps qualified as RequestDumps +import Database.PostgreSQL.Simple.Newtypes (Aeson (..)) +import Data.Default newtype ErrorId = ErrorId {unErrorId :: UUID.UUID} @@ -100,7 +104,6 @@ instance FromField ErrorState where Just s -> pure s Nothing -> pure ESNew - data Error = Error { id :: ErrorId , projectId :: Projects.ProjectId @@ -113,7 +116,7 @@ data Error = Error , environment :: Text , service :: Maybe Text , runtime :: Maybe Text - , errorData :: AE.Value + , errorData :: ATError , representativeMessage :: Maybe Text , firstEventId :: Maybe ErrorEventId , lastEventId :: Maybe ErrorEventId @@ -146,6 +149,37 @@ data Error = Error via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] Error +data ATError = ATError + { projectId :: Maybe Projects.ProjectId + , when :: UTCTime + , errorType :: Text + , rootErrorType :: Text + , message :: Text + , rootErrorMessage :: Text + , stackTrace :: Text + , hash :: Maybe Text + , technology :: Maybe RequestDumps.SDKTypes + , requestMethod :: Maybe Text + , requestPath :: Maybe Text + , serviceName :: Maybe Text + , environment :: Maybe Text + , runtime :: Maybe Text + , traceId :: Maybe Text + , spanId :: Maybe Text + , parentSpanId :: Maybe Text + , endpointHash :: Maybe Text + , userId :: Maybe Text + , userEmail :: Maybe Text + , userIp :: Maybe Text + , sessionId :: Maybe Text + } + deriving stock (Generic, Show) + deriving anyclass (Default, NFData) + deriving (FromField, ToField) via Aeson ATError + deriving + (AE.FromJSON, AE.ToJSON) + via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] ATError + data ErrorEvent = ErrorEvent { id :: ErrorEventId , projectId :: Projects.ProjectId @@ -228,9 +262,9 @@ getErrorById eid = do -- | Get error by hash -getErrorByHash :: DB es => Projects.ProjectId -> Text -> Text -> Eff es (Maybe Error) -getErrorByHash pid hash env = do - results <- PG.query q (pid, hash, env) +getErrorByHash :: DB es => Projects.ProjectId -> Text -> Eff es (Maybe Error) +getErrorByHash pid hash = do + results <- PG.query q (pid, hash) return $ listToMaybe results where q = @@ -246,7 +280,7 @@ getErrorByHash pid hash env = do baseline_error_rate_mean, baseline_error_rate_stddev, baseline_updated_at, is_ignored, ignored_until FROM apis.errors - WHERE project_id = ? AND hash = ? AND environment = ? + WHERE project_id = ? AND hash = ? 
|] diff --git a/src/Pkg/Mail.hs b/src/Pkg/Mail.hs index b13187854..7dfc5700d 100644 --- a/src/Pkg/Mail.hs +++ b/src/Pkg/Mail.hs @@ -25,6 +25,7 @@ import System.Config (AuthContext (env)) import System.Config qualified as Config import System.Logging qualified as Log import System.Types (DB) +import Models.Apis.Errors qualified as Errors sendPostmarkEmail :: Notify.Notify :> es => Text -> Maybe (Text, AE.Value) -> Maybe (Text, Text) -> Eff es () @@ -44,7 +45,7 @@ sendSlackMessage pid message = do data NotificationAlerts = EndpointAlert {project :: Text, endpoints :: V.Vector Text, endpointHash :: Text} - | RuntimeErrorAlert {issueId :: Text, errorData :: RequestDumps.ATError} + | RuntimeErrorAlert {issueId :: Text, errorData :: Errors.ATError} | ShapeAlert | ReportAlert { reportType :: Text @@ -199,7 +200,7 @@ slackReportAlert reportType startTime endTime totalErrors totalEvents breakDown sumr = V.take 10 $ V.map (\(name, errCount, evCount) -> AE.object ["type" AE..= "mrkdwn", "text" AE..= ("*" <> name <> ":* Errors-" <> toText (show errCount) <> ", Total-" <> toText (show evCount))]) breakDown -slackErrorAlert :: RequestDumps.ATError -> Text -> Text -> Text -> AE.Value +slackErrorAlert :: Errors.ATError -> Text -> Text -> Text -> AE.Value slackErrorAlert err project channelId projectUrl = AE.object [ "blocks" @@ -305,7 +306,7 @@ discordReportAlert reportType startTime endTime totalErrors totalEvents breakDow T.intercalate "\n" $ V.toList $ V.take 10 $ V.map (\(name, errCount, evCount) -> "* **" <> name <> "**: Total errors-" <> show errCount <> ", Total events-" <> show evCount) breakDown -discordErrorAlert :: RequestDumps.ATError -> Text -> Text -> AE.Value +discordErrorAlert :: Errors.ATError -> Text -> Text -> AE.Value discordErrorAlert err project projectUrl = [aesonQQ|{ "embeds": [ From dd7742455838c48e5e84b2945289bcb9575cbaac Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 8 Jan 2026 21:14:20 +0000 Subject: [PATCH 18/71] Auto-format code with fourmolu --- src/BackgroundJobs.hs | 55 ++++++++++++++++++++++++++++----------- src/Models/Apis/Errors.hs | 8 +++--- src/Models/Apis/Issues.hs | 31 +++++++++++----------- src/Pkg/Mail.hs | 2 +- 4 files changed, 62 insertions(+), 34 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 7a270ae95..7bacd4eeb 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -1146,6 +1146,7 @@ newAnomalyJob pid createdAt anomalyTypesT anomalyActionsT targetHashes = do -- Each unique error pattern gets its own issue for tracking _ -> pass + -- | Process API change anomalies (endpoint, shape, format) into unified APIChange issues -- This function groups related anomalies by endpoint to prevent notification spam. 
-- For example, if a new endpoint is added with 5 fields and 2 formats, instead of @@ -1706,17 +1707,19 @@ processNewError pid errorHash authCtx = do Projects.NEmail -> forM_ users \u -> do let e = err.errorData - errorsJ = V.singleton $ AE.object - [ "root_error_message" AE..= e.rootErrorMessage - , "error_type" AE..= e.errorType - , "error_message" AE..= e.message - , "stack_trace" AE..= e.stackTrace - , "when" AE..= formatTime defaultTimeLocale "%b %-e, %Y, %-l:%M:%S %p" e.when - , "hash" AE..= e.hash - , "tech" AE..= e.technology - , "request_info" AE..= (fromMaybe "" e.requestMethod <> " " <> fromMaybe "" e.requestPath) - , "root_error_type" AE..= e.rootErrorType - ] + errorsJ = + V.singleton + $ AE.object + [ "root_error_message" AE..= e.rootErrorMessage + , "error_type" AE..= e.errorType + , "error_message" AE..= e.message + , "stack_trace" AE..= e.stackTrace + , "when" AE..= formatTime defaultTimeLocale "%b %-e, %Y, %-l:%M:%S %p" e.when + , "hash" AE..= e.hash + , "tech" AE..= e.technology + , "request_info" AE..= (fromMaybe "" e.requestMethod <> " " <> fromMaybe "" e.requestPath) + , "root_error_type" AE..= e.rootErrorType + ] title = project.title errors_url = authCtx.env.hostUrl <> "p/" <> pid.toText <> "/issues/" templateVars = @@ -1898,8 +1901,20 @@ detectEndpointErrorRateSpike pid authCtx = do Relude.when isSpike $ do Log.logInfo "Endpoint error rate spike detected" (epRate.endpointHash, currentErrorRate, baselineMean, zScore) - issue <- liftIO $ Issues.createEndpointErrorRateSpikeIssue - pid epRate.endpointHash epRate.method epRate.urlPath (Just epRate.host) currentErrorRate baselineMean stddev epRate.currentHourErrors epRate.currentHourRequests V.empty + issue <- + liftIO + $ Issues.createEndpointErrorRateSpikeIssue + pid + epRate.endpointHash + epRate.method + epRate.urlPath + (Just epRate.host) + currentErrorRate + baselineMean + stddev + epRate.currentHourErrors + epRate.currentHourRequests + V.empty Issues.insertIssue issue liftIO $ withResource authCtx.jobsPool \conn -> void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) @@ -1925,8 +1940,18 @@ detectEndpointVolumeRateChange pid authCtx = do direction = if currentRate > baselineMean then "spike" else "drop" Relude.when isSignificantChange $ do Log.logInfo "Endpoint volume rate change detected" (epRate.endpointHash, currentRate, baselineMean, zScore, direction) - issue <- liftIO $ Issues.createEndpointVolumeRateChangeIssue - pid epRate.endpointHash epRate.method epRate.urlPath (Just epRate.host) currentRate baselineMean stddev direction + issue <- + liftIO + $ Issues.createEndpointVolumeRateChangeIssue + pid + epRate.endpointHash + epRate.method + epRate.urlPath + (Just epRate.host) + currentRate + baselineMean + stddev + direction Issues.insertIssue issue liftIO $ withResource authCtx.jobsPool \conn -> diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index 8aff09503..ea209a272 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -28,12 +28,14 @@ module Models.Apis.Errors ( where import Data.Aeson qualified as AE +import Data.Default import Data.Time import Data.UUID qualified as UUID import Data.Vector qualified as V import Database.PostgreSQL.Entity.Types (CamelToSnake, Entity, FieldModifiers, GenericEntity, PrimaryKey, Schema, TableName) import Database.PostgreSQL.Simple (FromRow, Only (..), ToRow) import Database.PostgreSQL.Simple.FromField (FromField, ResultError (ConversionFailed, UnexpectedNull), fromField, returnError) +import 
Database.PostgreSQL.Simple.Newtypes (Aeson (..)) import Database.PostgreSQL.Simple.SqlQQ (sql) import Database.PostgreSQL.Simple.ToField (Action (Escape), ToField, toField) import Database.PostgreSQL.Simple.Types (Query (Query)) @@ -41,14 +43,12 @@ import Deriving.Aeson qualified as DAE import Effectful (Eff) import Effectful.PostgreSQL qualified as PG import Models.Apis.RequestDumps qualified as RequestDump +import Models.Apis.RequestDumps qualified as RequestDumps import Models.Projects.Projects qualified as Projects import Models.Users.Users qualified as Users import Relude hiding (id) import System.Types (DB) import Utils (DBField (MkDBField)) -import Models.Apis.RequestDumps qualified as RequestDumps -import Database.PostgreSQL.Simple.Newtypes (Aeson (..)) -import Data.Default newtype ErrorId = ErrorId {unErrorId :: UUID.UUID} @@ -104,6 +104,7 @@ instance FromField ErrorState where Just s -> pure s Nothing -> pure ESNew + data Error = Error { id :: ErrorId , projectId :: Projects.ProjectId @@ -180,6 +181,7 @@ data ATError = ATError (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] ATError + data ErrorEvent = ErrorEvent { id :: ErrorEventId , projectId :: Projects.ProjectId diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index 9d1272e1d..473b04c89 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -411,10 +411,10 @@ data NewShapeData = NewShapeData , statusCode :: Int , exampleRequestPayload :: AE.Value , exampleResponsePayload :: AE.Value - , newFields :: V.Vector Text + , newFields :: V.Vector Text , deletedFields :: V.Vector Text - , modifiedFields :: V.Vector Text - , fieldHashes :: V.Vector Text + , modifiedFields :: V.Vector Text + , fieldHashes :: V.Vector Text , firstSeenAt :: UTCTime } deriving stock (Generic, Show) @@ -433,7 +433,7 @@ data FieldChangeData = FieldChangeData , fieldCategory :: Text , previousType :: Maybe Text , newType :: Text - , changeType :: Text + , changeType :: Text } deriving stock (Generic, Show) deriving anyclass (NFData) @@ -448,8 +448,8 @@ data Issue = Issue , updatedAt :: ZonedTime , projectId :: Projects.ProjectId , issueType :: IssueType - , target_hash :: Text - , endpointHash :: Text + , target_hash :: Text + , endpointHash :: Text , acknowledgedAt :: Maybe ZonedTime , acknowledgedBy :: Maybe Users.UserId , archivedAt :: Maybe ZonedTime @@ -856,7 +856,7 @@ createNewErrorIssue projectId err = do -- | Create issue for an error spike -createErrorSpikeIssue :: Projects.ProjectId -> Errors.Error -> Double -> Double -> Double -> IO Issue +createErrorSpikeIssue :: Projects.ProjectId -> Errors.Error -> Double -> Double -> Double -> IO Issue createErrorSpikeIssue projectId err currentRate baselineMean baselineStddev = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime @@ -952,7 +952,7 @@ createNewLogPatternIssue projectId lp = do -- | Create an issue for a log pattern volume spike -createLogPatternSpikeIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> Double -> Double -> Double -> IO Issue +createLogPatternSpikeIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> Double -> Double -> Double -> IO Issue createLogPatternSpikeIssue projectId lp currentRate baselineMean baselineStddev = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime @@ -1051,7 +1051,7 @@ createLogPatternIssue projectId lp = do -- | Create an issue for an escalating error -createErrorEscalatingIssue :: Projects.ProjectId -> Errors.Error -> Text -> 
Double -> Text -> IO Issue +createErrorEscalatingIssue :: Projects.ProjectId -> Errors.Error -> Text -> Double -> Text -> IO Issue createErrorEscalatingIssue projectId err prevState escalationRate escalationWindow = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime @@ -1357,7 +1357,7 @@ createEndpointVolumeRateChangeIssue projectId epHash method path serviceName cur -- | Create an issue for a new endpoint -createNewEndpointIssue :: Projects.ProjectId -> Text -> Text -> Text -> Text -> IO Issue +createNewEndpointIssue :: Projects.ProjectId -> Text -> Text -> Text -> Text -> IO Issue createNewEndpointIssue projectId epHash method path host = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime @@ -1398,7 +1398,7 @@ createNewEndpointIssue projectId epHash method path host = do -- | Create an issue for a new shape -createNewShapeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Text -> Int -> AE.Value -> AE.Value -> V.Vector Text -> V.Vector Text -> V.Vector Text -> V.Vector Text -> IO Issue +createNewShapeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Text -> Int -> AE.Value -> AE.Value -> V.Vector Text -> V.Vector Text -> V.Vector Text -> V.Vector Text -> IO Issue createNewShapeIssue projectId shHash epHash method path statusCode reqPayload respPayload newFlds deletedFlds modifiedFlds fldHashes = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime @@ -1439,9 +1439,10 @@ createNewShapeIssue projectId shHash epHash method path statusCode reqPayload re , service = "" , critical = hasBreakingChanges , severity = if hasBreakingChanges then "critical" else "warning" - , recommendedAction = if hasBreakingChanges - then "Breaking API changes detected: " <> T.pack (show (V.length deletedFlds)) <> " deleted, " <> T.pack (show (V.length modifiedFlds)) <> " modified fields. Update clients immediately." - else "New API shape detected with " <> T.pack (show (V.length newFlds)) <> " new fields. Review for compatibility." + , recommendedAction = + if hasBreakingChanges + then "Breaking API changes detected: " <> T.pack (show (V.length deletedFlds)) <> " deleted, " <> T.pack (show (V.length modifiedFlds)) <> " modified fields. Update clients immediately." + else "New API shape detected with " <> T.pack (show (V.length newFlds)) <> " new fields. Review for compatibility." 
, migrationComplexity = if V.length deletedFlds > 5 then "high" else if hasBreakingChanges then "medium" else "low" , issueData = Aeson $ AE.toJSON shapeData , requestPayloads = Aeson [] @@ -1452,7 +1453,7 @@ createNewShapeIssue projectId shHash epHash method path statusCode reqPayload re -- | Create an issue for a field change -createFieldChangeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Text -> Text -> Text -> Maybe Text -> Text -> Text -> IO Issue +createFieldChangeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Text -> Text -> Text -> Maybe Text -> Text -> Text -> IO Issue createFieldChangeIssue projectId fldHash epHash method path keyPath category prevType newType changeType = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime diff --git a/src/Pkg/Mail.hs b/src/Pkg/Mail.hs index 7dfc5700d..67d87d6e6 100644 --- a/src/Pkg/Mail.hs +++ b/src/Pkg/Mail.hs @@ -16,6 +16,7 @@ import Effectful ( ) import Effectful.Log (Log) import Effectful.Reader.Static (Reader, ask) +import Models.Apis.Errors qualified as Errors import Models.Apis.RequestDumps qualified as RequestDumps import Models.Apis.Slack (DiscordData (..), SlackData (..), getDiscordDataByProjectId, getProjectSlackData) import Models.Projects.Projects qualified as Projects @@ -25,7 +26,6 @@ import System.Config (AuthContext (env)) import System.Config qualified as Config import System.Logging qualified as Log import System.Types (DB) -import Models.Apis.Errors qualified as Errors sendPostmarkEmail :: Notify.Notify :> es => Text -> Maybe (Text, AE.Value) -> Maybe (Text, Text) -> Eff es () From d48fc28d8438c106d01b288069a796e2ff75391f Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Thu, 8 Jan 2026 21:33:47 +0000 Subject: [PATCH 19/71] error_events trigger after error update/insertion --- .../migrations/0030_error_events_trigger.sql | 68 +++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 static/migrations/0030_error_events_trigger.sql diff --git a/static/migrations/0030_error_events_trigger.sql b/static/migrations/0030_error_events_trigger.sql new file mode 100644 index 000000000..2e3805b7f --- /dev/null +++ b/static/migrations/0030_error_events_trigger.sql @@ -0,0 +1,68 @@ +BEGIN; + +-- Trigger function to create error_events from error_data JSONB on errors table +-- The error_data column contains ATError structure with event-specific details +CREATE OR REPLACE FUNCTION apis.create_error_event_proc() RETURNS trigger AS $$ +BEGIN + IF TG_WHEN <> 'AFTER' THEN + RAISE EXCEPTION 'apis.create_error_event_proc() may only run as an AFTER trigger'; + END IF; + + IF NEW.error_data IS NOT NULL AND NEW.error_data != '{}'::jsonb THEN + INSERT INTO apis.error_events ( + project_id, + occurred_at, + target_hash, + exception_type, + message, + stack_trace, + service_name, + environment, + request_method, + request_path, + endpoint_hash, + trace_id, + span_id, + parent_span_id, + user_id, + user_email, + user_ip, + session_id, + sample_rate + ) VALUES ( + NEW.project_id, + COALESCE((NEW.error_data->>'when')::timestamptz, NOW()), + NEW.hash, + COALESCE(NEW.error_data->>'root_error_type', NEW.error_data->>'error_type', NEW.exception_type), + COALESCE(NEW.error_data->>'root_error_message', NEW.error_data->>'message', NEW.message), + COALESCE(NEW.error_data->>'stack_trace', NEW.stacktrace), + COALESCE(NEW.error_data->>'service_name', NEW.service, 'unknown'), + COALESCE(NEW.error_data->>'environment', NEW.environment), + NEW.error_data->>'request_method', + NEW.error_data->>'request_path',
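+ -- correlation and tracing context: ->> yields NULL for any key missing from error_data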
NEW.error_data->>'endpoint_hash', + NEW.error_data->>'trace_id', + NEW.error_data->>'span_id', + NEW.error_data->>'parent_span_id', + NEW.error_data->>'user_id', + NEW.error_data->>'user_email', + -- Handle user_ip carefully - convert to INET if valid, otherwise NULL + CASE + WHEN NEW.error_data->>'user_ip' IS NOT NULL + AND NEW.error_data->>'user_ip' != '' + THEN (NEW.error_data->>'user_ip')::inet + ELSE NULL + END, + NEW.error_data->>'session_id', + 1.0 -- default sample rate + ); + END IF; + + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER error_insert_create_event AFTER INSERT ON apis.errors FOR EACH ROW EXECUTE PROCEDURE apis.create_error_event_proc(); +CREATE TRIGGER error_update_create_event AFTER UPDATE OF error_data ON apis.errors FOR EACH ROW EXECUTE PROCEDURE apis.create_error_event_proc(); + +COMMIT; From 91852decf2132722bc67f2bdadf3c330e2d2307c Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Thu, 8 Jan 2026 21:34:40 +0000 Subject: [PATCH 20/71] complete switch to Errors.ATError --- src/BackgroundJobs.hs | 6 +++--- src/Models/Apis/Errors.hs | 3 ++- src/Models/Telemetry/Telemetry.hs | 7 ++++--- src/RequestMessages.hs | 16 ++++------------ 4 files changed, 13 insertions(+), 19 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 7bacd4eeb..53568c696 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -673,13 +673,13 @@ processOneMinuteErrors scheduledTime pid = do -- Log.logInfo "Completed 1-minute error processing" () -- | Process and insert errors for a specific project -processProjectErrors :: Projects.ProjectId -> V.Vector RequestDumps.ATError -> ATBackgroundCtx () +processProjectErrors :: Projects.ProjectId -> V.Vector Errors.ATError -> ATBackgroundCtx () processProjectErrors pid errors = do -- Process each error, extracting HTTP fields if available let processedErrors = V.map processError errors -- Extract queries and params - let (_, queries, paramsList) = V.unzip3 processedErrors + let (queries, paramsList) = V.unzip processedErrors -- Bulk insert errors result <- try $ V.zipWithM_ PG.execute queries paramsList @@ -694,7 +694,7 @@ processProjectErrors pid errors = do where -- Process a single error - the error already has requestMethod and requestPath -- set by getAllATErrors if it was extracted from span context - processError :: RequestDumps.ATError -> (RequestDumps.ATError, Query, [DBField]) + processError :: Errors.ATError -> (Query, [DBField]) processError = RequestMessages.processErrors pid Nothing Nothing Nothing diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index ea209a272..8dda0fe45 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -14,6 +14,7 @@ module Models.Apis.Errors ( updateErrorState, updateBaseline, resolveError, + upsertErrorQueryAndParam, assignError, -- Error Events (for baseline/spike detection) HourlyBucket (..), @@ -512,7 +513,7 @@ getErrorsWithCurrentRates pid = -- | Upsert an error (insert or update on conflict) -upsertErrorQueryAndParam :: DB es => Projects.ProjectId -> RequestDump.ATError -> (Query, [DBField]) +upsertErrorQueryAndParam :: Projects.ProjectId -> ATError -> (Query, [DBField]) upsertErrorQueryAndParam pid err = (q, params) where q = diff --git a/src/Models/Telemetry/Telemetry.hs b/src/Models/Telemetry/Telemetry.hs index 71d33bd4f..76c9eec20 100644 --- a/src/Models/Telemetry/Telemetry.hs +++ b/src/Models/Telemetry/Telemetry.hs @@ -98,6 +98,7 @@ import System.Types (DB) import Text.Regex.TDFA.Text () import UnliftIO (throwIO, tryAny) 
import Utils (lookupValueText, toXXHash) +import Models.Apis.Errors qualified as Errors -- Helper function to get nested value from a map using dot notation @@ -1012,7 +1013,7 @@ getErrorEvents _ = [] -- | Extract all runtime errors from a collection of spans -- This is the main entry point for error anomaly detection -getAllATErrors :: V.Vector OtelLogsAndSpans -> V.Vector RequestDumps.ATError +getAllATErrors :: V.Vector OtelLogsAndSpans -> V.Vector Errors.ATError getAllATErrors = V.concatMap extractErrorsFromSpan where extractErrorsFromSpan spanObj = @@ -1026,7 +1027,7 @@ getAllATErrors = V.concatMap extractErrorsFromSpan -- - exception.message: The exception message -- - exception.stacktrace: The stacktrace -- Also extracts HTTP context (method, path) for better error tracking -extractATError :: OtelLogsAndSpans -> AE.Value -> Maybe RequestDumps.ATError +extractATError :: OtelLogsAndSpans -> AE.Value -> Maybe Errors.ATError extractATError spanObj (AE.Object o) = do AE.Object attrs' <- KEM.lookup "event_attributes" o AE.Object attrs <- KEM.lookup "exception" attrs' @@ -1072,7 +1073,7 @@ extractATError spanObj (AE.Object o) = do -- Hash components: projectId + service + span name + error type + sanitized message/stack -- This ensures similar errors are grouped while allowing variations in the actual message return - $ RequestDumps.ATError + $ Errors.ATError { projectId = UUID.fromText spanObj.project_id >>= (Just . UUIDId) , when = spanObj.timestamp , errorType = typ diff --git a/src/RequestMessages.hs b/src/RequestMessages.hs index 40bc935b1..5303aea41 100644 --- a/src/RequestMessages.hs +++ b/src/RequestMessages.hs @@ -43,6 +43,7 @@ import Data.Vector.Algorithms.Intro qualified as VA import Database.PostgreSQL.Simple (Query) import Deriving.Aeson qualified as DAE import Models.Apis.Anomalies qualified as Anomalies +import Models.Apis.Errors qualified as Errors import Models.Apis.Fields.Types qualified as Fields ( Field (..), FieldCategoryEnum (..), @@ -149,19 +150,10 @@ replaceNullChars = T.replace "\\u0000" "" -- | Process errors with optional HTTP-specific fields -- If HTTP fields are not provided, they remain as Nothing in the error record -processErrors :: Projects.ProjectId -> Maybe RequestDumps.SDKTypes -> Maybe Text -> Maybe Text -> RequestDumps.ATError -> (RequestDumps.ATError, Query, [DBField]) -processErrors pid maybeSdkType maybeMethod maybePath err = (normalizedError, q, params) +processErrors :: Projects.ProjectId -> Maybe RequestDumps.SDKTypes -> Maybe Text -> Maybe Text -> Errors.ATError -> ( Query, [DBField]) +processErrors pid maybeSdkType maybeMethod maybePath err = (q, params) where - (q, params) = Anomalies.insertErrorQueryAndParams pid normalizedError - normalizedError = - err - { RequestDumps.projectId = Just pid - , RequestDumps.hash = Just $ fromMaybe defaultHash err.hash - , RequestDumps.technology = maybeSdkType <|> err.technology - , RequestDumps.requestMethod = maybeMethod <|> err.requestMethod - , RequestDumps.requestPath = maybePath <|> err.requestPath - } - defaultHash = toXXHash (pid.toText <> fromMaybe "" err.serviceName <> err.errorType <> replaceAllFormats (err.message <> err.stackTrace) <> maybe "" show maybeSdkType) + (q, params) = Errors.upsertErrorQueryAndParam pid err sortVector :: Ord a => V.Vector a -> V.Vector a From 9a70a27e08b2e3eae696a1dbb819beb3a610527f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 8 Jan 2026 21:35:33 +0000 Subject: [PATCH 21/71] Auto-format code with fourmolu --- src/Models/Telemetry/Telemetry.hs | 2 +- 
src/RequestMessages.hs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Models/Telemetry/Telemetry.hs b/src/Models/Telemetry/Telemetry.hs index 76c9eec20..cc342322c 100644 --- a/src/Models/Telemetry/Telemetry.hs +++ b/src/Models/Telemetry/Telemetry.hs @@ -84,6 +84,7 @@ import Effectful.Log (Log) import Effectful.PostgreSQL (WithConnection) import Effectful.PostgreSQL qualified as PG import Effectful.Reader.Static qualified as Eff +import Models.Apis.Errors qualified as Errors import Models.Apis.RequestDumps qualified as RequestDumps import Models.Projects.Projects qualified as Projects import NeatInterpolation (text) @@ -98,7 +99,6 @@ import System.Types (DB) import Text.Regex.TDFA.Text () import UnliftIO (throwIO, tryAny) import Utils (lookupValueText, toXXHash) -import Models.Apis.Errors qualified as Errors -- Helper function to get nested value from a map using dot notation diff --git a/src/RequestMessages.hs b/src/RequestMessages.hs index 5303aea41..07ec5753d 100644 --- a/src/RequestMessages.hs +++ b/src/RequestMessages.hs @@ -150,7 +150,7 @@ replaceNullChars = T.replace "\\u0000" "" -- | Process errors with optional HTTP-specific fields -- If HTTP fields are not provided, they remain as Nothing in the error record -processErrors :: Projects.ProjectId -> Maybe RequestDumps.SDKTypes -> Maybe Text -> Maybe Text -> Errors.ATError -> ( Query, [DBField]) +processErrors :: Projects.ProjectId -> Maybe RequestDumps.SDKTypes -> Maybe Text -> Maybe Text -> Errors.ATError -> (Query, [DBField]) processErrors pid maybeSdkType maybeMethod maybePath err = (q, params) where (q, params) = Errors.upsertErrorQueryAndParam pid err From 16f3f8ceb909eb95c40b6410379d7cd64359e6bd Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Fri, 9 Jan 2026 11:45:08 +0000 Subject: [PATCH 22/71] towards api change anomalies; next step is to handle them intelligently --- src/BackgroundJobs.hs | 89 +++++++++++++++++++ src/Models/Apis/Fields/Types.hs | 39 +++++++- src/Models/Apis/Shapes.hs | 45 +++++++++- .../0031_new_api_change_triggers.sql | 60 +++++++++++++ 4 files changed, 231 insertions(+), 2 deletions(-) create mode 100644 static/migrations/0031_new_api_change_triggers.sql diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 53568c696..807848b86 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -129,6 +129,10 @@ data BgJobs | EndpointLatencyDegradationDetection Projects.ProjectId -- Detect endpoint latency degradation | EndpointErrorRateSpikeDetection Projects.ProjectId -- Detect endpoint error rate spikes | EndpointVolumeRateChangeDetection Projects.ProjectId -- Detect endpoint volume changes (spike/drop) + | -- API change detection jobs (from SQL triggers) + NewEndpoint Projects.ProjectId Text -- projectId, endpoint hash - creates issue for new endpoint + | NewShape Projects.ProjectId Text -- projectId, shape hash - creates issue for new shape + | NewFieldChange Projects.ProjectId Text -- projectId, field hash - creates issue for field change deriving stock (Generic, Show) deriving anyclass (AE.FromJSON, AE.ToJSON) @@ -381,6 +385,10 @@ processBackgroundJob authCtx bgJob = EndpointLatencyDegradationDetection pid -> detectEndpointLatencyDegradation pid authCtx EndpointErrorRateSpikeDetection pid -> detectEndpointErrorRateSpike pid authCtx EndpointVolumeRateChangeDetection pid -> detectEndpointVolumeRateChange pid authCtx + -- API change detection jobs + NewEndpoint pid hash -> processNewEndpoint pid hash authCtx + NewShape pid hash -> processNewShape pid hash authCtx +
NewFieldChange pid hash -> processNewFieldChange pid hash authCtx -- | Run hourly scheduled tasks for all projects @@ -1961,3 +1969,84 @@ detectEndpointVolumeRateChange pid authCtx = do _ -> pass Log.logInfo "Finished endpoint volume rate change detection" pid + + +-- | Process new endpoint detected by SQL trigger +processNewEndpoint :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBackgroundCtx () +processNewEndpoint pid hash authCtx = do + Log.logInfo "Processing new endpoint" (pid, hash) + endpointM <- Endpoints.getEndpointByHash pid hash + case endpointM of + Nothing -> Log.logAttention "Endpoint not found for new endpoint processing" (pid, hash) + Just ep -> do + issue <- liftIO $ Issues.createNewEndpointIssue pid ep.hash ep.method ep.urlPath ep.host + Issues.insertIssue issue + + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + + Log.logInfo "Created issue for new endpoint" (pid, hash, issue.id) + + +-- | Process new shape detected by SQL trigger +processNewShape :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBackgroundCtx () +processNewShape pid hash authCtx = do + Log.logInfo "Processing new shape" (pid, hash) + + shapeM <- Shapes.getShapeForIssue hash + + case shapeM of + Nothing -> Log.logAttention "Shape not found for new shape processing" (pid, hash) + Just sh -> do + issue <- + liftIO $ + Issues.createNewShapeIssue + pid + sh.shapeHash + sh.endpointHash + sh.method + sh.path + sh.statusCode + sh.exampleRequestPayload + sh.exampleResponsePayload + sh.newFields + sh.deletedFields + sh.modifiedFields + sh.fieldHashes + Issues.insertIssue issue + + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + + Log.logInfo "Created issue for new shape" (pid, hash, issue.id) + + +-- | Process new field change detected by SQL trigger +processNewFieldChange :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBackgroundCtx () +processNewFieldChange pid hash authCtx = do + Log.logInfo "Processing new field change" (pid, hash) + + fieldM <- Fields.getFieldForIssue hash + + case fieldM of + Nothing -> Log.logAttention "Field not found for new field change processing" (pid, hash) + Just fld -> do + issue <- + liftIO $ + Issues.createFieldChangeIssue + pid + fld.fieldHash + fld.endpointHash + fld.method + fld.path + fld.keyPath + fld.fieldCategory + Nothing + fld.fieldType + "added" + Issues.insertIssue issue + + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + + Log.logInfo "Created issue for new field change" (pid, hash, issue.id) diff --git a/src/Models/Apis/Fields/Types.hs b/src/Models/Apis/Fields/Types.hs index e5279dd3c..5408ccecf 100644 --- a/src/Models/Apis/Fields/Types.hs +++ b/src/Models/Apis/Fields/Types.hs @@ -9,10 +9,12 @@ module Models.Apis.Fields.Types ( FacetSummary (..), FacetValue (..), FacetData (..), + FieldForIssue (..), parseFieldCategoryEnum, parseFieldTypes, fieldTypeToText, bulkInsertFields, + getFieldForIssue, -- Formats Format (..), FormatId, @@ -29,7 +31,7 @@ import Data.Time (ZonedTime) import Data.UUID qualified as UUID import Data.Vector qualified as V import Database.PostgreSQL.Entity.Types (CamelToSnake, Entity, FieldModifiers, GenericEntity, PrimaryKey, Schema, TableName) -import Database.PostgreSQL.Simple (FromRow, ToRow) +import Database.PostgreSQL.Simple (FromRow, Only 
(..), ToRow) import Database.PostgreSQL.Simple.FromField (FromField) import Database.PostgreSQL.Simple.Newtypes (Aeson (..)) import Database.PostgreSQL.Simple.SqlQQ (sql) @@ -319,3 +321,38 @@ data SwFormat = SwFormat deriving anyclass (AE.ToJSON) deriving (FromField) via Aeson SwFormat deriving (AE.FromJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] SwFormat + + +-- | Field data needed for creating issues +data FieldForIssue = FieldForIssue + { fieldHash :: Text + , endpointHash :: Text + , method :: Text + , path :: Text + , keyPath :: Text + , fieldCategory :: Text + , fieldType :: Text + , format :: Text + } + deriving stock (Generic, Show) + deriving anyclass (FromRow) + + +getFieldForIssue :: DB es => Text -> Eff es (Maybe FieldForIssue) +getFieldForIssue hash = listToMaybe <$> PG.query q (Only hash) + where + q = + [sql| + SELECT + f.hash, + f.endpoint_hash, + COALESCE(e.method, 'UNKNOWN'), + COALESCE(e.url_path, '/'), + f.key_path, + f.field_category::TEXT, + f.field_type::TEXT, + f.format + FROM apis.fields f + LEFT JOIN apis.endpoints e ON e.hash = f.endpoint_hash + WHERE f.hash = ? + |] diff --git a/src/Models/Apis/Shapes.hs b/src/Models/Apis/Shapes.hs index 2f553afb6..a6cfb9b7c 100644 --- a/src/Models/Apis/Shapes.hs +++ b/src/Models/Apis/Shapes.hs @@ -3,7 +3,9 @@ module Models.Apis.Shapes ( ShapeWithFields (..), SwShape (..), ShapeId, + ShapeForIssue (..), bulkInsertShapes, + getShapeForIssue, ) where @@ -12,7 +14,7 @@ import Data.Default (Default) import Data.Time (UTCTime) import Data.Vector qualified as V import Database.PostgreSQL.Entity.Types (CamelToSnake, Entity, FieldModifiers, GenericEntity, PrimaryKey, Schema, TableName) -import Database.PostgreSQL.Simple (FromRow, ToRow) +import Database.PostgreSQL.Simple (FromRow, Only (..), ToRow) import Database.PostgreSQL.Simple.FromField (FromField) import Database.PostgreSQL.Simple.Newtypes (Aeson (..)) import Database.PostgreSQL.Simple.SqlQQ (sql) @@ -114,3 +116,44 @@ data SwShape = SwShape deriving anyclass (AE.ToJSON) deriving (FromField) via Aeson SwShape deriving (AE.FromJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] SwShape + + +-- | Shape data needed for creating issues +data ShapeForIssue = ShapeForIssue + { shapeHash :: Text + , endpointHash :: Text + , method :: Text + , path :: Text + , statusCode :: Int + , exampleRequestPayload :: AE.Value + , exampleResponsePayload :: AE.Value + , newFields :: V.Vector Text + , deletedFields :: V.Vector Text + , modifiedFields :: V.Vector Text + , fieldHashes :: V.Vector Text + } + deriving stock (Generic, Show) + deriving anyclass (FromRow) + + +getShapeForIssue :: DB es => Text -> Eff es (Maybe ShapeForIssue) +getShapeForIssue hash = listToMaybe <$> PG.query q (Only hash) + where + q = + [sql| + SELECT + s.hash, + s.endpoint_hash, + COALESCE(e.method, 'UNKNOWN'), + COALESCE(e.url_path, '/'), + s.status_code, + COALESCE(s.example_request_payload, '{}'::jsonb), + COALESCE(s.example_response_payload, '{}'::jsonb), + COALESCE(s.new_unique_fields, '{}'::TEXT[]), + COALESCE(s.deleted_fields, '{}'::TEXT[]), + COALESCE(s.updated_field_formats, '{}'::TEXT[]), + COALESCE(s.field_hashes, '{}'::TEXT[]) + FROM apis.shapes s + LEFT JOIN apis.endpoints e ON e.hash = s.endpoint_hash + WHERE s.hash = ? 
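+ -- LEFT JOIN: a shape can land before its endpoint row exists, in which case method/path fall back to the COALESCE defaults above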
+ |] diff --git a/static/migrations/0031_new_api_change_triggers.sql b/static/migrations/0031_new_api_change_triggers.sql new file mode 100644 index 000000000..be01486e7 --- /dev/null +++ b/static/migrations/0031_new_api_change_triggers.sql @@ -0,0 +1,60 @@ +-- Migration: Replace generic new_anomaly_proc with specific triggers for NewEndpoint, NewShape, NewFieldChange +-- These triggers create background jobs directly without inserting into apis.anomalies table + +BEGIN; + +------------------------------------------------------------------------ +-- UNIFIED API CHANGE TRIGGER FUNCTION +------------------------------------------------------------------------ +CREATE OR REPLACE FUNCTION apis.api_change_detected_proc() RETURNS trigger AS $$ +DECLARE + job_tag TEXT; +BEGIN + IF TG_WHEN <> 'AFTER' THEN + RAISE EXCEPTION 'apis.api_change_detected_proc() may only run as an AFTER trigger'; + END IF; + + -- Get job tag from trigger argument (NewEndpoint, NewShape, NewFieldChange) + job_tag := TG_ARGV[0]; + + INSERT INTO background_jobs (run_at, status, payload) + VALUES ( + now(), + 'queued', + jsonb_build_object( + 'tag', job_tag, + 'projectId', NEW.project_id, + 'hash', NEW.hash + ) + ); + + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + +------------------------------------------------------------------------ +-- DROP OLD TRIGGERS AND CREATE NEW ONES +------------------------------------------------------------------------ + +-- Drop old triggers that use new_anomaly_proc +DROP TRIGGER IF EXISTS endpoint_created_anomaly ON apis.endpoints; +DROP TRIGGER IF EXISTS shapes_created_anomaly ON apis.shapes; +DROP TRIGGER IF EXISTS fields_created_anomaly ON apis.fields; + +-- Create new triggers +CREATE TRIGGER endpoint_created_new + AFTER INSERT ON apis.endpoints + FOR EACH ROW + EXECUTE FUNCTION apis.api_change_detected_proc('NewEndpoint'); + +CREATE TRIGGER shape_created_new + AFTER INSERT ON apis.shapes + FOR EACH ROW + EXECUTE FUNCTION apis.api_change_detected_proc('NewShape'); + +CREATE TRIGGER field_created_new + AFTER INSERT ON apis.fields + FOR EACH ROW + EXECUTE FUNCTION apis.api_change_detected_proc('NewFieldChange'); + +COMMIT; From d3623a28cd25379c47e1238b65f8fefd962d15e6 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 9 Jan 2026 11:46:16 +0000 Subject: [PATCH 23/71] Auto-format code with fourmolu --- src/BackgroundJobs.hs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 807848b86..c9341ca39 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -1999,8 +1999,8 @@ processNewShape pid hash authCtx = do Nothing -> Log.logAttention "Shape not found for new shape processing" (pid, hash) Just sh -> do issue <- - liftIO $ - Issues.createNewShapeIssue + liftIO + $ Issues.createNewShapeIssue pid sh.shapeHash sh.endpointHash @@ -2032,8 +2032,8 @@ processNewFieldChange pid hash authCtx = do Nothing -> Log.logAttention "Field not found for new field change processing" (pid, hash) Just fld -> do issue <- - liftIO $ - Issues.createFieldChangeIssue + liftIO + $ Issues.createFieldChangeIssue pid fld.fieldHash fld.endpointHash From b395ac0292da99ad8446da9aab0c5f22ccdd3bbd Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Fri, 9 Jan 2026 20:38:27 +0000 Subject: [PATCH 24/71] feat: new endpoint anomaly alerts --- src/BackgroundJobs.hs | 110 +++++++++++++++++++++++++++--------------- src/Pkg/Mail.hs | 39 +++++++-------- 2 files changed, 88 insertions(+), 61 deletions(-) diff --git a/src/BackgroundJobs.hs 
b/src/BackgroundJobs.hs index c9341ca39..03ce4f03a 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -1206,35 +1206,35 @@ processAPIChangeAnomalies pid targetHashes = do createJob conn "background_jobs" $ BackgroundJobs.EnhanceIssuesWithLLM pid (V.singleton issue.id) pass - -- Send notifications - projectM <- Projects.projectById pid - whenJust projectM \project -> do - users <- Projects.usersByProjectId pid - let endpointInfo = - map - ( \(_, anoms) -> - let firstAnom = V.head anoms - in fromMaybe "UNKNOWN" firstAnom.endpointMethod <> " " <> fromMaybe "/" firstAnom.endpointUrlPath - ) - anomaliesByEndpoint - -- Only send notifications if we have valid endpoint info - Relude.when (project.endpointAlerts && not (null endpointInfo)) do - let alert = EndpointAlert{project = project.title, endpoints = V.fromList endpointInfo, endpointHash = fromMaybe "" $ viaNonEmpty head $ V.toList targetHashes} - - forM_ project.notificationsChannel \case - Projects.NSlack -> sendSlackAlert alert pid project.title Nothing - Projects.NDiscord -> sendDiscordAlert alert pid project.title Nothing - Projects.NPhone -> sendWhatsAppAlert alert pid project.title project.whatsappNumbers - Projects.NEmail -> do - forM_ users \u -> do - let templateVars = - AE.object - [ "user_name" AE..= u.firstName - , "project_name" AE..= project.title - , "anomaly_url" AE..= (authCtx.env.hostUrl <> "p/" <> pid.toText <> "/issues") - , "endpoint_name" AE..= endpointInfo - ] - sendPostmarkEmail (CI.original u.email) (Just ("anomaly-endpoint-2", templateVars)) Nothing + -- -- Send notifications + -- projectM <- Projects.projectById pid + -- whenJust projectM \project -> do + -- users <- Projects.usersByProjectId pid + -- let endpointInfo = + -- map + -- ( \(_, anoms) -> + -- let firstAnom = V.head anoms + -- in fromMaybe "UNKNOWN" firstAnom.endpointMethod <> " " <> fromMaybe "/" firstAnom.endpointUrlPath + -- ) + -- anomaliesByEndpoint + -- -- Only send notifications if we have valid endpoint info + -- Relude.when (project.endpointAlerts && not (null endpointInfo)) do + -- let alert = EndpointAlert{project = project.title, endpoints = V.fromList endpointInfo, endpointHash = fromMaybe "" $ viaNonEmpty head $ V.toList targetHashes} + + -- forM_ project.notificationsChannel \case + -- Projects.NSlack -> sendSlackAlert alert pid project.title Nothing + -- Projects.NDiscord -> sendDiscordAlert alert pid project.title Nothing + -- Projects.NPhone -> sendWhatsAppAlert alert pid project.title project.whatsappNumbers + -- Projects.NEmail -> do + -- forM_ users \u -> do + -- let templateVars = + -- AE.object + -- [ "user_name" AE..= u.firstName + -- , "project_name" AE..= project.title + -- , "anomaly_url" AE..= (authCtx.env.hostUrl <> "p/" <> pid.toText <> "/issues") + -- , "endpoint_name" AE..= (method <> " " <> urlPath) + -- ] + -- sendPostmarkEmail (CI.original u.email) (Just ("anomaly-endpoint-2", templateVars)) Nothing -- | Group anomalies by endpoint hash @@ -1975,17 +1975,47 @@ detectEndpointVolumeRateChange pid authCtx = do processNewEndpoint :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBackgroundCtx () processNewEndpoint pid hash authCtx = do Log.logInfo "Processing new endpoint" (pid, hash) - endpointM <- Endpoints.getEndpointByHash pid hash - case endpointM of - Nothing -> Log.logAttention "Endpoint not found for new endpoint processing" (pid, hash) - Just ep -> do - issue <- liftIO $ Issues.createNewEndpointIssue pid ep.hash ep.method ep.urlPath ep.host - Issues.insertIssue issue - - liftIO $ 
withResource authCtx.jobsPool \conn -> - void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) - - Log.logInfo "Created issue for new endpoint" (pid, hash, issue.id) + totalEvents <- do + res <- PG.query [sql| SELECT count(5000) from otel_logs_and_spans WHERE project_id = ? AND timestamp >= now() - interval '7 days' |] (Only pid) + case res of + [Only cnt] -> return cnt + _ -> return 0 + if totalEvents < 5000 + then Log.logInfo "Skipping new endpoint issue creation due to low event volume" (pid, hash, totalEvents) + else do + projectM <- Projects.projectById pid + whenJust projectM \project -> do + endpointM <- Endpoints.getEndpointByHash pid hash + case endpointM of + Nothing -> Log.logAttention "Endpoint not found for new endpoint processing" (pid, hash) + Just ep -> do + issue <- liftIO $ Issues.createNewEndpointIssue pid ep.hash ep.method ep.urlPath ep.host + Issues.insertIssue issue + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + Log.logInfo "Created issue for new endpoint" (pid, hash, issue.id) + -- Send notifications only if project exists and has alerts enabled + Relude.when project.endpointAlerts $ do + users <- Projects.usersByProjectId pid + let issueId = UUID.toText issue.id.unUUIDId + title = project.title + forM_ project.notificationsChannel \case + Projects.NSlack -> sendSlackAlert (EndpointAlert title ep.method ep.urlPath ep.hash) pid project.title Nothing + Projects.NDiscord -> sendDiscordAlert (EndpointAlert title ep.method ep.urlPath ep.hash) pid project.title Nothing + Projects.NPhone -> sendWhatsAppAlert (EndpointAlert title ep.method ep.urlPath ep.hash) pid project.title project.whatsappNumbers + Projects.NEmail -> + forM_ users \u -> do + let endpoint_url = authCtx.env.hostUrl <> "p/" <> pid.toText <> "/anomalies/" <> issueId + endpoint_name = ep.method <> " " <> ep.urlPath + firstName = u.firstName + templateVars = + [aesonQQ|{ + "user_name": #{firstName}, + "project_name": #{title}, + "anomaly_url": #{endpoint_url}, + "endpoint_name": #{endpoint_name} + }|] + sendPostmarkEmail (CI.original u.email) (Just ("anomaly-endpoint-2", templateVars)) Nothing -- | Process new shape detected by SQL trigger diff --git a/src/Pkg/Mail.hs b/src/Pkg/Mail.hs index 67d87d6e6..1cdaeff19 100644 --- a/src/Pkg/Mail.hs +++ b/src/Pkg/Mail.hs @@ -44,7 +44,7 @@ sendSlackMessage pid message = do data NotificationAlerts - = EndpointAlert {project :: Text, endpoints :: V.Vector Text, endpointHash :: Text} + = EndpointAlert {project :: Text, method:: Text, urlPath :: Text, endpointHash :: Text} | RuntimeErrorAlert {issueId :: Text, errorData :: Errors.ATError} | ShapeAlert | ReportAlert @@ -78,7 +78,7 @@ sendDiscordAlert alert pid pTitle channelIdM' = do let projectUrl = appCtx.env.hostUrl <> "p/" <> pid.toText case alert of RuntimeErrorAlert{..} -> send $ discordErrorAlert errorData pTitle projectUrl - EndpointAlert{..} -> send $ discordNewEndpointAlert project endpoints endpointHash projectUrl + EndpointAlert{..} -> send $ discordNewEndpointAlert project method urlPath endpointHash projectUrl ShapeAlert -> pass ReportAlert{..} -> send $ discordReportAlert reportType startTime endTime totalErrors totalEvents breakDown pTitle reportUrl allChartUrl errorChartUrl MonitorsAlert{..} -> @@ -105,7 +105,7 @@ sendSlackAlert alert pid pTitle channelM = do let projectUrl = appCtx.env.hostUrl <> "p/" <> pid.toText case alert of RuntimeErrorAlert{..} -> sendAlert cid $ 
slackErrorAlert errorData pTitle cid projectUrl - EndpointAlert{..} -> sendAlert cid $ slackNewEndpointsAlert project endpoints cid endpointHash projectUrl + EndpointAlert{..} -> sendAlert cid $ slackNewEndpointsAlert project method urlPath cid endpointHash projectUrl ShapeAlert -> pass ReportAlert{..} -> sendAlert cid $ slackReportAlert reportType startTime endTime totalErrors totalEvents breakDown pTitle cid reportUrl allChartUrl errorChartUrl MonitorsAlert{..} -> @@ -130,7 +130,7 @@ sendWhatsAppAlert alert pid pTitle tos = do EndpointAlert{..} -> do let template = appCtx.config.whatsappEndpointTemplate url = pid.toText <> "/anomalies/by_hash/" <> endpointHash - contentVars = AE.object ["1" AE..= ("*" <> pTitle <> "*"), "2" AE..= T.intercalate "." ((\x -> "`" <> x <> "`") <$> V.toList endpoints), "3" AE..= url] + contentVars = AE.object ["1" AE..= ("*" <> pTitle <> "*"), "2" AE..= ("`" <> method <> " " <> urlPath <> "`"), "3" AE..= url] sendAlert template contentVars pass ReportAlert{..} -> do @@ -246,16 +246,16 @@ slackErrorAlert err project channelId projectUrl = firstSeen = toText $ formatTime defaultTimeLocale "%b %-e, %Y, %-l:%M:%S %p" err.when -slackNewEndpointsAlert :: Text -> V.Vector Text -> Text -> Text -> Text -> AE.Value -slackNewEndpointsAlert projectName endpoints channelId hash projectUrl = +slackNewEndpointsAlert :: Text -> Text -> Text -> Text -> Text -> Text -> AE.Value +slackNewEndpointsAlert projectName method urlPath channelId hash projectUrl = AE.object [ "blocks" AE..= AE.Array ( V.fromList - [ AE.object ["type" AE..= "section", "text" AE..= AE.object ["type" AE..= "mrkdwn", "text" AE..= ("<" <> targetUrl <> "|:large_blue_circle: New Endpoint(s) Detected>")]] - , AE.object ["type" AE..= "section", "text" AE..= AE.object ["type" AE..= "mrkdwn", "text" AE..= ("We've detected *" <> toText (show $ length endpoints) <> "* new endpoint(s) in *" <> projectName <> "*.")]] + [ AE.object ["type" AE..= "section", "text" AE..= AE.object ["type" AE..= "mrkdwn", "text" AE..= ("<" <> targetUrl <> "|:large_blue_circle: New Endpoint Detected>")]] + , AE.object ["type" AE..= "section", "text" AE..= AE.object ["type" AE..= "mrkdwn", "text" AE..= ("We've detected a new endpoint in *" <> projectName <> "*.")]] , AE.object ["type" AE..= "divider"] - , AE.object ["type" AE..= "section", "text" AE..= AE.object ["type" AE..= "mrkdwn", "text" AE..= ("*Endpoints:*\n\n" <> enps)]] + , AE.object ["type" AE..= "section", "text" AE..= AE.object ["type" AE..= "mrkdwn", "text" AE..= ("*Endpoint:*\n\n" <> enps)]] , AE.object [ "type" AE..= "actions" , "elements" @@ -267,10 +267,9 @@ slackNewEndpointsAlert projectName endpoints channelId hash projectUrl = ] where targetUrl = projectUrl <> "/anomalies/by_hash/" <> hash - enp = (\x -> "\"" <> x <> "\"") . 
T.dropWhile (/= ' ') <$> V.toList endpoints - query = urlEncode True $ encodeUtf8 $ "attributes.http.route in (" <> T.intercalate "," enp <> ")" + query = urlEncode True $ encodeUtf8 $ "method=\"" <> method <> "\" AND attributes.http.route in (\"" <> urlPath <> "\")" explorerUrl = projectUrl <> "/log_explorer?query=" <> decodeUtf8 query - enps = T.intercalate "\n\n" $ (\x -> "`" <> x <> "`") <$> V.toList endpoints + enps = "`" <> method <> " " <> urlPath <> "`" discordReportAlert :: Text -> Text -> Text -> Int -> Int -> V.Vector (Text, Int, Int) -> Text -> Text -> Text -> Text -> AE.Value @@ -339,8 +338,8 @@ discordErrorAlert err project projectUrl = serviceName = fromMaybe "" err.serviceName -discordNewEndpointAlert :: Text -> V.Vector Text -> Text -> Text -> AE.Value -discordNewEndpointAlert projectName endpoints hash projectUrl = +discordNewEndpointAlert :: Text -> Text -> Text -> Text -> Text -> AE.Value +discordNewEndpointAlert projectName method urlPath hash projectUrl = [aesonQQ| { "embeds": [ @@ -349,22 +348,20 @@ discordNewEndpointAlert projectName endpoints hash projectUrl = "description": #{description}, "color": 263167, "fields": [ - {"name": "Endpoints","value": #{enps},"inline": false}, + {"name": "Endpoint","value": #{enp},"inline": false}, {"name": "···","value": #{explorerLink},"inline": false} ], "footer": { "text": "{···}"}, "url": #{url} } ], - "content" : "🔵 New Endpoint(s) Detected" + "content" : "🔵 New Endpoint Detected" } |] where - endpointsCount = length endpoints - description = "We've detected **" <> show endpointsCount <> " new endpoints** in the **" <> projectName <> "** project." + description = "We've detected a new endpoint in the **" <> projectName <> "** project." url = projectUrl <> "/anomalies/by_hash/" <> hash - enp = (\x -> "\"" <> x <> "\"") . 
T.dropWhile (/= ' ') <$> V.toList endpoints - query = urlEncode True $ encodeUtf8 $ "attributes.http.route in (" <> T.intercalate "," enp <> ")" + query = urlEncode True $ encodeUtf8 $ "method=\"" <> method <> "\" AND attributes.http.route in (\"" <> urlPath <> "\")" explorerUrl = projectUrl <> "/log_explorer?query=" <> decodeUtf8 query explorerLink = "[View in Explorer](" <> explorerUrl <> ")" - enps = T.intercalate "\n\n" $ (\x -> "`" <> x <> "`") <$> V.toList endpoints + enp = "`" <> method <> " " <> urlPath <> "`" \ No newline at end of file From 2d55973b0b9fcf7cb99d749ccfd87b46ac502ed6 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 9 Jan 2026 20:39:15 +0000 Subject: [PATCH 25/71] Auto-format code with fourmolu --- src/BackgroundJobs.hs | 68 +++++++++++++++++++++---------------------- src/Pkg/Mail.hs | 6 ++-- 2 files changed, 37 insertions(+), 37 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 03ce4f03a..6ea3a3495 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -1206,36 +1206,36 @@ processAPIChangeAnomalies pid targetHashes = do createJob conn "background_jobs" $ BackgroundJobs.EnhanceIssuesWithLLM pid (V.singleton issue.id) pass - -- -- Send notifications - -- projectM <- Projects.projectById pid - -- whenJust projectM \project -> do - -- users <- Projects.usersByProjectId pid - -- let endpointInfo = - -- map - -- ( \(_, anoms) -> - -- let firstAnom = V.head anoms - -- in fromMaybe "UNKNOWN" firstAnom.endpointMethod <> " " <> fromMaybe "/" firstAnom.endpointUrlPath - -- ) - -- anomaliesByEndpoint - -- -- Only send notifications if we have valid endpoint info - -- Relude.when (project.endpointAlerts && not (null endpointInfo)) do - -- let alert = EndpointAlert{project = project.title, endpoints = V.fromList endpointInfo, endpointHash = fromMaybe "" $ viaNonEmpty head $ V.toList targetHashes} - - -- forM_ project.notificationsChannel \case - -- Projects.NSlack -> sendSlackAlert alert pid project.title Nothing - -- Projects.NDiscord -> sendDiscordAlert alert pid project.title Nothing - -- Projects.NPhone -> sendWhatsAppAlert alert pid project.title project.whatsappNumbers - -- Projects.NEmail -> do - -- forM_ users \u -> do - -- let templateVars = - -- AE.object - -- [ "user_name" AE..= u.firstName - -- , "project_name" AE..= project.title - -- , "anomaly_url" AE..= (authCtx.env.hostUrl <> "p/" <> pid.toText <> "/issues") - -- , "endpoint_name" AE..= (method <> " " <> urlPath) - -- ] - -- sendPostmarkEmail (CI.original u.email) (Just ("anomaly-endpoint-2", templateVars)) Nothing +-- -- Send notifications +-- projectM <- Projects.projectById pid +-- whenJust projectM \project -> do +-- users <- Projects.usersByProjectId pid +-- let endpointInfo = +-- map +-- ( \(_, anoms) -> +-- let firstAnom = V.head anoms +-- in fromMaybe "UNKNOWN" firstAnom.endpointMethod <> " " <> fromMaybe "/" firstAnom.endpointUrlPath +-- ) +-- anomaliesByEndpoint +-- -- Only send notifications if we have valid endpoint info +-- Relude.when (project.endpointAlerts && not (null endpointInfo)) do +-- let alert = EndpointAlert{project = project.title, endpoints = V.fromList endpointInfo, endpointHash = fromMaybe "" $ viaNonEmpty head $ V.toList targetHashes} + +-- forM_ project.notificationsChannel \case +-- Projects.NSlack -> sendSlackAlert alert pid project.title Nothing +-- Projects.NDiscord -> sendDiscordAlert alert pid project.title Nothing +-- Projects.NPhone -> sendWhatsAppAlert alert pid project.title project.whatsappNumbers +-- Projects.NEmail -> 
do +-- forM_ users \u -> do +-- let templateVars = +-- AE.object +-- [ "user_name" AE..= u.firstName +-- , "project_name" AE..= project.title +-- , "anomaly_url" AE..= (authCtx.env.hostUrl <> "p/" <> pid.toText <> "/issues") +-- , "endpoint_name" AE..= (method <> " " <> urlPath) +-- ] +-- sendPostmarkEmail (CI.original u.email) (Just ("anomaly-endpoint-2", templateVars)) Nothing -- | Group anomalies by endpoint hash groupAnomaliesByEndpointHash :: V.Vector Anomalies.AnomalyVM -> [(Text, V.Vector Anomalies.AnomalyVM)] @@ -1976,10 +1976,10 @@ processNewEndpoint :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBack processNewEndpoint pid hash authCtx = do Log.logInfo "Processing new endpoint" (pid, hash) totalEvents <- do - res <- PG.query [sql| SELECT count(5000) from otel_logs_and_spans WHERE project_id = ? AND timestamp >= now() - interval '7 days' |] (Only pid) - case res of - [Only cnt] -> return cnt - _ -> return 0 + res <- PG.query [sql| SELECT count(5000) from otel_logs_and_spans WHERE project_id = ? AND timestamp >= now() - interval '7 days' |] (Only pid) + case res of + [Only cnt] -> return cnt + _ -> return 0 if totalEvents < 5000 then Log.logInfo "Skipping new endpoint issue creation due to low event volume" (pid, hash, totalEvents) else do @@ -2015,7 +2015,7 @@ processNewEndpoint pid hash authCtx = do "anomaly_url": #{endpoint_url}, "endpoint_name": #{endpoint_name} }|] - sendPostmarkEmail (CI.original u.email) (Just ("anomaly-endpoint-2", templateVars)) Nothing + sendPostmarkEmail (CI.original u.email) (Just ("anomaly-endpoint-2", templateVars)) Nothing -- | Process new shape detected by SQL trigger diff --git a/src/Pkg/Mail.hs b/src/Pkg/Mail.hs index 1cdaeff19..305805bee 100644 --- a/src/Pkg/Mail.hs +++ b/src/Pkg/Mail.hs @@ -44,7 +44,7 @@ sendSlackMessage pid message = do data NotificationAlerts - = EndpointAlert {project :: Text, method:: Text, urlPath :: Text, endpointHash :: Text} + = EndpointAlert {project :: Text, method :: Text, urlPath :: Text, endpointHash :: Text} | RuntimeErrorAlert {issueId :: Text, errorData :: Errors.ATError} | ShapeAlert | ReportAlert @@ -269,7 +269,7 @@ slackNewEndpointsAlert projectName method urlPath channelId hash projectUrl = targetUrl = projectUrl <> "/anomalies/by_hash/" <> hash query = urlEncode True $ encodeUtf8 $ "method=\"" <> method <> "\" AND attributes.http.route in (\"" <> urlPath <> "\")" explorerUrl = projectUrl <> "/log_explorer?query=" <> decodeUtf8 query - enps = "`" <> method <> " " <> urlPath <> "`" + enps = "`" <> method <> " " <> urlPath <> "`" discordReportAlert :: Text -> Text -> Text -> Int -> Int -> V.Vector (Text, Int, Int) -> Text -> Text -> Text -> Text -> AE.Value @@ -364,4 +364,4 @@ discordNewEndpointAlert projectName method urlPath hash projectUrl = query = urlEncode True $ encodeUtf8 $ "method=\"" <> method <> "\" AND attributes.http.route in (\"" <> urlPath <> "\")" explorerUrl = projectUrl <> "/log_explorer?query=" <> decodeUtf8 query explorerLink = "[View in Explorer](" <> explorerUrl <> ")" - enp = "`" <> method <> " " <> urlPath <> "`" \ No newline at end of file + enp = "`" <> method <> " " <> urlPath <> "`" From ec4aeaf9c5d2e41e2d646a586c6502f48c2548df Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Fri, 9 Jan 2026 21:34:02 +0000 Subject: [PATCH 26/71] new endpoint issue alert, on new shape for existing endpoint issue, and update issue data by incrementing shapes count. 
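When a NewShape job finds an open new-endpoint issue for the hash, it no longer opens a duplicate issue: it decodes the issue's NewEndpointData payload, appends the shape hash to initialShapes, and writes it back via updateIssueData, which is what grows the issue's shape count. A standalone shape issue (plus its LLM enhancement job) is still created when no open endpoint issue exists.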
--- src/BackgroundJobs.hs | 61 +++++++++++-------- src/Models/Apis/Issues.hs | 14 ++++- .../migrations/0028_rebuild_issues_table.sql | 14 +---- 3 files changed, 49 insertions(+), 40 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 6ea3a3495..f9034c29c 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -23,6 +23,7 @@ import Data.UUID qualified as UUID import Data.UUID.V4 qualified as UUIDV4 import Data.Vector qualified as V import Database.PostgreSQL.Simple (SomePostgreSqlException) +import Database.PostgreSQL.Simple.Newtypes (Aeson (..)) import Database.PostgreSQL.Simple.SqlQQ (sql) import Database.PostgreSQL.Simple.Types import Effectful (Eff, IOE, (:>)) @@ -2022,33 +2023,41 @@ processNewEndpoint pid hash authCtx = do processNewShape :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBackgroundCtx () processNewShape pid hash authCtx = do Log.logInfo "Processing new shape" (pid, hash) - shapeM <- Shapes.getShapeForIssue hash - - case shapeM of - Nothing -> Log.logAttention "Shape not found for new shape processing" (pid, hash) - Just sh -> do - issue <- - liftIO - $ Issues.createNewShapeIssue - pid - sh.shapeHash - sh.endpointHash - sh.method - sh.path - sh.statusCode - sh.exampleRequestPayload - sh.exampleResponsePayload - sh.newFields - sh.deletedFields - sh.modifiedFields - sh.fieldHashes - Issues.insertIssue issue - - liftIO $ withResource authCtx.jobsPool \conn -> - void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) - - Log.logInfo "Created issue for new shape" (pid, hash, issue.id) + existingIssueM <- Issues.findOpenIssueForEndpoint pid hash + case existingIssueM of + Just issue -> do + Log.logInfo "Skipping new shape issue creation due to existing open issue for endpoint" (pid, hash, issue.id) + let Aeson rawIssueData = issue.issueData + endpontData = AE.decode (AE.encode rawIssueData) :: Maybe Issues.NewEndpointData + whenJust endpontData \Issues.NewEndpointData {..} -> do + let newData = Issues.NewEndpointData endpointHash endpointMethod endpointPath endpointHost firstSeenAt (V.snoc initialShapes hash) + Issues.updateIssueData issue.id (AE.toJSON newData) + Nothing -> do + case shapeM of + Nothing -> Log.logAttention "Shape not found for new shape processing" (pid, hash) + Just sh -> do + issue <- + liftIO + $ Issues.createNewShapeIssue + pid + sh.shapeHash + sh.endpointHash + sh.method + sh.path + sh.statusCode + sh.exampleRequestPayload + sh.exampleResponsePayload + sh.newFields + sh.deletedFields + sh.modifiedFields + sh.fieldHashes + Issues.insertIssue issue + + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + + Log.logInfo "Created issue for new shape" (pid, hash, issue.id) -- | Process new field change detected by SQL trigger diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index 473b04c89..cc3075c58 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -44,6 +44,7 @@ module Models.Apis.Issues ( updateIssueCriticality, acknowledgeIssue, selectIssueByHash, + updateIssueData, -- * Conversion Functions createAPIChangeIssue, @@ -600,7 +601,7 @@ selectIssues pid _typeM isAcknowledged isArchived limit offset timeRangeM sortM -- | Find open issue for endpoint findOpenIssueForEndpoint :: DB es => Projects.ProjectId -> Text -> Eff es (Maybe Issue) -findOpenIssueForEndpoint pid endpointHash = listToMaybe <$> PG.query q (pid, "api_change" :: Text, endpointHash) 
+findOpenIssueForEndpoint pid endpointHash = listToMaybe <$> PG.query q (pid, "new-endpoint" :: Text, endpointHash) where q = [sql| @@ -614,6 +615,7 @@ findOpenIssueForEndpoint pid endpointHash = listToMaybe <$> PG.query q (pid, "ap |] + -- | Update issue with new anomaly data updateIssueWithNewAnomaly :: DB es => IssueId -> APIChangeData -> Eff es () updateIssueWithNewAnomaly issueId newData = void $ PG.execute q (Aeson newData, issueId) @@ -1506,6 +1508,16 @@ createFieldChangeIssue projectId fldHash epHash method path keyPath category pre } +updateIssueData :: DB es => IssueId -> AE.Value -> Eff es () +updateIssueData issueId newData = void $ PG.execute q (Aeson newData, issueId) + where + q = + [sql| + UPDATE apis.issues + SET issue_data = ? + WHERE id = ? + |] + -- | Conversation type for AI chats data ConversationType = CTAnomaly | CTTrace | CTLogExplorer | CTDashboard | CTSlackThread | CTDiscordThread deriving stock (Eq, Generic, Read, Show) diff --git a/static/migrations/0028_rebuild_issues_table.sql b/static/migrations/0028_rebuild_issues_table.sql index b33194208..a56922fcb 100644 --- a/static/migrations/0028_rebuild_issues_table.sql +++ b/static/migrations/0028_rebuild_issues_table.sql @@ -20,7 +20,7 @@ CREATE TABLE apis.issues ( issue_type apis.issue_type NOT NULL, source_type TEXT NOT NULL, -- 'error', 'log_pattern', 'endpoint', 'shape' target_hash TEXT NOT NULL, -- links to error.hash, log_pattern.pattern_hash, endpoint.hash - + endpoint_hash TEXT, -- for endpoint-related issues title TEXT NOT NULL DEFAULT '', service TEXT, environment TEXT, @@ -43,18 +43,6 @@ CREATE TABLE apis.issues ( issue_data JSONB NOT NULL DEFAULT '{}' ); - - , -- Actions - recommendedAction :: Text - , migrationComplexity :: Text -- "low", "medium", "high", "n/a" - -- Data payload (polymorphic based on issueType) - , issueData :: Aeson AE.Value - , -- Payload changes tracking (for API changes) - requestPayloads :: Aeson [PayloadChange] - , responsePayloads :: Aeson [PayloadChange] - , -- LLM enhancement tracking - llmEnhancedAt :: Maybe UTCTime - , llmEnhancementVersion :: Maybe Int SELECT manage_updated_at('apis.issues'); -- Indexes From 2b66135bcf9bc19a660e29f65a981a22cdc28624 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 9 Jan 2026 21:35:13 +0000 Subject: [PATCH 27/71] Auto-format code with fourmolu --- src/BackgroundJobs.hs | 12 ++++++------ src/Models/Apis/Issues.hs | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index f9034c29c..016f9d91a 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -2026,13 +2026,13 @@ processNewShape pid hash authCtx = do shapeM <- Shapes.getShapeForIssue hash existingIssueM <- Issues.findOpenIssueForEndpoint pid hash case existingIssueM of - Just issue -> do + Just issue -> do Log.logInfo "Skipping new shape issue creation due to existing open issue for endpoint" (pid, hash, issue.id) let Aeson rawIssueData = issue.issueData endpontData = AE.decode (AE.encode rawIssueData) :: Maybe Issues.NewEndpointData - whenJust endpontData \Issues.NewEndpointData {..} -> do - let newData = Issues.NewEndpointData endpointHash endpointMethod endpointPath endpointHost firstSeenAt (V.snoc initialShapes hash) - Issues.updateIssueData issue.id (AE.toJSON newData) + whenJust endpontData \Issues.NewEndpointData{..} -> do + let newData = Issues.NewEndpointData endpointHash endpointMethod endpointPath endpointHost firstSeenAt (V.snoc initialShapes hash) + Issues.updateIssueData issue.id 
(AE.toJSON newData) Nothing -> do case shapeM of Nothing -> Log.logAttention "Shape not found for new shape processing" (pid, hash) @@ -2053,10 +2053,10 @@ processNewShape pid hash authCtx = do sh.modifiedFields sh.fieldHashes Issues.insertIssue issue - + liftIO $ withResource authCtx.jobsPool \conn -> void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) - + Log.logInfo "Created issue for new shape" (pid, hash, issue.id) diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index cc3075c58..89c671ed1 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -615,7 +615,6 @@ findOpenIssueForEndpoint pid endpointHash = listToMaybe <$> PG.query q (pid, "ne |] - -- | Update issue with new anomaly data updateIssueWithNewAnomaly :: DB es => IssueId -> APIChangeData -> Eff es () updateIssueWithNewAnomaly issueId newData = void $ PG.execute q (Aeson newData, issueId) @@ -1508,7 +1507,7 @@ createFieldChangeIssue projectId fldHash epHash method path keyPath category pre } -updateIssueData :: DB es => IssueId -> AE.Value -> Eff es () +updateIssueData :: DB es => IssueId -> AE.Value -> Eff es () updateIssueData issueId newData = void $ PG.execute q (Aeson newData, issueId) where q = @@ -1518,6 +1517,7 @@ updateIssueData issueId newData = void $ PG.execute q (Aeson newData, issueId) WHERE id = ? |] + -- | Conversation type for AI chats data ConversationType = CTAnomaly | CTTrace | CTLogExplorer | CTDashboard | CTSlackThread | CTDiscordThread deriving stock (Eq, Generic, Read, Show) From 80615ba192ef87e98b8e85d4a95173c81e12a5da Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Fri, 9 Jan 2026 22:56:36 +0000 Subject: [PATCH 28/71] new baseline state type --- src/Models/Apis/Endpoints.hs | 21 +++++++++++++++++---- src/Models/Apis/Errors.hs | 9 +++++---- src/Models/Apis/LogPatterns.hs | 7 ++++--- src/Pkg/DeriveUtils.hs | 19 ++++++++++++++++++- 4 files changed, 44 insertions(+), 12 deletions(-) diff --git a/src/Models/Apis/Endpoints.hs b/src/Models/Apis/Endpoints.hs index e8816f100..2568fe8f1 100644 --- a/src/Models/Apis/Endpoints.hs +++ b/src/Models/Apis/Endpoints.hs @@ -40,7 +40,7 @@ import Effectful.PostgreSQL (withConnection) import Effectful.PostgreSQL qualified as PG import Models.Projects.Projects qualified as Projects import NeatInterpolation (text) -import Pkg.DeriveUtils (UUIDId (..)) +import Pkg.DeriveUtils (BaselineState (..), UUIDId (..)) import Relude import System.Types (DB) @@ -66,6 +66,20 @@ data Endpoint = Endpoint , hash :: Text , outgoing :: Bool , description :: Text + , firstTraceId :: Maybe Text + , recentTraceId :: Maybe Text + , service :: Maybe Text + , baselineState :: BaselineState + , baselineSamples :: Int + , baselineUpdatedAt :: Maybe UTCTime + , baselineErrorRateMean :: Maybe Double + , baselineErrorRateStddev :: Maybe Double + , baselineLatencyMean :: Maybe Double + , baselineLatencyStddev :: Maybe Double + , baselineLatencyP95 :: Maybe Double + , baselineLatencyP99 :: Maybe Double + , baselineVolumeHourlyMean :: Maybe Double + , baselineVolumeHourlyStddev :: Maybe Double } deriving stock (Eq, Generic, Show) deriving anyclass (Default, FromRow, NFData, ToRow) @@ -249,7 +263,7 @@ data EndpointWithCurrentRates = EndpointWithCurrentRates , method :: Text , urlPath :: Text , host :: Text - , baselineState :: Text + , baselineState :: BaselineState , baselineErrorRateMean :: Maybe Double , baselineErrorRateStddev :: Maybe Double , baselineLatencyMean :: Maybe Double @@ -351,7 +365,6 @@ getEndpointStats pid 
endpointHash hours = listToMaybe <$> PG.query q (pid, endpo JOIN apis.endpoints e ON e.url_path = (ols.attributes->'http'->>'route') AND e.method = ols.attributes___http___request___method WHERE ols.project_id = ? - AND e.hash = ? AND ols.name = 'monoscope.http' AND ols.timestamp >= NOW() - MAKE_INTERVAL(hours => ?) GROUP BY DATE_TRUNC('hour', timestamp) @@ -372,7 +385,7 @@ getEndpointStats pid endpointHash hours = listToMaybe <$> PG.query q (pid, endpo -- | Update endpoint baseline values -updateEndpointBaseline :: DB es => EndpointId -> Text -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Int -> Eff es () +updateEndpointBaseline :: DB es => EndpointId -> BaselineState -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Int -> Eff es () updateEndpointBaseline eid state errMean errStddev latMean latStddev latP95 latP99 volMean volStddev samples = void $ PG.execute q (state, errMean, errStddev, latMean, latStddev, latP95, latP99, volMean, volStddev, samples, eid) where diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index 8dda0fe45..e5e0344c4 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -47,6 +47,7 @@ import Models.Apis.RequestDumps qualified as RequestDump import Models.Apis.RequestDumps qualified as RequestDumps import Models.Projects.Projects qualified as Projects import Models.Users.Users qualified as Users +import Pkg.DeriveUtils (BaselineState (..)) import Relude hiding (id) import System.Types (DB) import Utils (DBField (MkDBField)) @@ -133,7 +134,7 @@ data Error = Error , occurrences24h :: Int , quietMinutes :: Int , resolutionThresholdMinutes :: Int - , baselineState :: Text + , baselineState :: BaselineState , baselineSamples :: Int , baselineErrorRateMean :: Maybe Double , baselineErrorRateStddev :: Maybe Double @@ -354,7 +355,7 @@ assignError eid uid = PG.execute q (uid, eid) -- | Update baseline data for an error -updateBaseline :: DB es => ErrorId -> Text -> Double -> Double -> Int -> Eff es Int64 +updateBaseline :: DB es => ErrorId -> BaselineState -> Double -> Double -> Int -> Eff es Int64 updateBaseline eid bState rateMean rateStddev samples = PG.execute q (bState, rateMean, rateStddev, samples, eid) where @@ -457,7 +458,7 @@ getErrorEventStats eid hoursBack = do checkErrorSpike :: DB es => Error -> Eff es (Maybe (Bool, Double, Double)) checkErrorSpike err = do case (err.baselineState, err.baselineErrorRateMean, err.baselineErrorRateStddev) of - ("established", Just mean, Just stddev) | stddev > 0 -> do + (BSEstablished, Just mean, Just stddev) | stddev > 0 -> do currentCount <- getCurrentHourErrorCount err.id let currentRate = fromIntegral currentCount :: Double zScore = (currentRate - mean) / stddev @@ -474,7 +475,7 @@ data ErrorWithCurrentRate = ErrorWithCurrentRate , exceptionType :: Text , message :: Text , service :: Maybe Text - , baselineState :: Text + , baselineState :: BaselineState , baselineMean :: Maybe Double , baselineStddev :: Maybe Double , currentHourCount :: Int diff --git a/src/Models/Apis/LogPatterns.hs b/src/Models/Apis/LogPatterns.hs index e29f793a5..5e1124634 100644 --- a/src/Models/Apis/LogPatterns.hs +++ b/src/Models/Apis/LogPatterns.hs @@ -35,6 +35,7 @@ import Effectful (Eff) import Effectful.PostgreSQL qualified as PG import Models.Projects.Projects qualified as Projects import Models.Users.Users qualified as Users +import Pkg.DeriveUtils (BaselineState (..)) import Relude hiding (id) import System.Types (DB) @@ -98,7 +99,7 @@ data 
LogPattern = LogPattern , state :: LogPatternState , acknowledgedBy :: Maybe Users.UserId , acknowledgedAt :: Maybe ZonedTime - , baselineState :: Text + , baselineState :: BaselineState , baselineVolumeHourlyMean :: Maybe Double , baselineVolumeHourlyStddev :: Maybe Double , baselineSamples :: Int @@ -201,7 +202,7 @@ updateLogPatternStats pid patHash additionalCount = |] -updateBaseline :: DB es => Projects.ProjectId -> Text -> Text -> Double -> Double -> Int -> Eff es Int64 +updateBaseline :: DB es => Projects.ProjectId -> Text -> BaselineState -> Double -> Double -> Int -> Eff es Int64 updateBaseline pid patHash bState hourlyMean hourlyStddev samples = PG.execute q (bState, hourlyMean, hourlyStddev, samples, pid, patHash) where @@ -279,7 +280,7 @@ data LogPatternWithRate = LogPatternWithRate , projectId :: Projects.ProjectId , logPattern :: Text , patternHash :: Text - , baselineState :: Text + , baselineState :: BaselineState , baselineMean :: Maybe Double , baselineStddev :: Maybe Double , currentHourCount :: Int diff --git a/src/Pkg/DeriveUtils.hs b/src/Pkg/DeriveUtils.hs index 277d34ca1..4b384c3b1 100644 --- a/src/Pkg/DeriveUtils.hs +++ b/src/Pkg/DeriveUtils.hs @@ -1,5 +1,6 @@ module Pkg.DeriveUtils ( AesonText (..), + BaselineState (..), PGTextArray (..), UUIDId (..), idToText, @@ -9,14 +10,16 @@ module Pkg.DeriveUtils ( ) where import Data.Aeson qualified as AE -import Data.Default (Default) +import Data.Default (Default (..)) import Data.Default.Instances () +import Deriving.Aeson qualified as DAE import Data.UUID qualified as UUID import Data.Vector qualified as V import Database.PostgreSQL.Simple (FromRow, ResultError (ConversionFailed), ToRow) import Database.PostgreSQL.Simple.FromField (Conversion (..), FromField (..), fromField, returnError) import Database.PostgreSQL.Simple.Newtypes (Aeson (..)) import Database.PostgreSQL.Simple.ToField (ToField (..)) +import Pkg.DBUtils (WrappedEnumSC (..)) import GHC.Records (HasField (getField)) import GHC.TypeLits (Symbol) import Language.Haskell.TH.Syntax qualified as THS @@ -97,3 +100,17 @@ idToText = UUID.toText . unUUIDId -- | Parse Text to a UUID-based ID idFromText :: Text -> Maybe (UUIDId name) idFromText = fmap UUIDId . UUID.fromText + + +-- | Baseline state for anomaly detection. +-- Baselines start in 'Learning' state until enough data is collected, +-- then transition to 'Established' once statistically significant. 
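+--
+-- A rough sketch of the encodings implied by the deriving annotations below
+-- (assumed, not verified here): the CustomJSON options strip the "BS" prefix
+-- and snake_case the constructor tag, so BSLearning should round-trip as
+-- "learning" and BSEstablished as "established"; WrappedEnumSC "BS" is
+-- expected to read/write the same spellings for the baseline_state column.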
+data BaselineState = BSLearning | BSEstablished
+  deriving stock (Eq, Generic, Read, Show)
+  deriving anyclass (NFData)
+  deriving (FromField, ToField) via WrappedEnumSC "BS" BaselineState
+  deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.ConstructorTagModifier '[DAE.StripPrefix "BS", DAE.CamelToSnake]] BaselineState
+
+
+instance Default BaselineState where
+  def = BSLearning

From 478bc66747fdd4a2a747f2d3333a37e1f4609239 Mon Sep 17 00:00:00 2001
From: Yussif Mohammed
Date: Fri, 9 Jan 2026 22:56:52 +0000
Subject: [PATCH 29/71] new shape issue handling, aggregate shapes per status
 code

---
 src/BackgroundJobs.hs     | 139 ++++++++++++++++----------
 src/Models/Apis/Issues.hs |  18 +++++
 src/Models/Apis/Shapes.hs |   6 +-
 3 files changed, 81 insertions(+), 82 deletions(-)

diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs
index 016f9d91a..ba779e1c0 100644
--- a/src/BackgroundJobs.hs
+++ b/src/BackgroundJobs.hs
@@ -67,7 +67,7 @@ import OpenTelemetry.Attributes qualified as OA
 import OpenTelemetry.Trace (TracerProvider)
 import Pages.Charts.Charts qualified as Charts
 import Pages.Reports qualified as RP
-import Pkg.DeriveUtils (UUIDId (..))
+import Pkg.DeriveUtils (BaselineState (..), UUIDId (..))
 import Pkg.Drain qualified as Drain
 import Pkg.GitHub qualified as GitHub
 import Pkg.Mail (NotificationAlerts (..), sendDiscordAlert, sendPostmarkEmail, sendSlackAlert, sendSlackMessage, sendWhatsAppAlert)
@@ -1647,7 +1647,7 @@ calculateErrorBaselines pid = do
           newMean = stats.hourlyMean
           newStddev = stats.hourlyStddev
           -- Establish baseline after 24 hours of data
-          newState = if newSamples >= 24 then "established" else "learning"
+          newState = if newSamples >= 24 then BSEstablished else BSLearning
 
       _ <- Errors.updateBaseline err.id newState newMean newStddev newSamples
       pass
@@ -1666,7 +1666,7 @@ detectErrorSpikes pid authCtx = do
   forM_ errorsWithRates \errRate -> do
     -- Only check errors with established baselines
     case (errRate.baselineState, errRate.baselineMean, errRate.baselineStddev) of
-      ("established", Just mean, Just stddev) | stddev > 0 -> do
+      (BSEstablished, Just mean, Just stddev) | stddev > 0 -> do
         let currentRate = fromIntegral errRate.currentHourCount :: Double
             zScore = (currentRate - mean) / stddev
             isSpike = zScore > 3.0 && currentRate > mean + 5
@@ -1766,7 +1766,7 @@ calculateLogPatternBaselines pid = do
           newMean = stats.hourlyMean
           newStddev = stats.hourlyStddev
           -- Establish baseline after 24 hours of data
-          newState = if newSamples >= 24 then "established" else "learning"
+          newState = if newSamples >= 24 then BSEstablished else BSLearning
 
       _ <- LogPatterns.updateBaseline pid lp.patternHash newState newMean newStddev newSamples
       pass
@@ -1785,7 +1785,7 @@ detectLogPatternSpikes pid authCtx = do
   forM_ patternsWithRates \lpRate -> do
     -- Only check patterns with established baselines
     case (lpRate.baselineState, lpRate.baselineMean, lpRate.baselineStddev) of
-      ("established", Just mean, Just stddev) | stddev > 0 -> do
+      (BSEstablished, Just mean, Just stddev) | stddev > 0 -> do
         let currentRate = fromIntegral lpRate.currentHourCount :: Double
             zScore = (currentRate - mean) / stddev
             -- Spike detection: >3 std devs AND at least 10 more events than baseline
@@ -1831,12 +1831,7 @@ processNewLogPattern pid patternHash authCtx = do
 
       Log.logInfo "Created issue for new log pattern" (pid, lp.id, issue.id)
 
--- ============================================================================
--- Endpoint Anomaly Detection Jobs
-- 
============================================================================ --- | Calculate baselines for endpoints (latency, error rate, volume) --- Uses hourly stats from otel_logs_and_spans over the last 7 days calculateEndpointBaselines :: Projects.ProjectId -> ATBackgroundCtx () calculateEndpointBaselines pid = do Log.logInfo "Calculating endpoint baselines" pid @@ -1850,7 +1845,7 @@ calculateEndpointBaselines pid = do Just stats -> do let newSamples = stats.totalHours -- Establish baseline after 24 hours of data - newState = if newSamples >= 24 then "established" else "learning" + newState = if newSamples >= 24 then BSEstablished else BSLearning Endpoints.updateEndpointBaseline ep.id newState @@ -1867,7 +1862,6 @@ calculateEndpointBaselines pid = do Log.logInfo "Finished calculating endpoint baselines" (pid, length endpoints) --- | Detect endpoint latency degradation and create issues detectEndpointLatencyDegradation :: Projects.ProjectId -> Config.AuthContext -> ATBackgroundCtx () detectEndpointLatencyDegradation pid authCtx = do Log.logInfo "Detecting endpoint latency degradation" pid @@ -1893,7 +1887,6 @@ detectEndpointLatencyDegradation pid authCtx = do Log.logInfo "Finished endpoint latency degradation detection" pid --- | Detect endpoint error rate spikes and create issues detectEndpointErrorRateSpike :: Projects.ProjectId -> Config.AuthContext -> ATBackgroundCtx () detectEndpointErrorRateSpike pid authCtx = do Log.logInfo "Detecting endpoint error rate spikes" pid @@ -1933,7 +1926,6 @@ detectEndpointErrorRateSpike pid authCtx = do Log.logInfo "Finished endpoint error rate spike detection" pid --- | Detect endpoint volume rate changes (spike or drop) and create issues detectEndpointVolumeRateChange :: Projects.ProjectId -> Config.AuthContext -> ATBackgroundCtx () detectEndpointVolumeRateChange pid authCtx = do Log.logInfo "Detecting endpoint volume rate changes" pid @@ -1972,7 +1964,6 @@ detectEndpointVolumeRateChange pid authCtx = do Log.logInfo "Finished endpoint volume rate change detection" pid --- | Process new endpoint detected by SQL trigger processNewEndpoint :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBackgroundCtx () processNewEndpoint pid hash authCtx = do Log.logInfo "Processing new endpoint" (pid, hash) @@ -2019,73 +2010,63 @@ processNewEndpoint pid hash authCtx = do sendPostmarkEmail (CI.original u.email) (Just ("anomaly-endpoint-2", templateVars)) Nothing --- | Process new shape detected by SQL trigger processNewShape :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBackgroundCtx () processNewShape pid hash authCtx = do Log.logInfo "Processing new shape" (pid, hash) - shapeM <- Shapes.getShapeForIssue hash - existingIssueM <- Issues.findOpenIssueForEndpoint pid hash - case existingIssueM of - Just issue -> do - Log.logInfo "Skipping new shape issue creation due to existing open issue for endpoint" (pid, hash, issue.id) - let Aeson rawIssueData = issue.issueData - endpontData = AE.decode (AE.encode rawIssueData) :: Maybe Issues.NewEndpointData - whenJust endpontData \Issues.NewEndpointData{..} -> do - let newData = Issues.NewEndpointData endpointHash endpointMethod endpointPath endpointHost firstSeenAt (V.snoc initialShapes hash) - Issues.updateIssueData issue.id (AE.toJSON newData) - Nothing -> do - case shapeM of - Nothing -> Log.logAttention "Shape not found for new shape processing" (pid, hash) - Just sh -> do - issue <- - liftIO - $ Issues.createNewShapeIssue - pid - sh.shapeHash - sh.endpointHash - sh.method - sh.path - sh.statusCode - 
sh.exampleRequestPayload - sh.exampleResponsePayload - sh.newFields - sh.deletedFields - sh.modifiedFields - sh.fieldHashes - Issues.insertIssue issue - - liftIO $ withResource authCtx.jobsPool \conn -> - void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) - - Log.logInfo "Created issue for new shape" (pid, hash, issue.id) - - --- | Process new field change detected by SQL trigger + projectM <- Projects.projectById pid + whenJust projectM \project -> do + shapeM <- Shapes.getShapeForIssue pid hash + whenJust shapeM \shape -> do + endpointM <- Endpoints.getEndpointByHash pid shape.endpointHash + case endpointM of + Just endpoint | endpoint.baselineState == BSEstablished -> do + existingIssueM <- Issues.findOpenIssueForEndpoint pid hash + case existingIssueM of + Just issue -> do + Log.logInfo "Skipping new shape issue creation due to existing open issue for endpoint" (pid, hash, issue.id) + let Aeson rawIssueData = issue.issueData + shapeData = AE.decode (AE.encode rawIssueData) :: Maybe Issues.NewShapeData + whenJust shapeData \sh -> do + let newData = sh {Issues.newShapesAfterIssue= V.snoc sh.newShapesAfterIssue hash} + Issues.updateIssueData issue.id (AE.toJSON newData) + Nothing -> do + issue <- liftIO + $ Issues.createNewShapeIssue + pid shape.shapeHash shape.endpointHash shape.method shape.path shape.statusCode shape.exampleRequestPayload shape.exampleResponsePayload shape.newFields shape.deletedFields shape.modifiedFields shape.fieldHashes + Issues.insertIssue issue + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + Log.logInfo "Created issue for new shape" (pid, hash, issue.id) + _ -> pass + + + processNewFieldChange :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBackgroundCtx () processNewFieldChange pid hash authCtx = do Log.logInfo "Processing new field change" (pid, hash) - - fieldM <- Fields.getFieldForIssue hash - - case fieldM of - Nothing -> Log.logAttention "Field not found for new field change processing" (pid, hash) - Just fld -> do - issue <- - liftIO - $ Issues.createFieldChangeIssue - pid - fld.fieldHash - fld.endpointHash - fld.method - fld.path - fld.keyPath - fld.fieldCategory - Nothing - fld.fieldType - "added" - Issues.insertIssue issue - - liftIO $ withResource authCtx.jobsPool \conn -> - void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) - - Log.logInfo "Created issue for new field change" (pid, hash, issue.id) + pass -- Temporarily disabled field change issue creation + + -- fieldM <- Fields.getFieldForIssue hash + + -- case fieldM of + -- Nothing -> Log.logAttention "Field not found for new field change processing" (pid, hash) + -- Just fld -> do + -- issue <- + -- liftIO + -- $ Issues.createFieldChangeIssue + -- pid + -- fld.fieldHash + -- fld.endpointHash + -- fld.method + -- fld.path + -- fld.keyPath + -- fld.fieldCategory + -- Nothing + -- fld.fieldType + -- "added" + -- Issues.insertIssue issue + + -- liftIO $ withResource authCtx.jobsPool \conn -> + -- void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + + -- Log.logInfo "Created issue for new field change" (pid, hash, issue.id) diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index 89c671ed1..92b7ac6c3 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -64,6 +64,7 @@ module Models.Apis.Issues ( createNewEndpointIssue, createNewShapeIssue, 
  createFieldChangeIssue,
+  findOpenShapeIssueForStatusCode,
 
   -- * Utilities
   issueIdText,
@@ -417,6 +418,7 @@ data NewShapeData = NewShapeData
   , modifiedFields :: V.Vector Text
   , fieldHashes :: V.Vector Text
   , firstSeenAt :: UTCTime
+  , newShapesAfterIssue :: V.Vector Text
   }
   deriving stock (Generic, Show)
   deriving anyclass (NFData)
@@ -614,6 +616,21 @@ findOpenIssueForEndpoint pid endpointHash = listToMaybe <$> PG.query q (pid, "ne
         LIMIT 1
       |]
 
+findOpenShapeIssueForStatusCode :: DB es => Projects.ProjectId -> Text -> Int -> Eff es (Maybe Issue)
+findOpenShapeIssueForStatusCode pid endpointHash statusCode = listToMaybe <$> PG.query q (pid, "new-shape" :: Text, endpointHash, statusCode)
+  where
+    q =
+      [sql|
+        SELECT * FROM apis.issues
+        WHERE project_id = ?
+          AND issue_type = ?
+          AND endpoint_hash = ?
+          AND (issue_data->>'status_code')::int = ?
+          AND acknowledged_at IS NULL
+          AND archived_at IS NULL
+        LIMIT 1
+      |]
+
 
 -- | Update issue with new anomaly data
 updateIssueWithNewAnomaly :: DB es => IssueId -> APIChangeData -> Eff es ()
@@ -1419,6 +1436,7 @@ createNewShapeIssue projectId shHash epHash method path statusCode reqPayload re
       , modifiedFields = modifiedFlds
       , fieldHashes = fldHashes
       , firstSeenAt = now
+      , newShapesAfterIssue = V.empty
       }
 
     hasBreakingChanges = not (V.null deletedFlds) || not (V.null modifiedFlds)
diff --git a/src/Models/Apis/Shapes.hs b/src/Models/Apis/Shapes.hs
index a6cfb9b7c..c8eff4950 100644
--- a/src/Models/Apis/Shapes.hs
+++ b/src/Models/Apis/Shapes.hs
@@ -136,8 +136,8 @@ data ShapeForIssue = ShapeForIssue
   deriving anyclass (FromRow)
 
 
-getShapeForIssue :: DB es => Text -> Eff es (Maybe ShapeForIssue)
-getShapeForIssue hash = listToMaybe <$> PG.query q (Only hash)
+getShapeForIssue :: DB es => Projects.ProjectId -> Text -> Eff es (Maybe ShapeForIssue)
+getShapeForIssue pid hash = listToMaybe <$> PG.query q (pid, hash)
   where
     q =
       [sql|
@@ -155,5 +155,5 @@ getShapeForIssue hash = listToMaybe <$> PG.query q (Only hash)
           COALESCE(s.field_hashes, '{}'::TEXT[])
         FROM apis.shapes s
         LEFT JOIN apis.endpoints e ON e.hash = s.endpoint_hash
-        WHERE s.hash = ?
+        WHERE s.project_id = ? AND s.hash = ? 
|] From c8ea501a0d68aa88921441054a2fdfd7f0a36851 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 9 Jan 2026 22:57:38 +0000 Subject: [PATCH 30/71] Auto-format code with fourmolu --- src/BackgroundJobs.hs | 94 ++++++++++++++++++++++----------------- src/Models/Apis/Issues.hs | 1 + src/Pkg/DeriveUtils.hs | 6 +-- 3 files changed, 56 insertions(+), 45 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index ba779e1c0..fe9755d19 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -1831,7 +1831,6 @@ processNewLogPattern pid patternHash authCtx = do Log.logInfo "Created issue for new log pattern" (pid, lp.id, issue.id) - calculateEndpointBaselines :: Projects.ProjectId -> ATBackgroundCtx () calculateEndpointBaselines pid = do Log.logInfo "Calculating endpoint baselines" pid @@ -2022,51 +2021,62 @@ processNewShape pid hash authCtx = do Just endpoint | endpoint.baselineState == BSEstablished -> do existingIssueM <- Issues.findOpenIssueForEndpoint pid hash case existingIssueM of - Just issue -> do - Log.logInfo "Skipping new shape issue creation due to existing open issue for endpoint" (pid, hash, issue.id) - let Aeson rawIssueData = issue.issueData - shapeData = AE.decode (AE.encode rawIssueData) :: Maybe Issues.NewShapeData - whenJust shapeData \sh -> do - let newData = sh {Issues.newShapesAfterIssue= V.snoc sh.newShapesAfterIssue hash} - Issues.updateIssueData issue.id (AE.toJSON newData) - Nothing -> do - issue <- liftIO - $ Issues.createNewShapeIssue - pid shape.shapeHash shape.endpointHash shape.method shape.path shape.statusCode shape.exampleRequestPayload shape.exampleResponsePayload shape.newFields shape.deletedFields shape.modifiedFields shape.fieldHashes - Issues.insertIssue issue - liftIO $ withResource authCtx.jobsPool \conn -> - void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) - Log.logInfo "Created issue for new shape" (pid, hash, issue.id) + Just issue -> do + Log.logInfo "Skipping new shape issue creation due to existing open issue for endpoint" (pid, hash, issue.id) + let Aeson rawIssueData = issue.issueData + shapeData = AE.decode (AE.encode rawIssueData) :: Maybe Issues.NewShapeData + whenJust shapeData \sh -> do + let newData = sh{Issues.newShapesAfterIssue = V.snoc sh.newShapesAfterIssue hash} + Issues.updateIssueData issue.id (AE.toJSON newData) + Nothing -> do + issue <- + liftIO + $ Issues.createNewShapeIssue + pid + shape.shapeHash + shape.endpointHash + shape.method + shape.path + shape.statusCode + shape.exampleRequestPayload + shape.exampleResponsePayload + shape.newFields + shape.deletedFields + shape.modifiedFields + shape.fieldHashes + Issues.insertIssue issue + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + Log.logInfo "Created issue for new shape" (pid, hash, issue.id) _ -> pass - - + processNewFieldChange :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBackgroundCtx () processNewFieldChange pid hash authCtx = do Log.logInfo "Processing new field change" (pid, hash) pass -- Temporarily disabled field change issue creation - -- fieldM <- Fields.getFieldForIssue hash - - -- case fieldM of - -- Nothing -> Log.logAttention "Field not found for new field change processing" (pid, hash) - -- Just fld -> do - -- issue <- - -- liftIO - -- $ Issues.createFieldChangeIssue - -- pid - -- fld.fieldHash - -- fld.endpointHash - -- fld.method - -- fld.path - -- fld.keyPath - -- 
fld.fieldCategory - -- Nothing - -- fld.fieldType - -- "added" - -- Issues.insertIssue issue - - -- liftIO $ withResource authCtx.jobsPool \conn -> - -- void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) - - -- Log.logInfo "Created issue for new field change" (pid, hash, issue.id) +-- fieldM <- Fields.getFieldForIssue hash + +-- case fieldM of +-- Nothing -> Log.logAttention "Field not found for new field change processing" (pid, hash) +-- Just fld -> do +-- issue <- +-- liftIO +-- $ Issues.createFieldChangeIssue +-- pid +-- fld.fieldHash +-- fld.endpointHash +-- fld.method +-- fld.path +-- fld.keyPath +-- fld.fieldCategory +-- Nothing +-- fld.fieldType +-- "added" +-- Issues.insertIssue issue + +-- liftIO $ withResource authCtx.jobsPool \conn -> +-- void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + +-- Log.logInfo "Created issue for new field change" (pid, hash, issue.id) diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index 92b7ac6c3..108c9e875 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -616,6 +616,7 @@ findOpenIssueForEndpoint pid endpointHash = listToMaybe <$> PG.query q (pid, "ne LIMIT 1 |] + findOpenShapeIssueForStatusCode :: DB es => Projects.ProjectId -> Text -> Int -> Eff es (Maybe Issue) findOpenShapeIssueForStatusCode pid endpointHash statusCode = listToMaybe <$> PG.query q (pid, "new-shape" :: Text, endpointHash, statusCode) where diff --git a/src/Pkg/DeriveUtils.hs b/src/Pkg/DeriveUtils.hs index 4b384c3b1..3b3f6f103 100644 --- a/src/Pkg/DeriveUtils.hs +++ b/src/Pkg/DeriveUtils.hs @@ -12,17 +12,17 @@ module Pkg.DeriveUtils ( import Data.Aeson qualified as AE import Data.Default (Default (..)) import Data.Default.Instances () -import Deriving.Aeson qualified as DAE import Data.UUID qualified as UUID import Data.Vector qualified as V import Database.PostgreSQL.Simple (FromRow, ResultError (ConversionFailed), ToRow) import Database.PostgreSQL.Simple.FromField (Conversion (..), FromField (..), fromField, returnError) import Database.PostgreSQL.Simple.Newtypes (Aeson (..)) import Database.PostgreSQL.Simple.ToField (ToField (..)) -import Pkg.DBUtils (WrappedEnumSC (..)) +import Deriving.Aeson qualified as DAE import GHC.Records (HasField (getField)) import GHC.TypeLits (Symbol) import Language.Haskell.TH.Syntax qualified as THS +import Pkg.DBUtils (WrappedEnumSC (..)) import Relude import Web.HttpApiData (FromHttpApiData) @@ -108,8 +108,8 @@ idFromText = fmap UUIDId . 
UUID.fromText data BaselineState = BSLearning | BSEstablished deriving stock (Eq, Generic, Read, Show) deriving anyclass (NFData) - deriving (FromField, ToField) via WrappedEnumSC "BS" BaselineState deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.ConstructorTagModifier '[DAE.StripPrefix "BS", DAE.CamelToSnake]] BaselineState + deriving (FromField, ToField) via WrappedEnumSC "BS" BaselineState instance Default BaselineState where From 3e3245449a4dbe3253cafdd6f29d847496c1822d Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Fri, 9 Jan 2026 23:09:53 +0000 Subject: [PATCH 31/71] fix migration files and remove baselines table --- ...ata_for_anomalies_issue_update_trigger.sql | 18 +++---- .../0026_anomaly_detection_baselines.sql | 49 ++----------------- 2 files changed, 13 insertions(+), 54 deletions(-) diff --git a/static/migrations/0025_trace_data_for_anomalies_issue_update_trigger.sql b/static/migrations/0025_trace_data_for_anomalies_issue_update_trigger.sql index 4941221c0..d0098044d 100644 --- a/static/migrations/0025_trace_data_for_anomalies_issue_update_trigger.sql +++ b/static/migrations/0025_trace_data_for_anomalies_issue_update_trigger.sql @@ -1,14 +1,14 @@ BEGIN; -ALTER TABLE apis.endpoints - ADD COLUMN first_trace_id, - ADD COLUMN recent_trace_id, - ADD COLUMN service; - -ALTER TABLE apis.shapes - ADD COLUMN first_trace_id, - ADD COLUMN recent_trace_id, - ADD COLUMN service; +ALTER TABLE apis.endpoints + ADD COLUMN IF NOT EXISTS first_trace_id TEXT, + ADD COLUMN IF NOT EXISTS recent_trace_id TEXT, + ADD COLUMN IF NOT EXISTS service TEXT; + +ALTER TABLE apis.shapes + ADD COLUMN IF NOT EXISTS first_trace_id TEXT, + ADD COLUMN IF NOT EXISTS recent_trace_id TEXT, + ADD COLUMN IF NOT EXISTS service TEXT; CREATE OR REPLACE FUNCTION apis.update_occurance() RETURNS trigger AS $$ diff --git a/static/migrations/0026_anomaly_detection_baselines.sql b/static/migrations/0026_anomaly_detection_baselines.sql index 9442eb520..bf32c94b6 100644 --- a/static/migrations/0026_anomaly_detection_baselines.sql +++ b/static/migrations/0026_anomaly_detection_baselines.sql @@ -1,45 +1,5 @@ BEGIN; --- ============================================================================ --- 2. ENDPOINT BASELINES --- ============================================================================ --- Per-endpoint, service and log pattern behavioral baselines for detecting spikes/degradations. 
--- Dimensions: error_rate, latency, volume - -CREATE TABLE IF NOT EXISTS apis.baselines ( - id BIGSERIAL PRIMARY KEY, - project_id UUID NOT NULL REFERENCES projects.projects(id) ON DELETE CASCADE, - subject_type TEXT NOT NULL, -- 'endpoint', 'service', 'log_pattern' - subject_key TEXT NOT NULL, -- endpoint_hash or service_name or pattern_hash - - state TEXT NOT NULL DEFAULT 'learning', -- 'learning', 'established' - min_observations INT DEFAULT 1000, -- need this many data points to establish - - baseline_data JSONB NOT NULL DEFAULT '{}', - /* - error_rate: { "mean": 0.02, "stddev": 0.008, "samples": 5000 } - latency: { "mean": 65, "stddev": 40, "p50": 45, "p95": 120, "p99": 250, "samples": 5000 } - volume: { "mean": 150, "stddev": 35, "samples": 1440 } - */ - - baseline_window_hours INT DEFAULT 24, - - last_calculated_at TIMESTAMPTZ, - established_at TIMESTAMPTZ, - created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), - updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), - - UNIQUE(project_id, subject_key, dimension) -); - -SELECT manage_updated_at('apis.baselines'); - -CREATE INDEX IF NOT EXISTS idx_baselines_lookup -ON apis.baselines(project_id, subject_key, subject_type); - -CREATE INDEX IF NOT EXISTS idx_baselines_established -ON apis.baselines(project_id, state) -WHERE state = 'established'; -- ============================================================================ @@ -111,8 +71,7 @@ CREATE TABLE apis.errors ( baseline_updated_at TIMESTAMPTZ, is_ignored BOOLEAN DEFAULT false, - ignored_until TIMESTAMPTZ, - PRIMARY KEY (id) + ignored_until TIMESTAMPTZ ); SELECT manage_updated_at('apis.errors'); @@ -121,7 +80,7 @@ CREATE INDEX idx_errors_project_state ON apis.errors (project_id, state); CREATE INDEX idx_errors_last_seen ON apis.errors (project_id, last_event_id); CREATE UNIQUE INDEX idx_apis_errors_project_id_hash ON apis.errors(project_id, hash); CREATE INDEX idx_apis_errors_project_id ON apis.errors(project_id); -CREATE INDEX idx_errors_active ON apis.errors(project_id, state, last_seen_at DESC) WHERE state != 'resolved'; +CREATE INDEX idx_errors_active ON apis.errors(project_id, state) WHERE state != 'resolved'; CREATE INDEX idx_errors_state ON apis.errors(project_id, state); CREATE TRIGGER error_created_anomaly AFTER INSERT ON apis.errors FOR EACH ROW EXECUTE PROCEDURE apis.new_error_proc('runtime_exception', 'created', 'skip_anomaly_record'); @@ -133,7 +92,7 @@ CREATE TABLE apis.error_events ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), project_id UUID NOT NULL REFERENCES projects.projects(id) ON DELETE CASCADE, occurred_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), - target_hash TEXT NOT NULL REFERENCES apis.errors(hash) ON DELETE CASCADE, + target_hash TEXT NOT NULL, -- references apis.errors.hash (no FK due to composite unique) exception_type TEXT NOT NULL, message TEXT NOT NULL, stack_trace TEXT NOT NULL, @@ -151,7 +110,7 @@ CREATE TABLE apis.error_events ( user_email TEXT, user_ip INET, session_id TEXT, - sample_rate FLOAT NOT NULL DEFAULT 1.0, + sample_rate FLOAT NOT NULL DEFAULT 1.0 ); -- Indexes for efficient queries From 4c46af43ea73f7f034576e216320978ff0322116 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Fri, 9 Jan 2026 23:24:10 +0000 Subject: [PATCH 32/71] more migration files cleanups --- ...ata_for_anomalies_issue_update_trigger.sql | 21 -------- .../0031_new_api_change_triggers.sql | 48 +++---------------- 2 files changed, 7 insertions(+), 62 deletions(-) diff --git a/static/migrations/0025_trace_data_for_anomalies_issue_update_trigger.sql 
b/static/migrations/0025_trace_data_for_anomalies_issue_update_trigger.sql index d0098044d..a5db8d485 100644 --- a/static/migrations/0025_trace_data_for_anomalies_issue_update_trigger.sql +++ b/static/migrations/0025_trace_data_for_anomalies_issue_update_trigger.sql @@ -10,25 +10,4 @@ ALTER TABLE apis.shapes ADD COLUMN IF NOT EXISTS recent_trace_id TEXT, ADD COLUMN IF NOT EXISTS service TEXT; -CREATE OR REPLACE FUNCTION apis.update_occurance() -RETURNS trigger AS $$ -DECLARE - target_hash TEXT; -BEGIN - target_hash := NEW.hash; - UPDATE apis.issues - SET - affected_requests = affected_requests + 1, - updated_at = NOW() - WHERE endpoint_hash = hash; - RETURN NULL; -END; -$$ LANGUAGE plpgsql; - -CREATE TRIGGER trg_update_issue_from_endpoints AFTER UPDATE ON apis.endpoints FOR EACH ROW EXECUTE FUNCTION apis.update_occurance(); -CREATE TRIGGER trg_update_issue_from_shapes AFTER UPDATE ON apis.shapes FOR EACH ROW EXECUTE FUNCTION apis.update_occurance(); -CREATE TRIGGER trg_update_issue_from_format AFTER UPDATE ON apis.format FOR EACH ROW EXECUTE FUNCTION apis.update_occurance(); -CREATE TRIGGER trg_update_issue_from_fields AFTER UPDATE ON apis.fields FOR EACH ROW EXECUTE FUNCTION apis.update_occurance(); -CREATE TRIGGER trg_update_issue_from_errors AFTER UPDATE ON apis.errors FOR EACH ROW EXECUTE FUNCTION apis.update_occurance(); - COMMIT; \ No newline at end of file diff --git a/static/migrations/0031_new_api_change_triggers.sql b/static/migrations/0031_new_api_change_triggers.sql index be01486e7..3b48248a6 100644 --- a/static/migrations/0031_new_api_change_triggers.sql +++ b/static/migrations/0031_new_api_change_triggers.sql @@ -1,11 +1,6 @@ --- Migration: Replace generic new_anomaly_proc with specific triggers for NewEndpoint, NewShape, NewFieldChange --- These triggers create background jobs directly without inserting into apis.anomalies table BEGIN; ------------------------------------------------------------------------- --- UNIFIED API CHANGE TRIGGER FUNCTION ------------------------------------------------------------------------- CREATE OR REPLACE FUNCTION apis.api_change_detected_proc() RETURNS trigger AS $$ DECLARE job_tag TEXT; @@ -13,48 +8,19 @@ BEGIN IF TG_WHEN <> 'AFTER' THEN RAISE EXCEPTION 'apis.api_change_detected_proc() may only run as an AFTER trigger'; END IF; - - -- Get job tag from trigger argument (NewEndpoint, NewShape, NewFieldChange) job_tag := TG_ARGV[0]; - INSERT INTO background_jobs (run_at, status, payload) - VALUES ( - now(), - 'queued', - jsonb_build_object( - 'tag', job_tag, - 'projectId', NEW.project_id, - 'hash', NEW.hash - ) - ); - + VALUES (now(),'queued',jsonb_build_object('tag', job_tag,'projectId', NEW.project_id,'hash', NEW.hash)); RETURN NULL; END; $$ LANGUAGE plpgsql; ------------------------------------------------------------------------- --- DROP OLD TRIGGERS AND CREATE NEW ONES ------------------------------------------------------------------------- - --- Drop old triggers that use new_anomaly_proc -DROP TRIGGER IF EXISTS endpoint_created_anomaly ON apis.endpoints; -DROP TRIGGER IF EXISTS shapes_created_anomaly ON apis.shapes; -DROP TRIGGER IF EXISTS fields_created_anomaly ON apis.fields; - --- Create new triggers -CREATE TRIGGER endpoint_created_new - AFTER INSERT ON apis.endpoints - FOR EACH ROW - EXECUTE FUNCTION apis.api_change_detected_proc('NewEndpoint'); - -CREATE TRIGGER shape_created_new - AFTER INSERT ON apis.shapes - FOR EACH ROW - EXECUTE FUNCTION apis.api_change_detected_proc('NewShape'); +DROP TRIGGER IF EXISTS 
fields_created_anomaly AFTER INSERT ON apis.fields +DROP TRIGGER IF EXISTS endpoint_created_anomaly AFTER INSERT ON apis.endpoints +DROP TRIGGER IF EXISTS shapes_created_anomaly AFTER INSERT ON apis.shapes -CREATE TRIGGER field_created_new - AFTER INSERT ON apis.fields - FOR EACH ROW - EXECUTE FUNCTION apis.api_change_detected_proc('NewFieldChange'); +CREATE TRIGGER endpoint_created_new AFTER INSERT ON apis.endpoints FOR EACH ROW EXECUTE FUNCTION apis.api_change_detected_proc('NewEndpoint'); +CREATE TRIGGER shape_created_new AFTER INSERT ON apis.shapes FOR EACH ROW EXECUTE FUNCTION apis.api_change_detected_proc('NewShape'); +CREATE TRIGGER field_created_new AFTER INSERT ON apis.fields FOR EACH ROW EXECUTE FUNCTION apis.api_change_detected_proc('NewFieldChange'); COMMIT; From 3d00ee2b621976557508de7dd9cd8b83fdd6c74d Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Fri, 9 Jan 2026 23:38:40 +0000 Subject: [PATCH 33/71] fix drop trigger syntax --- static/migrations/0031_new_api_change_triggers.sql | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/static/migrations/0031_new_api_change_triggers.sql b/static/migrations/0031_new_api_change_triggers.sql index 3b48248a6..116462be8 100644 --- a/static/migrations/0031_new_api_change_triggers.sql +++ b/static/migrations/0031_new_api_change_triggers.sql @@ -15,9 +15,9 @@ BEGIN END; $$ LANGUAGE plpgsql; -DROP TRIGGER IF EXISTS fields_created_anomaly AFTER INSERT ON apis.fields -DROP TRIGGER IF EXISTS endpoint_created_anomaly AFTER INSERT ON apis.endpoints -DROP TRIGGER IF EXISTS shapes_created_anomaly AFTER INSERT ON apis.shapes +DROP TRIGGER IF EXISTS fields_created_anomaly ON apis.fields; +DROP TRIGGER IF EXISTS endpoint_created_anomaly ON apis.endpoints; +DROP TRIGGER IF EXISTS shapes_created_anomaly ON apis.shapes; CREATE TRIGGER endpoint_created_new AFTER INSERT ON apis.endpoints FOR EACH ROW EXECUTE FUNCTION apis.api_change_detected_proc('NewEndpoint'); CREATE TRIGGER shape_created_new AFTER INSERT ON apis.shapes FOR EACH ROW EXECUTE FUNCTION apis.api_change_detected_proc('NewShape'); From d31b2510f8d74d10efdd8aac6c5e2792051d70c4 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Sat, 10 Jan 2026 14:46:03 +0000 Subject: [PATCH 34/71] initialize added fields to ATError --- src/Models/Telemetry/Telemetry.hs | 16 +++++++++++++++- src/Pkg/Mail.hs | 4 ++-- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/src/Models/Telemetry/Telemetry.hs b/src/Models/Telemetry/Telemetry.hs index cc342322c..4395084a2 100644 --- a/src/Models/Telemetry/Telemetry.hs +++ b/src/Models/Telemetry/Telemetry.hs @@ -1043,11 +1043,18 @@ extractATError spanObj (AE.Object o) = do Just (AE.String s) -> Just s _ -> Nothing getTextOrEmpty k = fromMaybe "" (lookupText k) + getUserAttrM k v = case unAesonTextMaybe spanObj.resource >>= Map.lookup v of + Just (AE.Object userAttrs) -> KEM.lookup k userAttrs >>= asText + _ -> Nothing typ = getTextOrEmpty "type" msg = getTextOrEmpty "message" stack = getTextOrEmpty "stacktrace" + userId = getUserAttrM "id" "user" + userEmail = getUserAttrM "email" "user" + sessionId = getUserAttrM "id" "session" + -- TODO: parse telemetry.sdk.name to SDKTypes tech = case unAesonTextMaybe spanObj.resource >>= Map.lookup "telemetry" of Just (AE.Object tel) -> @@ -1072,6 +1079,7 @@ extractATError spanObj (AE.Object o) = do -- The hash is critical for grouping similar errors together -- Hash components: projectId + service + span name + error type + sanitized message/stack -- This ensures similar errors are grouped while 
allowing variations in the actual message
+
   return
     $ Errors.ATError
       { projectId = UUID.fromText spanObj.project_id >>= (Just . UUIDId)
@@ -1081,7 +1089,7 @@
       , message = msg
       , rootErrorMessage = msg
       , stackTrace = stack
-      , hash = Just (toXXHash (spanObj.project_id <> fromMaybe "" serviceName <> fromMaybe "" spanObj.name <> typ <> replaceAllFormats (msg <> stack)))
+      , hash = toXXHash (spanObj.project_id <> fromMaybe "" serviceName <> fromMaybe "" spanObj.name <> typ <> replaceAllFormats (msg <> stack))
       , technology = Nothing
       , serviceName = serviceName
       , requestMethod = method
@@ -1089,6 +1097,12 @@
       , spanId = spanId
       , traceId = trId
       , runtime = tech
+      , parentSpanId = spanObj.parent_id
+      , endpointHash = Nothing
+      , environment = Nothing
+      , userId = userId
+      , userEmail = userEmail
+      , sessionId = sessionId
       }
 extractATError _ _ = Nothing
diff --git a/src/Pkg/Mail.hs b/src/Pkg/Mail.hs
index 305805bee..81aab26c3 100644
--- a/src/Pkg/Mail.hs
+++ b/src/Pkg/Mail.hs
@@ -123,7 +123,7 @@ sendWhatsAppAlert alert pid pTitle tos = do
   case alert of
     RuntimeErrorAlert{..} -> do
       let template = appCtx.config.whatsappErrorTemplate
-          url = pid.toText <> "/anomalies/by_hash/" <> fromMaybe "" errorData.hash
+          url = pid.toText <> "/anomalies/by_hash/" <> errorData.hash
           contentVars = AE.object ["1" AE..= ("*" <> pTitle <> "*"), "2" AE..= ("*" <> errorData.errorType <> "*"), "3" AE..= ("`" <> errorData.message <> "`"), "4" AE..= url]
       sendAlert template contentVars
       pass
@@ -326,7 +326,7 @@ discordErrorAlert err project projectUrl =
       "content": "**🔴 New Runtime Error**"
     }|]
   where
-    url = projectUrl <> "/anomalies/by_hash/" <> fromMaybe "" err.hash
+    url = projectUrl <> "/anomalies/by_hash/" <> err.hash
     msg = "```" <> err.message <> "```"
     method = fromMaybe "" err.requestMethod
     path = fromMaybe "" err.requestPath

From 0e912437bf3478a563dff68a4cacda21da933750 Mon Sep 17 00:00:00 2001
From: Yussif Mohammed
Date: Sat, 10 Jan 2026 14:51:26 +0000
Subject: [PATCH 35/71] improved sentry level error fingerprinting

---
 src/Models/Apis/Errors.hs | 578 +++++++++++++++++++++++++++++++++++++-
 1 file changed, 574 insertions(+), 4 deletions(-)

diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs
index e5e0344c4..44dbf0bab 100644
--- a/src/Models/Apis/Errors.hs
+++ b/src/Models/Apis/Errors.hs
@@ -25,11 +25,20 @@ module Models.Apis.Errors (
   getErrorEventStats,
   checkErrorSpike,
   getErrorsWithCurrentRates,
+
+  -- Error Fingerprinting (Sentry-style)
+  StackFrame (..),
+  parseStackTrace,
+  normalizeStackTrace,
+  normalizeMessage,
+  computeErrorFingerprint,
 ) where
 
 import Data.Aeson qualified as AE
+import Data.Char (isSpace)
 import Data.Default
+import Data.List qualified as L
+import Data.Text qualified as T
 import Data.Time
 import Data.UUID qualified as UUID
 import Data.Vector qualified as V
@@ -50,7 +59,8 @@ import Models.Users.Users qualified as Users
 import Pkg.DeriveUtils (BaselineState (..))
 import Relude hiding (id)
 import System.Types (DB)
-import Utils (DBField (MkDBField))
+import Text.RE.TDFA (RE, SearchReplace, ed, (*=~/))
+import Utils (DBField (MkDBField), toXXHash)
@@ -160,7 +170,7 @@ data ATError = ATError
   , message :: Text
   , rootErrorMessage :: Text
   , stackTrace :: Text
-  , hash :: Maybe Text
+  , hash :: Text
   , technology :: Maybe RequestDumps.SDKTypes
   , requestMethod :: Maybe Text
   , requestPath :: Maybe Text
@@ -173,7 +183,6 @@
   , endpointHash :: Maybe Text
   , userId :: Maybe Text
   , userEmail :: Maybe 
Text
-  , userIp :: Maybe Text
   , sessionId :: Maybe Text
   }
   deriving stock (Generic, Show)
@@ -204,7 +213,6 @@ data ErrorEvent = ErrorEvent
   , parentSpanId :: Maybe Text
   , userId :: Maybe Text
   , userEmail :: Maybe Text
-  , userIp :: Maybe Text
   , sessionId :: Maybe Text
   , sampleRate :: Double
   , ingestionId :: Maybe UUID.UUID
@@ -551,3 +559,565 @@ upsertErrorQueryAndParam pid err = (q, params)
   , MkDBField err.runtime
   , MkDBField err
   ]
+
+
+-- =============================================================================
+-- Error Fingerprinting (Sentry-style)
+-- =============================================================================
+-- Fingerprinting priority:
+-- 1. Stack trace (if available and has in-app frames)
+-- 2. Exception type + normalized message
+-- 3. Normalized message only (fallback)
+--
+-- Stack trace normalization extracts:
+-- - Module/package name
+-- - Function name (cleaned per platform)
+-- - Context line (whitespace normalized, max 120 chars)
+--
+-- Message normalization:
+-- - Limits to first 2 non-empty lines
+-- - Replaces UUIDs, IPs, emails, timestamps, numbers with placeholders
+
+
+-- | Represents a parsed stack frame
+data StackFrame = StackFrame
+  { sfFilePath :: Text -- ^ Full file path or module path
+  , sfModule :: Maybe Text -- ^ Module/package name (extracted from path)
+  , sfFunction :: Text -- ^ Function/method name
+  , sfLineNumber :: Maybe Int -- ^ Line number
+  , sfColumnNumber :: Maybe Int -- ^ Column number
+  , sfContextLine :: Maybe Text -- ^ Source code at this frame (if available)
+  , sfIsInApp :: Bool -- ^ Whether this is application code vs library/system
+  }
+  deriving stock (Eq, Generic, Show)
+  deriving anyclass (NFData)
+
+
+parseStackTrace :: Maybe RequestDumps.SDKTypes -> Text -> [StackFrame]
+parseStackTrace mSdk stackText =
+  let lns = filter (not . T.null . 
T.strip) $ T.lines stackText
+   in mapMaybe (parseStackFrame mSdk) lns
+
+
+-- | Parse a single stack frame line based on SDK type
+parseStackFrame :: Maybe RequestDumps.SDKTypes -> Text -> Maybe StackFrame
+parseStackFrame mSdk line =
+  let trimmed = T.strip line
+   in case mSdk of
+        Just sdk | isGoSDK sdk -> parseGoFrame trimmed
+        Just sdk | isJsSDK sdk -> parseJsFrame trimmed
+        Just sdk | isPythonSDK sdk -> parsePythonFrame trimmed
+        Just sdk | isJavaSDK sdk -> parseJavaFrame trimmed
+        Just sdk | isPhpSDK sdk -> parsePhpFrame trimmed
+        Just sdk | isDotNetSDK sdk -> parseDotNetFrame trimmed
+        _ -> parseGenericFrame trimmed
+  where
+    isGoSDK = \case
+      RequestDumps.GoGin -> True
+      RequestDumps.GoBuiltIn -> True
+      RequestDumps.GoGorillaMux -> True
+      RequestDumps.GoFiber -> True
+      RequestDumps.GoDefault -> True
+      RequestDumps.GoOutgoing -> True
+      _ -> False
+
+    isJsSDK = \case
+      RequestDumps.JsExpress -> True
+      RequestDumps.JsNest -> True
+      RequestDumps.JsFastify -> True
+      RequestDumps.JsAdonis -> True
+      RequestDumps.JsNext -> True
+      RequestDumps.JsAxiosOutgoing -> True
+      RequestDumps.JsOutgoing -> True
+      _ -> False
+
+    isPythonSDK = \case
+      RequestDumps.PythonFastApi -> True
+      RequestDumps.PythonFlask -> True
+      RequestDumps.PythonDjango -> True
+      RequestDumps.PythonOutgoing -> True
+      RequestDumps.PythonPyramid -> True
+      _ -> False
+
+    isJavaSDK = \case
+      RequestDumps.JavaSpringBoot -> True
+      RequestDumps.JavaSpring -> True
+      RequestDumps.JavaApacheOutgoing -> True
+      RequestDumps.JavaVertx -> True
+      _ -> False
+
+    isPhpSDK = \case
+      RequestDumps.PhpLaravel -> True
+      RequestDumps.PhpSymfony -> True
+      RequestDumps.PhpSlim -> True
+      RequestDumps.GuzzleOutgoing -> True
+      _ -> False
+
+    isDotNetSDK = \case
+      RequestDumps.DotNet -> True
+      RequestDumps.DotNetOutgoing -> True
+      _ -> False
+
+
+-- | Parse Go stack frame: "goroutine 1 [running]:" or "main.foo(0x1234)"
+-- Format: package.function(args) or /path/to/file.go:123 +0x1f
+parseGoFrame :: Text -> Maybe StackFrame
+parseGoFrame line
+  | "goroutine" `T.isPrefixOf` line = Nothing -- Skip goroutine headers
+  | ".go:" `T.isInfixOf` line =
+      -- File path line: /path/to/file.go:123 +0x1f
+      let (pathPart, _) = T.breakOn " +" line
+          (filePath, lineCol) = T.breakOnEnd ":" pathPart
+          lineNum = readMaybe $ toString $ T.takeWhile (/= ':') lineCol
+       in Just StackFrame
+            { sfFilePath = T.dropEnd 1 filePath -- Remove trailing ':'
+            , sfModule = extractGoModule filePath
+            , sfFunction = ""
+            , sfLineNumber = lineNum
+            , sfColumnNumber = Nothing
+            , sfContextLine = Nothing
+            , sfIsInApp = isGoInApp filePath
+            }
+  | "(" `T.isInfixOf` line =
+      -- Function call line: main.foo(0x1234, 0x5678)
+      let (funcPart, _) = T.breakOn "(" line
+       in Just StackFrame
+            { sfFilePath = ""
+            , sfModule = extractGoModuleFromFunc funcPart
+            , sfFunction = extractGoFuncName funcPart
+            , sfLineNumber = Nothing
+            , sfColumnNumber = Nothing
+            , sfContextLine = Nothing
+            , sfIsInApp = isGoFuncInApp funcPart
+            }
+  | otherwise = Nothing
+  where
+    extractGoModule path =
+      let parts = T.splitOn "/" path
+       in if length parts > 2
+            then Just $ T.intercalate "/" $ take 3 parts
+            else Nothing
+
+    extractGoModuleFromFunc func =
+      let parts = T.splitOn "." func
+       in if length parts > 1
+            then Just $ T.intercalate "." $ init parts
+            else Nothing
+
+    extractGoFuncName func =
+      let parts = T.splitOn "." 
func
+       in if not (null parts) then last parts else func
+
+    isGoInApp path = not $ any (`T.isInfixOf` path)
+      ["go/src/", "pkg/mod/", "vendor/", "/runtime/", "/net/", "/syscall/"]
+
+    isGoFuncInApp func = not $ any (`T.isPrefixOf` func)
+      ["runtime.", "syscall.", "net.", "net/http.", "reflect."]
+
+
+-- | Parse JavaScript stack frame
+-- Formats:
+--   at functionName (filePath:line:col)
+--   at filePath:line:col
+--   at async functionName (filePath:line:col)
+parseJsFrame :: Text -> Maybe StackFrame
+parseJsFrame line
+  | "at " `T.isPrefixOf` T.strip line =
+      let content = T.strip $ T.drop 3 $ T.strip line
+          -- Handle "at async ..."
+          content' = if "async " `T.isPrefixOf` content
+            then T.drop 6 content
+            else content
+       in if "(" `T.isInfixOf` content'
+            then parseJsWithParens content'
+            else parseJsWithoutParens content'
+  | otherwise = Nothing
+  where
+    parseJsWithParens txt =
+      let (funcPart, rest) = T.breakOn " (" txt
+          locationPart = T.dropAround (\c -> isSpace c || c == '(' || c == ')') rest
+          (filePath, lineCol) = parseJsLocation locationPart
+          (lineNum, colNum) = parseLineCol lineCol
+       in Just StackFrame
+            { sfFilePath = filePath
+            , sfModule = extractJsModule filePath
+            , sfFunction = cleanJsFunction funcPart
+            , sfLineNumber = lineNum
+            , sfColumnNumber = colNum
+            , sfContextLine = Nothing
+            , sfIsInApp = isJsInApp filePath
+            }
+
+    parseJsWithoutParens txt =
+      let (filePath, lineCol) = parseJsLocation txt
+          (lineNum, colNum) = parseLineCol lineCol
+       in Just StackFrame
+            { sfFilePath = filePath
+            , sfModule = extractJsModule filePath
+            , sfFunction = ""
+            , sfLineNumber = lineNum
+            , sfColumnNumber = colNum
+            , sfContextLine = Nothing
+            , sfIsInApp = isJsInApp filePath
+            }
+
+    parseJsLocation loc =
+      -- Split from the right to handle paths with colons (Windows)
+      let parts = T.splitOn ":" loc
+          n = length parts
+       in if n >= 3
+            then (T.intercalate ":" $ take (n - 2) parts, T.intercalate ":" $ drop (n - 2) parts)
+            else (loc, "")
+
+    parseLineCol lc =
+      let parts = T.splitOn ":" lc
+       in case parts of
+            [l, c] -> (readMaybe $ toString l, readMaybe $ toString c)
+            [l] -> (readMaybe $ toString l, Nothing)
+            _ -> (Nothing, Nothing)
+
+    extractJsModule path =
+      let baseName = last $ T.splitOn "/" path
+       in Just $ T.toLower $ fromMaybe baseName $ T.stripSuffix ".js" baseName
+            <|> T.stripSuffix ".ts" baseName
+            <|> T.stripSuffix ".mjs" baseName
+            <|> T.stripSuffix ".cjs" baseName
+
+    cleanJsFunction func =
+      -- Remove namespacing: Object.foo.bar -> bar
+      let parts = T.splitOn "." 
func
+       in if length parts > 1 then last parts else func
+
+    isJsInApp path = not $ any (`T.isInfixOf` path)
+      ["node_modules/", "<anonymous>", "internal/", "node:"]
+
+
+-- | Parse Python stack frame
+-- Format: File "path/to/file.py", line 123, in function_name
+parsePythonFrame :: Text -> Maybe StackFrame
+parsePythonFrame line
+  | "File \"" `T.isPrefixOf` T.strip line =
+      let content = T.drop 6 $ T.strip line -- Remove 'File "'
+          (filePath, rest) = T.breakOn "\"" content
+          -- Parse ", line 123, in func_name"
+          parts = T.splitOn ", " $ T.drop 2 rest -- Skip '",'
+          lineNum = case find ("line " `T.isPrefixOf`) parts of
+            Just p -> readMaybe $ toString $ T.drop 5 p
+            Nothing -> Nothing
+          funcName = case find ("in " `T.isPrefixOf`) parts of
+            Just p -> T.drop 3 p
+            Nothing -> ""
+       in Just StackFrame
+            { sfFilePath = filePath
+            , sfModule = extractPythonModule filePath
+            , sfFunction = cleanPythonFunction funcName
+            , sfLineNumber = lineNum
+            , sfColumnNumber = Nothing
+            , sfContextLine = Nothing
+            , sfIsInApp = isPythonInApp filePath
+            }
+  | otherwise = Nothing
+  where
+    extractPythonModule path =
+      let baseName = last $ T.splitOn "/" path
+          moduleName = fromMaybe baseName $ T.stripSuffix ".py" baseName
+       in Just moduleName
+
+    cleanPythonFunction func =
+      -- Remove lambda indicators
+      T.replace "<lambda>" "lambda" $
+        T.replace "<listcomp>" "listcomp" $
+          T.replace "<dictcomp>" "dictcomp" func
+
+    isPythonInApp path = not $ any (`T.isInfixOf` path)
+      ["site-packages/", "dist-packages/", "/lib/python", "<frozen"]
+
+
+-- | Parse Java stack frame
+-- Format: at com.example.ClassName.methodName(FileName.java:123)
+parseJavaFrame :: Text -> Maybe StackFrame
+parseJavaFrame line
+  | "at " `T.isPrefixOf` T.strip line =
+      let content = T.drop 3 $ T.strip line
+          (qualifiedMethod, rest) = T.breakOn "(" content
+          locationPart = T.dropAround (\c -> c == '(' || c == ')') rest
+          (fileName, lineNum) = parseJavaLocation locationPart
+          (moduleName, funcName) = splitJavaQualified qualifiedMethod
+       in Just StackFrame
+            { sfFilePath = fileName
+            , sfModule = Just moduleName
+            , sfFunction = cleanJavaFunction funcName
+            , sfLineNumber = lineNum
+            , sfColumnNumber = Nothing
+            , sfContextLine = Nothing
+            , sfIsInApp = isJavaInApp qualifiedMethod
+            }
+  | otherwise = Nothing
+  where
+    parseJavaLocation loc =
+      let (file, lineStr) = T.breakOn ":" loc
+       in (file, readMaybe $ toString $ T.drop 1 lineStr)
+
+    splitJavaQualified qualified =
+      let parts = T.splitOn "." qualified
+       in if length parts > 1
+            then (T.intercalate "." 
+
+
+-- | Parse Python stack frame
+-- Format: File "path/to/file.py", line 123, in function_name
+parsePythonFrame :: Text -> Maybe StackFrame
+parsePythonFrame line
+  | "File \"" `T.isPrefixOf` T.strip line =
+      let content = T.drop 6 $ T.strip line -- Remove 'File "'
+          (filePath, rest) = T.breakOn "\"" content
+          -- Parse ", line 123, in func_name"
+          parts = T.splitOn ", " $ T.drop 2 rest -- Skip '",'
+          lineNum = case find ("line " `T.isPrefixOf`) parts of
+            Just p -> readMaybe $ toString $ T.drop 5 p
+            Nothing -> Nothing
+          funcName = case find ("in " `T.isPrefixOf`) parts of
+            Just p -> T.drop 3 p
+            Nothing -> ""
+      in Just StackFrame
+        { sfFilePath = filePath
+        , sfModule = extractPythonModule filePath
+        , sfFunction = cleanPythonFunction funcName
+        , sfLineNumber = lineNum
+        , sfColumnNumber = Nothing
+        , sfContextLine = Nothing
+        , sfIsInApp = isPythonInApp filePath
+        }
+  | otherwise = Nothing
+  where
+    extractPythonModule path =
+      let baseName = last $ T.splitOn "/" path
+          moduleName = fromMaybe baseName $ T.stripSuffix ".py" baseName
+      in Just moduleName
+
+    cleanPythonFunction func =
+      -- Remove lambda indicators
+      T.replace "<lambda>" "lambda" $
+        T.replace "<listcomp>" "listcomp" $
+          T.replace "<dictcomp>" "dictcomp" func
+
+    isPythonInApp path = not $ any (`T.isInfixOf` path)
+      ["site-packages/", "dist-packages/", "/lib/python", "<frozen"]
+
+
+-- | Parse Java stack frame
+-- Format: at package.ClassName.methodName(FileName.java:123)
+parseJavaFrame :: Text -> Maybe StackFrame
+parseJavaFrame line
+  | "at " `T.isPrefixOf` T.strip line =
+      let content = T.drop 3 $ T.strip line
+          (qualifiedMethod, rest) = T.breakOn "(" content
+          locationPart = T.dropAround (\c -> c == '(' || c == ')') rest
+          (fileName, lineNum) = parseJavaLocation locationPart
+          (moduleName, funcName) = splitJavaQualified qualifiedMethod
+      in Just StackFrame
+        { sfFilePath = fileName
+        , sfModule = Just moduleName
+        , sfFunction = cleanJavaFunction funcName
+        , sfLineNumber = lineNum
+        , sfColumnNumber = Nothing
+        , sfContextLine = Nothing
+        , sfIsInApp = isJavaInApp qualifiedMethod
+        }
+  | otherwise = Nothing
+  where
+    parseJavaLocation loc =
+      let (file, lineStr) = T.breakOn ":" loc
+      in (file, readMaybe $ toString $ T.drop 1 lineStr)
+
+    splitJavaQualified qualified =
+      let parts = T.splitOn "." qualified
+      in if length parts > 1
+           then (T.intercalate "." $ init parts, last parts)
+           else ("", qualified)
+
+    cleanJavaFunction func =
+      -- Remove generics: method<T> -> method
+      T.takeWhile (/= '<') func
+
+    isJavaInApp qualified = not $ any (`T.isPrefixOf` qualified)
+      ["java.", "javax.", "sun.", "com.sun.", "jdk.", "org.springframework."]
+
+
+-- | Parse PHP stack frame
+-- Format: #0 /path/to/file.php(123): ClassName->methodName()
+parsePhpFrame :: Text -> Maybe StackFrame
+parsePhpFrame line
+  | "#" `T.isPrefixOf` T.strip line =
+      let content = T.drop 1 $ T.dropWhile (/= ' ') $ T.strip line -- Skip "#N "
+          (pathPart, funcPart) = T.breakOn ": " content
+          (filePath, lineNum) = parsePhpPath pathPart
+          funcName = T.takeWhile (/= '(') $ T.drop 2 funcPart
+      in Just StackFrame
+        { sfFilePath = filePath
+        , sfModule = extractPhpModule filePath
+        , sfFunction = cleanPhpFunction funcName
+        , sfLineNumber = lineNum
+        , sfColumnNumber = Nothing
+        , sfContextLine = Nothing
+        , sfIsInApp = isPhpInApp filePath
+        }
+  | otherwise = Nothing
+  where
+    parsePhpPath path =
+      let (file, lineStr) = T.breakOn "(" path
+          lineNum = readMaybe $ toString $ T.takeWhile (/= ')') $ T.drop 1 lineStr
+      in (file, lineNum)
+
+    extractPhpModule path =
+      let baseName = last $ T.splitOn "/" path
+      in Just $ fromMaybe baseName $ T.stripSuffix ".php" baseName
+
+    cleanPhpFunction func =
+      -- Remove {closure} markers
+      T.replace "{closure}" "closure" $
+        -- Simplify class::method or class->method
+        let parts = T.splitOn "->" func
+        in if length parts > 1 then last parts else
+          let parts' = T.splitOn "::" func
+          in if length parts' > 1 then last parts' else func
+
+    isPhpInApp path = not $ any (`T.isInfixOf` path)
+      ["/vendor/", "/phar://"]
+
+
+-- | Parse .NET stack frame
+-- Format: at Namespace.Class.Method(params) in /path/to/file.cs:line 123
+parseDotNetFrame :: Text -> Maybe StackFrame
+parseDotNetFrame line
+  | "at " `T.isPrefixOf` T.strip line =
+      let content = T.drop 3 $ T.strip line
+          (methodPart, locationPart) = T.breakOn " in " content
+          qualifiedMethod = T.takeWhile (/= '(') methodPart
+          (moduleName, funcName) = splitDotNetQualified qualifiedMethod
+          (filePath, lineNum) = parseDotNetLocation $ T.drop 4 locationPart
+      in Just StackFrame
+        { sfFilePath = filePath
+        , sfModule = Just moduleName
+        , sfFunction = cleanDotNetFunction funcName
+        , sfLineNumber = lineNum
+        , sfColumnNumber = Nothing
+        , sfContextLine = Nothing
+        , sfIsInApp = isDotNetInApp qualifiedMethod
+        }
+  | otherwise = Nothing
+  where
+    splitDotNetQualified qualified =
+      let parts = T.splitOn "." qualified
+      in if length parts > 1
+           then (T.intercalate "." $ init parts, last parts)
+           else ("", qualified)
+
+    parseDotNetLocation loc =
+      let (path, lineStr) = T.breakOn ":line " loc
+      in (path, readMaybe $ toString $ T.drop 6 lineStr)
+
+    cleanDotNetFunction func =
+      -- Remove generic arity: Method`1 -> Method
+      T.takeWhile (/= '`') func
+
+    isDotNetInApp qualified = not $ any (`T.isPrefixOf` qualified)
+      ["System.", "Microsoft.", "Newtonsoft."]
+
+
+-- | Generic stack frame parser for unknown formats
+parseGenericFrame :: Text -> Maybe StackFrame
+parseGenericFrame line =
+  let trimmed = T.strip line
+  in if T.null trimmed || "..." 
`T.isPrefixOf` trimmed + then Nothing + else Just StackFrame + { sfFilePath = trimmed + , sfModule = Nothing + , sfFunction = extractGenericFunction trimmed + , sfLineNumber = extractGenericLineNumber trimmed + , sfColumnNumber = Nothing + , sfContextLine = Nothing + , sfIsInApp = True -- Assume in-app by default + } + where + extractGenericFunction txt = + -- Try to find function-like patterns + let parts = T.words txt + in case find (\p -> "(" `T.isInfixOf` p || "." `T.isInfixOf` p) parts of + Just p -> T.takeWhile (/= '(') p + Nothing -> fromMaybe "" $ listToMaybe parts + + extractGenericLineNumber txt = + -- Look for :NUMBER or line NUMBER patterns + let parts = T.splitOn ":" txt + in case parts of + _ : rest -> listToMaybe $ mapMaybe (readMaybe . toString . T.takeWhile (/= ':')) rest + _ -> Nothing + + +-- | Normalize a stack trace for fingerprinting +-- Returns a list of normalized frame strings suitable for hashing +normalizeStackTrace :: Text -> Text -> Text +normalizeStackTrace runtime stackText = + let frames = parseStackTrace runtime stackText + inAppFrames = filter sfIsInApp frames + framesToUse = if null inAppFrames then frames else inAppFrames + normalizedFrames = map normalizeFrame framesToUse + in T.intercalate "\n" normalizedFrames + where + normalizeFrame :: StackFrame -> Text + normalizeFrame frame = + let modulePart = fromMaybe "" frame.sfModule + funcPart = normalizeFunction Nothing frame.sfFunction + -- Context line: normalize whitespace, truncate if > 120 chars + contextPart = maybe "" normalizeContextLine frame.sfContextLine + in T.intercalate "|" $ filter (not . T.null) [modulePart, funcPart, contextPart] + + normalizeFunction :: Text -> Text -> Text + normalizeFunction _ func = + -- Common normalizations across platforms: + -- 1. Remove memory addresses (0x...) + -- 2. Remove generic type parameters + -- 3. Remove parameter types + -- 4. Normalize anonymous/lambda markers + let noAddr = T.unwords $ filter (not . ("0x" `T.isPrefixOf`)) $ T.words func + noGenerics = T.takeWhile (/= '<') noAddr + noParams = T.takeWhile (/= '(') noGenerics + in T.strip noParams + + normalizeContextLine :: Text -> Text + normalizeContextLine ctx = + let normalized = T.unwords $ T.words ctx + in if T.length normalized > 120 + then "" -- Skip overly long context lines (like Sentry does) + else normalized + + +-- | Normalize an error message for fingerprinting +-- Limits to first 2 non-empty lines and replaces variable content +normalizeMessage :: Text -> Text +normalizeMessage msg = + let lns = take 2 $ filter (not . T.null . T.strip) $ T.lines msg + combined = T.intercalate " " $ map T.strip lns + -- Replace variable patterns with placeholders + normalized = replaceMessagePatterns combined + in T.strip normalized + + +-- | Replace variable patterns in messages (UUIDs, IPs, numbers, etc.) 
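+-- Editorial aside (not part of the original patch): assuming the Sentry-style
+-- placeholders restored below, the expected parameterization looks like this.
+-- >>> replaceMessagePatterns "Failed to load user c73bcdcc-2669-4bf6-81d3-e4ae73fb11fd"
+-- "Failed to load user <uuid>"
+-- >>> replaceMessagePatterns "timeout after 30 retries from 10.0.0.1"
+-- "timeout after <int> retries from <ip>"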
+-- Similar to Sentry's parameterization
+replaceMessagePatterns :: Text -> Text
+replaceMessagePatterns = applyPatterns messageNormalizationPatterns
+  where
+    applyPatterns :: [SearchReplace RE Text] -> Text -> Text
+    applyPatterns [] txt = txt
+    applyPatterns (p : ps) txt = applyPatterns ps (txt *=~/ p)
+
+
+-- | Patterns for message normalization
+-- Order matters: more specific patterns first
+messageNormalizationPatterns :: [SearchReplace RE Text]
+messageNormalizationPatterns =
+  [ -- UUIDs
+    [ed|[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}///<uuid>|]
+  , [ed|[0-9a-fA-F]{24}///<uuid>|]
+    -- Hashes
+  , [ed|[a-fA-F0-9]{64}///<hash>|]
+  , [ed|[a-fA-F0-9]{40}///<hash>|]
+  , [ed|[a-fA-F0-9]{32}///<hash>|]
+    -- Network
+  , [ed|((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)///<ip>|]
+  , [ed|[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}///<email>|]
+  , [ed|https?://[^\s]+///<url>|]
+    -- Dates/Times (ISO format)
+  , [ed|[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?(Z|[+\-][0-9]{2}:[0-9]{2})?///<datetime>|]
+  , [ed|[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}///<datetime>|]
+  , [ed|[0-9]{4}-[0-9]{2}-[0-9]{2}///<date>|]
+    -- Timestamps
+  , [ed|1[0-9]{12}///<timestamp>|]
+  , [ed|1[0-9]{9}///<timestamp>|]
+    -- Hex values
+  , [ed|0x[0-9A-Fa-f]+///<hex>|]
+    -- Quoted strings (after other patterns)
+  , [ed|"[^"]*"///<quoted>|]
+  , [ed|'[^']*'///<quoted>|]
+    -- Numbers (last, as they're most general)
+  , [ed|[+-]?[0-9]+\.[0-9]+///<float>|]
+  , [ed|[0-9]+///<int>|]
+  ]
+
+
+-- | Compute the error fingerprint hash using Sentry-style prioritization
+-- Priority:
+-- 1. Stack trace (if has meaningful in-app frames)
+-- 2. Exception type + message
+-- 3. Message only
+computeErrorFingerprint :: Projects.ProjectId -> Maybe Text -> Maybe Text -> Text -> Text -> Text -> Text -> Text
+computeErrorFingerprint projectId mService mEndpoint runtime exceptionType message stackTrace =
+  let -- Normalize components
+      normalizedStack = normalizeStackTrace runtime stackTrace
+      normalizedMsg = normalizeMessage message
+      normalizedType = T.strip exceptionType
+
+      -- Build fingerprint components based on priority
+      fingerprintComponents =
+        if hasUsableStackTrace normalizedStack
+          then
+            [ projectId.toText
+            , fromMaybe "" mService
+            , normalizedType
+            , normalizedStack
+            ]
+          else if not (T.null normalizedType)
+            then
+              [ projectId.toText
+              , fromMaybe "" mService
+              , fromMaybe "" mEndpoint
+              , normalizedType
+              , normalizedMsg
+              ]
+            else
+              [ projectId.toText
+              , fromMaybe "" mService
+              , fromMaybe "" mEndpoint
+              , normalizedMsg
+              ]
+
+      -- Combine and hash
+      combined = T.intercalate "|" $ filter (not . T.null) fingerprintComponents
+  in toXXHash combined
+  where
+    hasUsableStackTrace :: Text -> Bool
+    hasUsableStackTrace normalized =
+      let lns = T.lines normalized
+          nonEmptyLines = filter (not . T.null . 
T.strip) lns + in length nonEmptyLines >= 1 From 34994cd9e68007c2dd87c32d8aac3dd6bb0bca58 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Sat, 10 Jan 2026 15:03:37 +0000 Subject: [PATCH 36/71] group stack trace using language instead of sdk types --- src/Models/Apis/Errors.hs | 112 +++--------------------------- src/Models/Telemetry/Telemetry.hs | 2 +- 2 files changed, 12 insertions(+), 102 deletions(-) diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index 44dbf0bab..91469f26a 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -61,6 +61,7 @@ import Relude hiding (id) import System.Types (DB) import Text.RE.TDFA (RE, SearchReplace, ed, (*=~/)) import Utils (DBField (MkDBField), toXXHash) +import RequestMessages (replaceAllFormats) newtype ErrorId = ErrorId {unErrorId :: UUID.UUID} @@ -601,63 +602,17 @@ parseStackTrace mSdk stackText = -- | Parse a single stack frame line based on SDK type parseStackFrame :: Text -> Text -> Maybe StackFrame -parseStackFrame mSdk line = +parseStackFrame runtime line = let trimmed = T.strip line - in case mSdk of - Just sdk | isGoSDK sdk -> parseGoFrame trimmed - Just sdk | isJsSDK sdk -> parseJsFrame trimmed - Just sdk | isPythonSDK sdk -> parsePythonFrame trimmed - Just sdk | isJavaSDK sdk -> parseJavaFrame trimmed - Just sdk | isPhpSDK sdk -> parsePhpFrame trimmed - Just sdk | isDotNetSDK sdk -> parseDotNetFrame trimmed + in case runtime of + "go" -> parseGoFrame trimmed + "nodejs" -> parseJsFrame trimmed + "webjs" -> parseJsFrame trimmed + "python" -> parsePythonFrame trimmed + "java" -> parseJavaFrame trimmed + "php" -> parsePhpFrame trimmed + "dotnet" -> parseDotNetFrame trimmed _ -> parseGenericFrame trimmed - where - isGoSDK = \case - RequestDumps.GoGin -> True - RequestDumps.GoBuiltIn -> True - RequestDumps.GoGorillaMux -> True - RequestDumps.GoFiber -> True - RequestDumps.GoDefault -> True - RequestDumps.GoOutgoing -> True - _ -> False - - isJsSDK = \case - RequestDumps.JsExpress -> True - RequestDumps.JsNest -> True - RequestDumps.JsFastify -> True - RequestDumps.JsAdonis -> True - RequestDumps.JsNext -> True - RequestDumps.JsAxiosOutgoing -> True - RequestDumps.JsOutgoing -> True - _ -> False - - isPythonSDK = \case - RequestDumps.PythonFastApi -> True - RequestDumps.PythonFlask -> True - RequestDumps.PythonDjango -> True - RequestDumps.PythonOutgoing -> True - RequestDumps.PythonPyramid -> True - _ -> False - - isJavaSDK = \case - RequestDumps.JavaSpringBoot -> True - RequestDumps.JavaSpring -> True - RequestDumps.JavaApacheOutgoing -> True - RequestDumps.JavaVertx -> True - _ -> False - - isPhpSDK = \case - RequestDumps.PhpLaravel -> True - RequestDumps.PhpSymfony -> True - RequestDumps.PhpSlim -> True - RequestDumps.GuzzleOutgoing -> True - _ -> False - - isDotNetSDK = \case - RequestDumps.DotNet -> True - RequestDumps.DotNetOutgoing -> True - _ -> False - -- | Parse Go stack frame: "goroutine 1 [running]:" or "main.foo(0x1234)" -- Format: package.function(args) or /path/to/file.go:123 +0x1f @@ -1021,7 +976,6 @@ normalizeStackTrace runtime stackText = then "" -- Skip overly long context lines (like Sentry does) else normalized - -- | Normalize an error message for fingerprinting -- Limits to first 2 non-empty lines and replaces variable content normalizeMessage :: Text -> Text @@ -1029,53 +983,9 @@ normalizeMessage msg = let lns = take 2 $ filter (not . T.null . 
T.strip) $ T.lines msg combined = T.intercalate " " $ map T.strip lns -- Replace variable patterns with placeholders - normalized = replaceMessagePatterns combined + normalized = replaceAllFormats combined in T.strip normalized - --- | Replace variable patterns in messages (UUIDs, IPs, numbers, etc.) --- Similar to Sentry's parameterization -replaceMessagePatterns :: Text -> Text -replaceMessagePatterns = applyPatterns messageNormalizationPatterns - where - applyPatterns :: [SearchReplace RE Text] -> Text -> Text - applyPatterns [] txt = txt - applyPatterns (p : ps) txt = applyPatterns ps (txt *=~/ p) - - --- | Patterns for message normalization --- Order matters: more specific patterns first -messageNormalizationPatterns :: [SearchReplace RE Text] -messageNormalizationPatterns = - [ -- UUIDs - [ed|[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}///|] - , [ed|[0-9a-fA-F]{24}///|] - -- Hashes - , [ed|[a-fA-F0-9]{64}///|] - , [ed|[a-fA-F0-9]{40}///|] - , [ed|[a-fA-F0-9]{32}///|] - -- Network - , [ed|((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)///|] - , [ed|[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}///|] - , [ed|https?://[^\s]+///|] - -- Dates/Times (ISO format) - , [ed|[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?(Z|[+\-][0-9]{2}:[0-9]{2})?///|] - , [ed|[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}///|] - , [ed|[0-9]{4}-[0-9]{2}-[0-9]{2}///|] - -- Timestamps - , [ed|1[0-9]{12}///|] - , [ed|1[0-9]{9}///|] - -- Hex values - , [ed|0x[0-9A-Fa-f]+///|] - -- Quoted strings (after other patterns) - , [ed|"[^"]*"///|] - , [ed|'[^']*'///|] - -- Numbers (last, as they're most general) - , [ed|[+-]?[0-9]+\.[0-9]+///|] - , [ed|[0-9]+///|] - ] - - -- | Compute the error fingerprint hash using Sentry-style prioritization -- Priority: -- 1. Stack trace (if has meaningful in-app frames) diff --git a/src/Models/Telemetry/Telemetry.hs b/src/Models/Telemetry/Telemetry.hs index 4395084a2..3593c38da 100644 --- a/src/Models/Telemetry/Telemetry.hs +++ b/src/Models/Telemetry/Telemetry.hs @@ -1060,7 +1060,7 @@ extractATError spanObj (AE.Object o) = do Just (AE.Object tel) -> KEM.lookup "sdk" tel >>= ( \case - AE.Object sdkObj -> KEM.lookup "name" sdkObj >>= asText + AE.Object sdkObj -> KEM.lookup "language" sdkObj >>= asText _ -> Nothing ) _ -> Nothing From a37aa8bad69f4262ea27d89810fae4f2b6b3720c Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Sat, 10 Jan 2026 15:43:05 +0000 Subject: [PATCH 37/71] fix cyclic dependency and use new fingerprint function --- src/Models/Apis/Errors.hs | 48 +++++----- src/Models/Telemetry/Telemetry.hs | 10 ++- src/RequestMessages.hs | 140 +---------------------------- src/Utils.hs | 143 ++++++++++++++++++++++++++++++ 4 files changed, 175 insertions(+), 166 deletions(-) diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index 91469f26a..5d7029801 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -60,8 +60,7 @@ import Pkg.DeriveUtils (BaselineState (..)) import Relude hiding (id) import System.Types (DB) import Text.RE.TDFA (RE, SearchReplace, ed, (*=~/)) -import Utils (DBField (MkDBField), toXXHash) -import RequestMessages (replaceAllFormats) +import Utils (DBField (MkDBField), toXXHash, replaceAllFormats) newtype ErrorId = ErrorId {unErrorId :: UUID.UUID} @@ -656,12 +655,12 @@ parseGoFrame line extractGoModuleFromFunc func = let parts = T.splitOn "." func in if length parts > 1 - then Just $ T.intercalate "." $ init parts + then T.intercalate "." 
<$> viaNonEmpty init parts
         else Nothing
 
     extractGoFuncName func =
       let parts = T.splitOn "." func
-      in if not (null parts) then last parts else func
+      in fromMaybe func $ viaNonEmpty last parts
 
     isGoInApp path = not $ any (`T.isInfixOf` path)
       ["go/src/", "pkg/mod/", "vendor/", "/runtime/", "/net/", "/syscall/"]
@@ -732,7 +731,7 @@ parseJsFrame line
 
     extractJsModule path =
-      let baseName = last $ T.splitOn "/" path
+      let baseName = fromMaybe path $ viaNonEmpty last $ T.splitOn "/" path
       in Just $ T.toLower $ fromMaybe baseName $ T.stripSuffix ".js" baseName
         <|> T.stripSuffix ".ts" baseName
         <|> T.stripSuffix ".mjs" baseName
         <|> T.stripSuffix ".cjs" baseName
@@ -741,7 +740,7 @@ parseJsFrame line
     cleanJsFunction func =
       -- Remove namespacing: Object.foo.bar -> bar
       let parts = T.splitOn "." func
-      in if length parts > 1 then last parts else func
+      in fromMaybe func $ viaNonEmpty last parts
 
     isJsInApp path = not $ any (`T.isInfixOf` path)
       ["node_modules/", "<anonymous>", "internal/", "node:"]
@@ -774,7 +773,7 @@ parsePythonFrame line
   | otherwise = Nothing
   where
     extractPythonModule path =
-      let baseName = last $ T.splitOn "/" path
+      let baseName = fromMaybe path $ viaNonEmpty last $ T.splitOn "/" path
           moduleName = fromMaybe baseName $ T.stripSuffix ".py" baseName
       in Just moduleName
 
@@ -816,7 +815,9 @@ parseJavaFrame line
     splitJavaQualified qualified =
       let parts = T.splitOn "." qualified
       in if length parts > 1
-           then (T.intercalate "." $ init parts, last parts)
+           then case (viaNonEmpty init parts, viaNonEmpty last parts) of
+             (Just ps, Just l) -> (T.intercalate "." ps, l)
+             _ -> ("", qualified)
           else ("", qualified)
 
     cleanJavaFunction func =
       -- Remove generics: method<T> -> method
       T.takeWhile (/= '<') func
 
     isJavaInApp qualified = not $ any (`T.isPrefixOf` qualified)
       ["java.", "javax.", "sun.", "com.sun.", "jdk.", "org.springframework."]
@@ -853,7 +854,7 @@ parsePhpFrame line
       in (file, lineNum)
 
     extractPhpModule path =
-      let baseName = last $ T.splitOn "/" path
+      let baseName = fromMaybe path $ viaNonEmpty last $ T.splitOn "/" path
       in Just $ fromMaybe baseName $ T.stripSuffix ".php" baseName
 
     cleanPhpFunction func =
       -- Remove {closure} markers
       T.replace "{closure}" "closure" $
        -- Simplify class::method or class->method
        let parts = T.splitOn "->" func
-        in if length parts > 1 then last parts else
+        in if length parts > 1 then fromMaybe func $ viaNonEmpty last parts else
          let parts' = T.splitOn "::" func
-          in if length parts' > 1 then last parts' else func
+          in if length parts' > 1 then fromMaybe func $ viaNonEmpty last parts' else func
 
     isPhpInApp path = not $ any (`T.isInfixOf` path)
       ["/vendor/", "/phar://"]
 
@@ -893,7 +894,9 @@ parseDotNetFrame line
     splitDotNetQualified qualified =
      let parts = T.splitOn "." qualified
      in if length parts > 1
-           then (T.intercalate "." $ init parts, last parts)
+           then case (viaNonEmpty init parts, viaNonEmpty last parts) of
+             (Just ps, Just l) -> (T.intercalate "." ps, l)
+             _ -> ("", qualified)
          else ("", qualified)
 
     parseDotNetLocation loc =
@@ -952,13 +955,13 @@ normalizeStackTrace runtime stackText =
     normalizeFrame :: StackFrame -> Text
     normalizeFrame frame =
       let modulePart = fromMaybe "" frame.sfModule
-          funcPart = normalizeFunction Nothing frame.sfFunction
+          funcPart = normalizeFunction frame.sfFunction
           -- Context line: normalize whitespace, truncate if > 120 chars
          contextPart = maybe "" normalizeContextLine frame.sfContextLine
      in T.intercalate "|" $ filter (not . T.null) [modulePart, funcPart, contextPart]
 
-    normalizeFunction :: Text -> Text -> Text
-    normalizeFunction _ func =
+    normalizeFunction :: Text -> Text
+    normalizeFunction func =
      -- Common normalizations across platforms:
      -- 1. Remove memory addresses (0x...)
      -- 2. 
Remove generic type parameters @@ -991,8 +994,8 @@ normalizeMessage msg = -- 1. Stack trace (if has meaningful in-app frames) -- 2. Exception type + message -- 3. Message only -computeErrorFingerprint :: Projects.ProjectId -> Maybe Text -> Maybe Text -> Text -> Text -> Text -> Text -> Text -computeErrorFingerprint projectId mService mEndpoint runtime exceptionType message stackTrace = +computeErrorFingerprint :: Text -> Maybe Text -> Maybe Text -> Text -> Text -> Text -> Text -> Text +computeErrorFingerprint projectIdText mService spanName runtime exceptionType message stackTrace = let -- Normalize components normalizedStack = normalizeStackTrace runtime stackTrace normalizedMsg = normalizeMessage message @@ -1002,23 +1005,22 @@ computeErrorFingerprint projectId mService mEndpoint runtime exceptionType messa fingerprintComponents = if hasUsableStackTrace normalizedStack then - [ projectId.toText - , fromMaybe "" mService + [ projectIdText , normalizedType , normalizedStack ] else if not (T.null normalizedType) then - [ projectId.toText + [ projectIdText , fromMaybe "" mService - , fromMaybe "" mEndpoint + , fromMaybe "" spanName , normalizedType , normalizedMsg ] else - [ projectId.toText + [ projectIdText , fromMaybe "" mService - , fromMaybe "" mEndpoint + , fromMaybe "" spanName , normalizedMsg ] diff --git a/src/Models/Telemetry/Telemetry.hs b/src/Models/Telemetry/Telemetry.hs index 3593c38da..1d1824bbe 100644 --- a/src/Models/Telemetry/Telemetry.hs +++ b/src/Models/Telemetry/Telemetry.hs @@ -98,7 +98,7 @@ import System.Logging qualified as Log import System.Types (DB) import Text.Regex.TDFA.Text () import UnliftIO (throwIO, tryAny) -import Utils (lookupValueText, toXXHash) +import Utils (lookupValueText) -- Helper function to get nested value from a map using dot notation @@ -1074,22 +1074,24 @@ extractATError spanObj (AE.Object o) = do asText (AE.String t) = Just t asText _ = Nothing + pid = UUID.fromText spanObj.project_id >>= (Just . UUIDId) -- Build ATError structure for anomaly detection -- The hash is critical for grouping similar errors together -- Hash components: projectId + service + span name + error type + sanitized message/stack -- This ensures similar errors are grouped while allowing variations in the actual message - + -- projectId mService mEndpoint runtime exceptionType message stackTrace + return $ Errors.ATError - { projectId = UUID.fromText spanObj.project_id >>= (Just . 
UUIDId) + { projectId = pid , when = spanObj.timestamp , errorType = typ , rootErrorType = typ , message = msg , rootErrorMessage = msg , stackTrace = stack - , hash = (toXXHash (spanObj.project_id <> <> typ <> replaceAllFormats (msg <> stack))) + , hash = Errors.computeErrorFingerprint spanObj.project_id serviceName spanObj.name (fromMaybe "unknown" tech) typ msg stack , technology = Nothing , serviceName = serviceName , requestMethod = method diff --git a/src/RequestMessages.hs b/src/RequestMessages.hs index 07ec5753d..08915bd3f 100644 --- a/src/RequestMessages.hs +++ b/src/RequestMessages.hs @@ -58,7 +58,7 @@ import Relude import Relude.Unsafe as Unsafe (read) import Text.RE.Replace (matched) import Text.RE.TDFA (RE, SearchReplace, ed, re, (*=~/), (?=~)) -import Utils (DBField (), toXXHash) +import Utils (DBField (), toXXHash, replaceAllFormats) -- $setup @@ -359,144 +359,6 @@ commonFormatPatterns = ] --- | Replaces all format patterns in the input text with their format names --- This function applies all regex patterns and replaces matches until no more replacements are made --- --- >>> map replaceAllFormats ["123", "c73bcdcc-2669-4bf6-81d3-e4ae73fb11fd", "550e8400-e29b-41d4-a716-446655440000", "507f1f77bcf86cd799439011"] --- ["{integer}","{uuid}","{uuid}","{uuid}"] --- --- >>> map replaceAllFormats ["e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "356a192b7913b04c54574d18c28d46e6395428ab", "5d41402abc4b2a76b9719d911017c592"] --- ["{sha256}","{sha1}","{md5}"] --- --- >>> replaceAllFormats "User 123 accessed endpoint c73bcdcc-2669-4bf6-81d3-e4ae73fb11fd" --- "User {integer} accessed endpoint {uuid}" --- --- >>> replaceAllFormats "Error at 192.168.0.1:8080 with status 404" --- "Error at {ipv4}{port} with status {integer}" --- --- >>> map replaceAllFormats ["Server on :8080", "localhost:3000", "api.com:443", ":22"] --- ["Server on {port}","localhost{port}","api.com{port}","{port}"] --- --- >>> replaceAllFormats "Connected to 10.0.0.1:443 and 192.168.1.100:22" --- "Connected to {ipv4}{port} and {ipv4}{port}" --- --- >>> replaceAllFormats "Responses: 200, 301, 404, 500" --- "Responses: {integer}, {integer}, {integer}, {integer}" --- --- >>> replaceAllFormats "GET /api/v2/users/123/orders/456 returned 200" --- "GET /api/v{integer}/users/{integer}/orders/{integer} returned {integer}" --- --- >>> replaceAllFormats "User 550e8400-e29b-41d4-a716-446655440000 connected from 192.168.1.50:9876" --- "User {uuid} connected from {ipv4}{port}" --- --- >>> replaceAllFormats "Hash: a94a8fe5ccb19ba61c4c0873d391e987982fbbd3, Status: 403, Port: :8443" --- "Hash: {sha1}, Status: {integer}, Port: {port}" --- --- >>> replaceAllFormats "Processing request 123456 at 2023-10-15:3000" --- "Processing request {integer} at {integer}-{integer}-{integer}{port}" --- --- >>> map replaceAllFormats ["Mixed: 192.168.1.1 123 :80 404", "0xDEADBEEF", "Values: 999, 1000, 1001"] --- ["Mixed: {ipv4} {integer} {port} {integer}","{hex}","Values: {integer}, {integer}, {integer}"] --- --- >>> map replaceAllFormats ["10.0.0.1", "172.16.0.1", "192.168.1.1", "255.255.255.0"] --- ["{ipv4}","{ipv4}","{ipv4}","{ipv4}"] --- --- >>> map replaceAllFormats ["Multiple formats: 123 abc def456789 :9000", "Log entry 404 at 10.0.0.1:8080"] --- ["Multiple formats: {integer} abc def{integer} {port}","Log entry {integer} at {ipv4}{port}"] -replaceAllFormats :: Text -> Text -replaceAllFormats input = restorePlaceholders $ processPatterns input formatPatternsForReplacement - where - -- Process patterns sequentially with (*=~/) - -- 
Use special Unicode characters as temporary placeholders to prevent re-matching - processPatterns :: Text -> [SearchReplace RE Text] -> Text - processPatterns txt [] = txt - processPatterns txt (sr : rest) = - let newTxt = txt *=~/ sr - in processPatterns newTxt rest - - -- Restore the Unicode placeholders to the final format - restorePlaceholders :: Text -> Text - restorePlaceholders txt = - txt - *=~/ [ed|〖×UUID×〗///{uuid}|] - *=~/ [ed|〖×SHA-TWO-FIVE-SIX×〗///{sha256}|] - *=~/ [ed|〖×SHA-ONE×〗///{sha1}|] - *=~/ [ed|〖×MD-FIVE×〗///{md5}|] - *=~/ [ed|〖×IPV-FOUR×〗///{ipv4}|] - *=~/ [ed|〖×PORT×〗///{port}|] - *=~/ [ed|〖×HEX×〗///{hex}|] - *=~/ [ed|〖×INTEGER×〗///{integer}|] - - -- Use a subset of patterns that should be replaced in text - -- Using Unicode characters with no alphanumeric content that won't match any patterns - formatPatternsForReplacement :: [SearchReplace RE Text] - formatPatternsForReplacement = - [ -- UUIDs and hashes first (most specific) - [ed|[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}///〖×UUID×〗|] - , [ed|[a-fA-F0-9]{64}///〖×SHA-TWO-FIVE-SIX×〗|] - , [ed|[a-fA-F0-9]{40}///〖×SHA-ONE×〗|] - , [ed|[a-fA-F0-9]{32}///〖×MD-FIVE×〗|] - , [ed|[0-9a-fA-F]{24}///〖×UUID×〗|] - , -- Visa 13 or 16 digits - [ed|5[1-5][0-9]{14}///{credit_card}|] - , -- Mastercard - [ed|3[47][0-9]{13}///{credit_card}|] - , -- Amex - [ed|eyJ[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+///{jwt}|] - , -- JWT - -- IBAN (moved before base64) - [ed|[A-Z]{2}[0-9]{2}[A-Za-z0-9]{4}[0-9]{7}[A-Za-z0-9]{0,16}///{iban}|] - , -- Date patterns (before file paths to avoid conflicts) - [ed|(Mon|Tue|Wed|Thu|Fri|Sat|Sun), [0-9]{1,2} (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) [0-9]{4} [0-9]{2}:[0-9]{2}:[0-9]{2} [+\-][0-9]{4}///{rfc2822}|] - , [ed|[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?(Z|[+\-][0-9]{2}:[0-9]{2})?///{YYYY-MM-DDThh:mm:ss.sTZD}|] - , -- ISO 8601 - [ed|[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}///{YYYY-MM-DD HH:MM:SS}|] - , -- MySQL datetime - [ed|[0-9]{2}/[0-9]{2}/[0-9]{4} [0-9]{2}:[0-9]{2}:[0-9]{2}///{MM/DD/YYYY HH:MM:SS}|] - , -- US datetime - [ed|[0-9]{2}-[0-9]{2}-[0-9]{4} [0-9]{2}:[0-9]{2}:[0-9]{2}///{MM-DD-YYYY HH:MM:SS}|] - , [ed|[0-9]{2}\.[0-9]{2}\.[0-9]{4} [0-9]{2}:[0-9]{2}:[0-9]{2}///{DD.MM.YYYY HH:MM:SS}|] - , -- European datetime - [ed|(0[1-9]|[12][0-9]|3[01])[/](0[1-9]|1[012])[/](19|20)[0-9][0-9]///{dd/mm/yyyy}|] - , -- European date - [ed|(0[1-9]|[12][0-9]|3[01])-(0[1-9]|1[012])-(19|20)[0-9][0-9]///{dd-mm-yyyy}|] - , [ed|(0[1-9]|[12][0-9]|3[01])\.(0[1-9]|1[012])\.(19|20)[0-9][0-9]///{dd.mm.yyyy}|] - , [ed|(0[1-9]|1[012])[/](0[1-9]|[12][0-9]|3[01])[/](19|20)[0-9][0-9]///{mm/dd/yyyy}|] - , -- US date - [ed|(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])-(19|20)[0-9][0-9]///{mm-dd-yyyy}|] - , [ed|(0[1-9]|1[012])\.(0[1-9]|[12][0-9]|3[01])\.(19|20)[0-9][0-9]///{mm.dd.yyyy}|] - , -- Note: Removed [0-9]{4}-[0-9]{2}-[0-9]{2} pattern to avoid matching dates followed by ports (e.g. 
2023-10-15:3000) - -- The more specific datetime pattern [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2} handles actual dates with times - [ed|[0-9]{4}/[0-9]{2}/[0-9]{2}///{YYYY/MM/DD}|] - , -- Compact date - [ed|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) [0-9]{1,2}, [0-9]{4}///{Mon DD, YYYY}|] - , -- Long month - [ed|[0-9]{1,2}-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-[0-9]{4}///{DD-Mon-YYYY}|] - , -- Oracle date - -- Time patterns - [ed|[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}///{HH:MM:SS.mmm}|] - , -- Time with milliseconds - [ed|[0-9]{2}:[0-9]{2}:[0-9]{2}///{HH:MM:SS}|] - , -- Time only - [ed|[0-9]{1,2}:[0-9]{2} (AM|PM|am|pm)///{H:MM AM/PM}|] - , -- Personal identifiers - [ed|[0-9]{3}-[0-9]{2}-[0-9]{4}///{ssn}|] - , [ed|\+1 \([0-9]{3}\) [0-9]{3}-[0-9]{4}///{phone}|] - , -- Network patterns - [ed|(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)///〖×IPV-FOUR×〗|] - , [ed|:[0-9]{1,5}///〖×PORT×〗|] - , [ed|https?://[^\s]+///{url}|] - , [ed|([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4}///{ipv6}|] - , [ed|([0-9A-Fa-f]{2}[:-]){5}[0-9A-Fa-f]{2}///{mac}|] - , [ed|[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}///{email}|] - , -- Hex numbers (must come before general integers) - [ed|0x[0-9A-Fa-f]+///〖×HEX×〗|] - , --- float - [ed|[+-]?[0-9]+\.[0-9]+///{float}|] - , -- Numbers (including HTTP status codes) - [ed|[0-9]+///〖×INTEGER×〗|] - ] - -- valueToFormatStr will take a string and try to find a format which matches that string best. -- At the moment it takes a text and returns a generic mask that represents the format of that text diff --git a/src/Utils.hs b/src/Utils.hs index cd1d3e14e..ec0c3998f 100644 --- a/src/Utils.hs +++ b/src/Utils.hs @@ -47,6 +47,7 @@ module Utils ( methodFillColor, levelFillColor, changeTypeFillColor, + replaceAllFormats, ) where @@ -90,6 +91,8 @@ import Text.Printf (printf) import Text.Regex.TDFA ((=~)) import Text.Show import "base64" Data.ByteString.Base64 qualified as B64 +import Text.RE.TDFA (RE, SearchReplace, ed, re, (*=~/), (?=~)) + -- Added only for satisfying the tests @@ -767,3 +770,143 @@ getAlertStatusColor status = case status of "Alerting" -> "badge-error" "Warning" -> "badge-warning" _ -> "badge-success" + + +-- | Replaces all format patterns in the input text with their format names +-- This function applies all regex patterns and replaces matches until no more replacements are made +-- +-- >>> map replaceAllFormats ["123", "c73bcdcc-2669-4bf6-81d3-e4ae73fb11fd", "550e8400-e29b-41d4-a716-446655440000", "507f1f77bcf86cd799439011"] +-- ["{integer}","{uuid}","{uuid}","{uuid}"] +-- +-- >>> map replaceAllFormats ["e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "356a192b7913b04c54574d18c28d46e6395428ab", "5d41402abc4b2a76b9719d911017c592"] +-- ["{sha256}","{sha1}","{md5}"] +-- +-- >>> replaceAllFormats "User 123 accessed endpoint c73bcdcc-2669-4bf6-81d3-e4ae73fb11fd" +-- "User {integer} accessed endpoint {uuid}" +-- +-- >>> replaceAllFormats "Error at 192.168.0.1:8080 with status 404" +-- "Error at {ipv4}{port} with status {integer}" +-- +-- >>> map replaceAllFormats ["Server on :8080", "localhost:3000", "api.com:443", ":22"] +-- ["Server on {port}","localhost{port}","api.com{port}","{port}"] +-- +-- >>> replaceAllFormats "Connected to 10.0.0.1:443 and 192.168.1.100:22" +-- "Connected to {ipv4}{port} and {ipv4}{port}" +-- +-- >>> replaceAllFormats "Responses: 200, 301, 404, 500" +-- "Responses: {integer}, {integer}, 
{integer}, {integer}" +-- +-- >>> replaceAllFormats "GET /api/v2/users/123/orders/456 returned 200" +-- "GET /api/v{integer}/users/{integer}/orders/{integer} returned {integer}" +-- +-- >>> replaceAllFormats "User 550e8400-e29b-41d4-a716-446655440000 connected from 192.168.1.50:9876" +-- "User {uuid} connected from {ipv4}{port}" +-- +-- >>> replaceAllFormats "Hash: a94a8fe5ccb19ba61c4c0873d391e987982fbbd3, Status: 403, Port: :8443" +-- "Hash: {sha1}, Status: {integer}, Port: {port}" +-- +-- >>> replaceAllFormats "Processing request 123456 at 2023-10-15:3000" +-- "Processing request {integer} at {integer}-{integer}-{integer}{port}" +-- +-- >>> map replaceAllFormats ["Mixed: 192.168.1.1 123 :80 404", "0xDEADBEEF", "Values: 999, 1000, 1001"] +-- ["Mixed: {ipv4} {integer} {port} {integer}","{hex}","Values: {integer}, {integer}, {integer}"] +-- +-- >>> map replaceAllFormats ["10.0.0.1", "172.16.0.1", "192.168.1.1", "255.255.255.0"] +-- ["{ipv4}","{ipv4}","{ipv4}","{ipv4}"] +-- +-- >>> map replaceAllFormats ["Multiple formats: 123 abc def456789 :9000", "Log entry 404 at 10.0.0.1:8080"] +-- ["Multiple formats: {integer} abc def{integer} {port}","Log entry {integer} at {ipv4}{port}"] +replaceAllFormats :: Text -> Text +replaceAllFormats input = restorePlaceholders $ processPatterns input formatPatternsForReplacement + where + -- Process patterns sequentially with (*=~/) + -- Use special Unicode characters as temporary placeholders to prevent re-matching + processPatterns :: Text -> [SearchReplace RE Text] -> Text + processPatterns txt [] = txt + processPatterns txt (sr : rest) = + let newTxt = txt *=~/ sr + in processPatterns newTxt rest + + -- Restore the Unicode placeholders to the final format + restorePlaceholders :: Text -> Text + restorePlaceholders txt = + txt + *=~/ [ed|〖×UUID×〗///{uuid}|] + *=~/ [ed|〖×SHA-TWO-FIVE-SIX×〗///{sha256}|] + *=~/ [ed|〖×SHA-ONE×〗///{sha1}|] + *=~/ [ed|〖×MD-FIVE×〗///{md5}|] + *=~/ [ed|〖×IPV-FOUR×〗///{ipv4}|] + *=~/ [ed|〖×PORT×〗///{port}|] + *=~/ [ed|〖×HEX×〗///{hex}|] + *=~/ [ed|〖×INTEGER×〗///{integer}|] + + -- Use a subset of patterns that should be replaced in text + -- Using Unicode characters with no alphanumeric content that won't match any patterns + formatPatternsForReplacement :: [SearchReplace RE Text] + formatPatternsForReplacement = + [ -- UUIDs and hashes first (most specific) + [ed|[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}///〖×UUID×〗|] + , [ed|[a-fA-F0-9]{64}///〖×SHA-TWO-FIVE-SIX×〗|] + , [ed|[a-fA-F0-9]{40}///〖×SHA-ONE×〗|] + , [ed|[a-fA-F0-9]{32}///〖×MD-FIVE×〗|] + , [ed|[0-9a-fA-F]{24}///〖×UUID×〗|] + , -- Visa 13 or 16 digits + [ed|5[1-5][0-9]{14}///{credit_card}|] + , -- Mastercard + [ed|3[47][0-9]{13}///{credit_card}|] + , -- Amex + [ed|eyJ[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+///{jwt}|] + , -- JWT + -- IBAN (moved before base64) + [ed|[A-Z]{2}[0-9]{2}[A-Za-z0-9]{4}[0-9]{7}[A-Za-z0-9]{0,16}///{iban}|] + , -- Date patterns (before file paths to avoid conflicts) + [ed|(Mon|Tue|Wed|Thu|Fri|Sat|Sun), [0-9]{1,2} (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) [0-9]{4} [0-9]{2}:[0-9]{2}:[0-9]{2} [+\-][0-9]{4}///{rfc2822}|] + , [ed|[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?(Z|[+\-][0-9]{2}:[0-9]{2})?///{YYYY-MM-DDThh:mm:ss.sTZD}|] + , -- ISO 8601 + [ed|[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}///{YYYY-MM-DD HH:MM:SS}|] + , -- MySQL datetime + [ed|[0-9]{2}/[0-9]{2}/[0-9]{4} [0-9]{2}:[0-9]{2}:[0-9]{2}///{MM/DD/YYYY HH:MM:SS}|] + , -- US datetime + [ed|[0-9]{2}-[0-9]{2}-[0-9]{4} 
[0-9]{2}:[0-9]{2}:[0-9]{2}///{MM-DD-YYYY HH:MM:SS}|] + , [ed|[0-9]{2}\.[0-9]{2}\.[0-9]{4} [0-9]{2}:[0-9]{2}:[0-9]{2}///{DD.MM.YYYY HH:MM:SS}|] + , -- European datetime + [ed|(0[1-9]|[12][0-9]|3[01])[/](0[1-9]|1[012])[/](19|20)[0-9][0-9]///{dd/mm/yyyy}|] + , -- European date + [ed|(0[1-9]|[12][0-9]|3[01])-(0[1-9]|1[012])-(19|20)[0-9][0-9]///{dd-mm-yyyy}|] + , [ed|(0[1-9]|[12][0-9]|3[01])\.(0[1-9]|1[012])\.(19|20)[0-9][0-9]///{dd.mm.yyyy}|] + , [ed|(0[1-9]|1[012])[/](0[1-9]|[12][0-9]|3[01])[/](19|20)[0-9][0-9]///{mm/dd/yyyy}|] + , -- US date + [ed|(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])-(19|20)[0-9][0-9]///{mm-dd-yyyy}|] + , [ed|(0[1-9]|1[012])\.(0[1-9]|[12][0-9]|3[01])\.(19|20)[0-9][0-9]///{mm.dd.yyyy}|] + , -- Note: Removed [0-9]{4}-[0-9]{2}-[0-9]{2} pattern to avoid matching dates followed by ports (e.g. 2023-10-15:3000) + -- The more specific datetime pattern [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2} handles actual dates with times + [ed|[0-9]{4}/[0-9]{2}/[0-9]{2}///{YYYY/MM/DD}|] + , -- Compact date + [ed|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) [0-9]{1,2}, [0-9]{4}///{Mon DD, YYYY}|] + , -- Long month + [ed|[0-9]{1,2}-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-[0-9]{4}///{DD-Mon-YYYY}|] + , -- Oracle date + -- Time patterns + [ed|[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}///{HH:MM:SS.mmm}|] + , -- Time with milliseconds + [ed|[0-9]{2}:[0-9]{2}:[0-9]{2}///{HH:MM:SS}|] + , -- Time only + [ed|[0-9]{1,2}:[0-9]{2} (AM|PM|am|pm)///{H:MM AM/PM}|] + , -- Personal identifiers + [ed|[0-9]{3}-[0-9]{2}-[0-9]{4}///{ssn}|] + , [ed|\+1 \([0-9]{3}\) [0-9]{3}-[0-9]{4}///{phone}|] + , -- Network patterns + [ed|(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)///〖×IPV-FOUR×〗|] + , [ed|:[0-9]{1,5}///〖×PORT×〗|] + , [ed|https?://[^\s]+///{url}|] + , [ed|([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4}///{ipv6}|] + , [ed|([0-9A-Fa-f]{2}[:-]){5}[0-9A-Fa-f]{2}///{mac}|] + , [ed|[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}///{email}|] + , -- Hex numbers (must come before general integers) + [ed|0x[0-9A-Fa-f]+///〖×HEX×〗|] + , --- float + [ed|[+-]?[0-9]+\.[0-9]+///{float}|] + , -- Numbers (including HTTP status codes) + [ed|[0-9]+///〖×INTEGER×〗|] + ] + From 39f9003ad75dc6d902505fbfeebef099b9a99727 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sat, 10 Jan 2026 15:43:45 +0000 Subject: [PATCH 38/71] Auto-format code with fourmolu --- src/Models/Apis/Errors.hs | 354 +++++++++++++++++------------- src/Models/Telemetry/Telemetry.hs | 2 +- src/RequestMessages.hs | 3 +- src/Utils.hs | 4 +- 4 files changed, 205 insertions(+), 158 deletions(-) diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index 5d7029801..cf6164d8f 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -60,7 +60,7 @@ import Pkg.DeriveUtils (BaselineState (..)) import Relude hiding (id) import System.Types (DB) import Text.RE.TDFA (RE, SearchReplace, ed, (*=~/)) -import Utils (DBField (MkDBField), toXXHash, replaceAllFormats) +import Utils (DBField (MkDBField), replaceAllFormats, toXXHash) newtype ErrorId = ErrorId {unErrorId :: UUID.UUID} @@ -578,16 +578,22 @@ upsertErrorQueryAndParam pid err = (q, params) -- - Limits to first 2 non-empty lines -- - Replaces UUIDs, IPs, emails, timestamps, numbers with placeholders - -- | Represents a parsed stack frame data StackFrame = StackFrame - { sfFilePath :: Text -- ^ Full file path or module path - , sfModule :: Maybe Text -- ^ 
Module/package name (extracted from path) - , sfFunction :: Text -- ^ Function/method name - , sfLineNumber :: Maybe Int -- ^ Line number - , sfColumnNumber :: Maybe Int -- ^ Column number - , sfContextLine :: Maybe Text -- ^ Source code at this frame (if available) - , sfIsInApp :: Bool -- ^ Whether this is application code vs library/system + { sfFilePath :: Text + -- ^ Full file path or module path + , sfModule :: Maybe Text + -- ^ Module/package name (extracted from path) + , sfFunction :: Text + -- ^ Function/method name + , sfLineNumber :: Maybe Int + -- ^ Line number + , sfColumnNumber :: Maybe Int + -- ^ Column number + , sfContextLine :: Maybe Text + -- ^ Source code at this frame (if available) + , sfIsInApp :: Bool + -- ^ Whether this is application code vs library/system } deriving stock (Eq, Generic, Show) deriving anyclass (NFData) @@ -613,37 +619,40 @@ parseStackFrame runtime line = "dotnet" -> parseDotNetFrame trimmed _ -> parseGenericFrame trimmed + -- | Parse Go stack frame: "goroutine 1 [running]:" or "main.foo(0x1234)" -- Format: package.function(args) or /path/to/file.go:123 +0x1f parseGoFrame :: Text -> Maybe StackFrame parseGoFrame line - | "goroutine" `T.isPrefixOf` line = Nothing -- Skip goroutine headers + | "goroutine" `T.isPrefixOf` line = Nothing -- Skip goroutine headers | ".go:" `T.isInfixOf` line = -- File path line: /path/to/file.go:123 +0x1f let (pathPart, _) = T.breakOn " +" line (filePath, lineCol) = T.breakOnEnd ":" pathPart lineNum = readMaybe $ toString $ T.takeWhile (/= ':') lineCol - in Just StackFrame - { sfFilePath = T.dropEnd 1 filePath -- Remove trailing ':' - , sfModule = extractGoModule filePath - , sfFunction = "" - , sfLineNumber = lineNum - , sfColumnNumber = Nothing - , sfContextLine = Nothing - , sfIsInApp = isGoInApp filePath - } + in Just + StackFrame + { sfFilePath = T.dropEnd 1 filePath -- Remove trailing ':' + , sfModule = extractGoModule filePath + , sfFunction = "" + , sfLineNumber = lineNum + , sfColumnNumber = Nothing + , sfContextLine = Nothing + , sfIsInApp = isGoInApp filePath + } | "(" `T.isInfixOf` line = -- Function call line: main.foo(0x1234, 0x5678) let (funcPart, _) = T.breakOn "(" line - in Just StackFrame - { sfFilePath = "" - , sfModule = extractGoModuleFromFunc funcPart - , sfFunction = extractGoFuncName funcPart - , sfLineNumber = Nothing - , sfColumnNumber = Nothing - , sfContextLine = Nothing - , sfIsInApp = isGoFuncInApp funcPart - } + in Just + StackFrame + { sfFilePath = "" + , sfModule = extractGoModuleFromFunc funcPart + , sfFunction = extractGoFuncName funcPart + , sfLineNumber = Nothing + , sfColumnNumber = Nothing + , sfContextLine = Nothing + , sfIsInApp = isGoFuncInApp funcPart + } | otherwise = Nothing where extractGoModule path = @@ -662,11 +671,17 @@ parseGoFrame line let parts = T.splitOn "." func in fromMaybe func $ viaNonEmpty last parts - isGoInApp path = not $ any (`T.isInfixOf` path) - ["go/src/", "pkg/mod/", "vendor/", "/runtime/", "/net/", "/syscall/"] + isGoInApp path = + not + $ any + (`T.isInfixOf` path) + ["go/src/", "pkg/mod/", "vendor/", "/runtime/", "/net/", "/syscall/"] - isGoFuncInApp func = not $ any (`T.isPrefixOf` func) - ["runtime.", "syscall.", "net.", "net/http.", "reflect."] + isGoFuncInApp func = + not + $ any + (`T.isPrefixOf` func) + ["runtime.", "syscall.", "net.", "net/http.", "reflect."] -- | Parse JavaScript stack frame @@ -679,9 +694,10 @@ parseJsFrame line | "at " `T.isPrefixOf` T.strip line = let content = T.strip $ T.drop 3 $ T.strip line -- Handle "at async ..." 
- content' = if "async " `T.isPrefixOf` content - then T.drop 6 content - else content + content' = + if "async " `T.isPrefixOf` content + then T.drop 6 content + else content in if "(" `T.isInfixOf` content' then parseJsWithParens content' else parseJsWithoutParens content' @@ -692,28 +708,30 @@ parseJsFrame line locationPart = T.dropAround (\c -> c == '(' || c == ')') rest (filePath, lineCol) = parseJsLocation locationPart (lineNum, colNum) = parseLineCol lineCol - in Just StackFrame - { sfFilePath = filePath - , sfModule = extractJsModule filePath - , sfFunction = cleanJsFunction funcPart - , sfLineNumber = lineNum - , sfColumnNumber = colNum - , sfContextLine = Nothing - , sfIsInApp = isJsInApp filePath - } + in Just + StackFrame + { sfFilePath = filePath + , sfModule = extractJsModule filePath + , sfFunction = cleanJsFunction funcPart + , sfLineNumber = lineNum + , sfColumnNumber = colNum + , sfContextLine = Nothing + , sfIsInApp = isJsInApp filePath + } parseJsWithoutParens txt = let (filePath, lineCol) = parseJsLocation txt (lineNum, colNum) = parseLineCol lineCol - in Just StackFrame - { sfFilePath = filePath - , sfModule = extractJsModule filePath - , sfFunction = "" - , sfLineNumber = lineNum - , sfColumnNumber = colNum - , sfContextLine = Nothing - , sfIsInApp = isJsInApp filePath - } + in Just + StackFrame + { sfFilePath = filePath + , sfModule = extractJsModule filePath + , sfFunction = "" + , sfLineNumber = lineNum + , sfColumnNumber = colNum + , sfContextLine = Nothing + , sfIsInApp = isJsInApp filePath + } parseJsLocation loc = -- Split from the right to handle paths with colons (Windows) @@ -732,18 +750,24 @@ parseJsFrame line extractJsModule path = let baseName = fromMaybe path $ viaNonEmpty last $ T.splitOn "/" path - in Just $ T.toLower $ fromMaybe baseName $ T.stripSuffix ".js" baseName - <|> T.stripSuffix ".ts" baseName - <|> T.stripSuffix ".mjs" baseName - <|> T.stripSuffix ".cjs" baseName + in Just + $ T.toLower + $ fromMaybe baseName + $ T.stripSuffix ".js" baseName + <|> T.stripSuffix ".ts" baseName + <|> T.stripSuffix ".mjs" baseName + <|> T.stripSuffix ".cjs" baseName cleanJsFunction func = -- Remove namespacing: Object.foo.bar -> bar let parts = T.splitOn "." 
func
      in fromMaybe func $ viaNonEmpty last parts
+
+    isJsInApp path =
+      not
+        $ any
+          (`T.isInfixOf` path)
+          ["node_modules/", "<anonymous>", "internal/", "node:"]
 
 
 -- | Parse Python stack frame
@@ -751,25 +775,26 @@ parsePythonFrame :: Text -> Maybe StackFrame
 parsePythonFrame line
   | "File \"" `T.isPrefixOf` T.strip line =
-      let content = T.drop 6 $ T.strip line -- Remove 'File "'
+      let content = T.drop 6 $ T.strip line -- Remove 'File "'
           (filePath, rest) = T.breakOn "\"" content
           -- Parse ", line 123, in func_name"
-          parts = T.splitOn ", " $ T.drop 2 rest -- Skip '",'
+          parts = T.splitOn ", " $ T.drop 2 rest -- Skip '",'
          lineNum = case find ("line " `T.isPrefixOf`) parts of
-            Just p -> readMaybe $ toString $ T.drop 5 p
-            Nothing -> Nothing
+              Just p -> readMaybe $ toString $ T.drop 5 p
+              Nothing -> Nothing
          funcName = case find ("in " `T.isPrefixOf`) parts of
-            Just p -> T.drop 3 p
-            Nothing -> ""
-      in Just StackFrame
-        { sfFilePath = filePath
+              Just p -> T.drop 3 p
+              Nothing -> ""
+       in Just
+            StackFrame
+              { sfFilePath = filePath
              , sfModule = extractPythonModule filePath
              , sfFunction = cleanPythonFunction funcName
              , sfLineNumber = lineNum
              , sfColumnNumber = Nothing
              , sfContextLine = Nothing
              , sfIsInApp = isPythonInApp filePath
              }
   | otherwise = Nothing
   where
     extractPythonModule path =
@@ -779,12 +804,15 @@ parsePythonFrame line
     cleanPythonFunction func =
       -- Remove lambda indicators
-      T.replace "<lambda>" "lambda" $
-        T.replace "<listcomp>" "listcomp" $
-          T.replace "<dictcomp>" "dictcomp" func
+      T.replace "<lambda>" "lambda"
+        $ T.replace "<listcomp>" "listcomp"
+        $ T.replace "<dictcomp>" "dictcomp" func
 
-    isPythonInApp path = not $ any (`T.isInfixOf` path)
-      ["site-packages/", "dist-packages/", "/lib/python", "<frozen"]
+    isPythonInApp path =
+      not
+        $ any
+          (`T.isInfixOf` path)
+          ["site-packages/", "dist-packages/", "/lib/python", "<frozen"]
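 
 -- Editorial aside (not part of the original patch): a doctest-style sketch of
 -- the Python parser above, assuming OverloadedRecordDot is enabled.
 -- >>> (.sfFunction) <$> parsePythonFrame "  File \"/app/handlers/user.py\", line 88, in get_user"
 -- Just "get_user"
 -- >>> (.sfLineNumber) =<< parsePythonFrame "  File \"/app/handlers/user.py\", line 88, in get_user"
 -- Just 88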
 
 
 -- | Parse Java stack frame
 parseJavaFrame :: Text -> Maybe StackFrame
 parseJavaFrame line
   | "at " `T.isPrefixOf` T.strip line =
       let content = T.drop 3 $ T.strip line
           (qualifiedMethod, rest) = T.breakOn "(" content
           locationPart = T.dropAround (\c -> c == '(' || c == ')') rest
          (fileName, lineNum) = parseJavaLocation locationPart
          (moduleName, funcName) = splitJavaQualified qualifiedMethod
-      in Just StackFrame
-        { sfFilePath = fileName
+       in Just
+            StackFrame
+              { sfFilePath = fileName
              , sfModule = Just moduleName
              , sfFunction = cleanJavaFunction funcName
              , sfLineNumber = lineNum
              , sfColumnNumber = Nothing
              , sfContextLine = Nothing
              , sfIsInApp = isJavaInApp qualifiedMethod
              }
   | otherwise = Nothing
   where
     parseJavaLocation loc =
@@ -816,16 +845,19 @@ parseJavaFrame line
       let parts = T.splitOn "." qualified
       in if length parts > 1
           then case (viaNonEmpty init parts, viaNonEmpty last parts) of
-            (Just ps, Just l) -> (T.intercalate "." ps, l)
-            _ -> ("", qualified)
+              (Just ps, Just l) -> (T.intercalate "." ps, l)
+              _ -> ("", qualified)
           else ("", qualified)
 
     cleanJavaFunction func =
       -- Remove generics: method<T> -> method
       T.takeWhile (/= '<') func
 
-    isJavaInApp qualified = not $ any (`T.isPrefixOf` qualified)
-      ["java.", "javax.", "sun.", "com.sun.", "jdk.", "org.springframework."]
+    isJavaInApp qualified =
+      not
+        $ any
+          (`T.isPrefixOf` qualified)
+          ["java.", "javax.", "sun.", "com.sun.", "jdk.", "org.springframework."]
 
 
 -- | Parse PHP stack frame
@@ -833,19 +865,20 @@ parsePhpFrame :: Text -> Maybe StackFrame
 parsePhpFrame line
   | "#" `T.isPrefixOf` T.strip line =
-      let content = T.drop 1 $ T.dropWhile (/= ' ') $ T.strip line -- Skip "#N "
+      let content = T.drop 1 $ T.dropWhile (/= ' ') $ T.strip line -- Skip "#N "
          (pathPart, funcPart) = T.breakOn ": " content
          (filePath, lineNum) = parsePhpPath pathPart
          funcName = T.takeWhile (/= '(') $ T.drop 2 funcPart
-      in Just StackFrame
-        { sfFilePath = filePath
+       in Just
+            StackFrame
+              { sfFilePath = filePath
              , sfModule = extractPhpModule filePath
              , sfFunction = cleanPhpFunction funcName
              , sfLineNumber = lineNum
              , sfColumnNumber = Nothing
              , sfContextLine = Nothing
              , sfIsInApp = isPhpInApp filePath
              }
   | otherwise = Nothing
   where
     parsePhpPath path =
       let (file, lineStr) = T.breakOn "(" path
           lineNum = readMaybe $ toString $ T.takeWhile (/= ')') $ T.drop 1 lineStr
       in (file, lineNum)
 
     extractPhpModule path =
       let baseName = fromMaybe path $ viaNonEmpty last $ T.splitOn "/" path
       in Just $ fromMaybe baseName $ T.stripSuffix ".php" baseName
 
     cleanPhpFunction func =
       -- Remove {closure} markers
-      T.replace "{closure}" "closure" $
-        -- Simplify class::method or class->method
-        let parts = T.splitOn "->" func
-        in if length parts > 1 then fromMaybe func $ viaNonEmpty last parts else
-          let parts' = T.splitOn "::" func
-          in if length parts' > 1 then fromMaybe func $ viaNonEmpty last parts' else func
+      T.replace "{closure}" "closure"
+        $
+        -- Simplify class::method or class->method
+        let parts = T.splitOn "->" func
+         in if length parts > 1
+              then fromMaybe func $ viaNonEmpty last parts
+              else
+                let parts' = T.splitOn "::" func
+                 in if length parts' > 1 then fromMaybe func $ viaNonEmpty last parts' else func
 
-    isPhpInApp path = not $ any (`T.isInfixOf` path)
-      ["/vendor/", "/phar://"]
+    isPhpInApp path =
+      not
+        $ any
+          (`T.isInfixOf` path)
+          ["/vendor/", "/phar://"]
 
 
 -- | Parse .NET stack frame
@@ -880,23 +919,24 @@ parseDotNetFrame line
       let content = T.drop 3 $ T.strip line
          (methodPart, locationPart) = T.breakOn " in " content
          qualifiedMethod = T.takeWhile (/= '(') methodPart
          (moduleName, funcName) = splitDotNetQualified qualifiedMethod
          (filePath, lineNum) = parseDotNetLocation $ T.drop 4 locationPart
-      in Just StackFrame
-        { sfFilePath = filePath
+       in Just
+            StackFrame
+              { sfFilePath = filePath
              , sfModule = Just moduleName
              , sfFunction = cleanDotNetFunction funcName
              , sfLineNumber = lineNum
              , sfColumnNumber = Nothing
              , sfContextLine = Nothing
              , sfIsInApp = isDotNetInApp qualifiedMethod
              }
   | otherwise = Nothing
   where
     splitDotNetQualified qualified =
      let parts = T.splitOn "." qualified
      in if length parts > 1
          then case (viaNonEmpty init parts, viaNonEmpty last parts) of
-            (Just ps, Just l) -> (T.intercalate "." 
ps, l) + _ -> ("", qualified) else ("", qualified) parseDotNetLocation loc = @@ -907,8 +947,11 @@ parseDotNetFrame line -- Remove generic arity: Method`1 -> Method T.takeWhile (/= '`') func - isDotNetInApp qualified = not $ any (`T.isPrefixOf` qualified) - ["System.", "Microsoft.", "Newtonsoft."] + isDotNetInApp qualified = + not + $ any + (`T.isPrefixOf` qualified) + ["System.", "Microsoft.", "Newtonsoft."] -- | Generic stack frame parser for unknown formats @@ -917,15 +960,17 @@ parseGenericFrame line = let trimmed = T.strip line in if T.null trimmed || "..." `T.isPrefixOf` trimmed then Nothing - else Just StackFrame - { sfFilePath = trimmed - , sfModule = Nothing - , sfFunction = extractGenericFunction trimmed - , sfLineNumber = extractGenericLineNumber trimmed - , sfColumnNumber = Nothing - , sfContextLine = Nothing - , sfIsInApp = True -- Assume in-app by default - } + else + Just + StackFrame + { sfFilePath = trimmed + , sfModule = Nothing + , sfFunction = extractGenericFunction trimmed + , sfLineNumber = extractGenericLineNumber trimmed + , sfColumnNumber = Nothing + , sfContextLine = Nothing + , sfIsInApp = True -- Assume in-app by default + } where extractGenericFunction txt = -- Try to find function-like patterns @@ -976,9 +1021,10 @@ normalizeStackTrace runtime stackText = normalizeContextLine ctx = let normalized = T.unwords $ T.words ctx in if T.length normalized > 120 - then "" -- Skip overly long context lines (like Sentry does) + then "" -- Skip overly long context lines (like Sentry does) else normalized + -- | Normalize an error message for fingerprinting -- Limits to first 2 non-empty lines and replaces variable content normalizeMessage :: Text -> Text @@ -989,27 +1035,30 @@ normalizeMessage msg = normalized = replaceAllFormats combined in T.strip normalized + -- | Compute the error fingerprint hash using Sentry-style prioritization -- Priority: -- 1. Stack trace (if has meaningful in-app frames) -- 2. Exception type + message -- 3. Message only -computeErrorFingerprint :: Text -> Maybe Text -> Maybe Text -> Text -> Text -> Text -> Text -> Text +computeErrorFingerprint :: Text -> Maybe Text -> Maybe Text -> Text -> Text -> Text -> Text -> Text computeErrorFingerprint projectIdText mService spanName runtime exceptionType message stackTrace = - let -- Normalize components - normalizedStack = normalizeStackTrace runtime stackTrace - normalizedMsg = normalizeMessage message - normalizedType = T.strip exceptionType - - -- Build fingerprint components based on priority - fingerprintComponents = - if hasUsableStackTrace normalizedStack - then - [ projectIdText - , normalizedType - , normalizedStack - ] - else if not (T.null normalizedType) + let + -- Normalize components + normalizedStack = normalizeStackTrace runtime stackTrace + normalizedMsg = normalizeMessage message + normalizedType = T.strip exceptionType + + -- Build fingerprint components based on priority + fingerprintComponents = + if hasUsableStackTrace normalizedStack + then + [ projectIdText + , normalizedType + , normalizedStack + ] + else + if not (T.null normalizedType) then [ projectIdText , fromMaybe "" mService @@ -1024,9 +1073,10 @@ computeErrorFingerprint projectIdText mService spanName runtime exceptionType me , normalizedMsg ] - -- Combine and hash - combined = T.intercalate "|" $ filter (not . T.null) fingerprintComponents - in toXXHash combined + -- Combine and hash + combined = T.intercalate "|" $ filter (not . 
T.null) fingerprintComponents + in + toXXHash combined where hasUsableStackTrace :: Text -> Bool hasUsableStackTrace normalized = diff --git a/src/Models/Telemetry/Telemetry.hs b/src/Models/Telemetry/Telemetry.hs index 1d1824bbe..da7556aaf 100644 --- a/src/Models/Telemetry/Telemetry.hs +++ b/src/Models/Telemetry/Telemetry.hs @@ -1091,7 +1091,7 @@ extractATError spanObj (AE.Object o) = do , message = msg , rootErrorMessage = msg , stackTrace = stack - , hash = Errors.computeErrorFingerprint spanObj.project_id serviceName spanObj.name (fromMaybe "unknown" tech) typ msg stack + , hash = Errors.computeErrorFingerprint spanObj.project_id serviceName spanObj.name (fromMaybe "unknown" tech) typ msg stack , technology = Nothing , serviceName = serviceName , requestMethod = method diff --git a/src/RequestMessages.hs b/src/RequestMessages.hs index 08915bd3f..3cb8a99db 100644 --- a/src/RequestMessages.hs +++ b/src/RequestMessages.hs @@ -58,7 +58,7 @@ import Relude import Relude.Unsafe as Unsafe (read) import Text.RE.Replace (matched) import Text.RE.TDFA (RE, SearchReplace, ed, re, (*=~/), (?=~)) -import Utils (DBField (), toXXHash, replaceAllFormats) +import Utils (DBField (), replaceAllFormats, toXXHash) -- $setup @@ -359,7 +359,6 @@ commonFormatPatterns = ] - -- valueToFormatStr will take a string and try to find a format which matches that string best. -- At the moment it takes a text and returns a generic mask that represents the format of that text -- diff --git a/src/Utils.hs b/src/Utils.hs index ec0c3998f..f5ef4c4e5 100644 --- a/src/Utils.hs +++ b/src/Utils.hs @@ -88,11 +88,10 @@ import Pkg.THUtils (hashFile) import Relude hiding (notElem, show) import Servant import Text.Printf (printf) +import Text.RE.TDFA (RE, SearchReplace, ed, re, (*=~/), (?=~)) import Text.Regex.TDFA ((=~)) import Text.Show import "base64" Data.ByteString.Base64 qualified as B64 -import Text.RE.TDFA (RE, SearchReplace, ed, re, (*=~/), (?=~)) - -- Added only for satisfying the tests @@ -909,4 +908,3 @@ replaceAllFormats input = restorePlaceholders $ processPatterns input formatPatt , -- Numbers (including HTTP status codes) [ed|[0-9]+///〖×INTEGER×〗|] ] - From 9042cf5ae505dd66fe104bf5fd3dee0c111fdb1b Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Sat, 10 Jan 2026 15:45:11 +0000 Subject: [PATCH 39/71] remove proto lens contraint --- cabal.project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index d2c63590b..361bb2cf5 100644 --- a/cabal.project +++ b/cabal.project @@ -109,5 +109,5 @@ source-repository-package constraints: streamly ^>=0.10.0, - proto-lens-setup ==0.4.0.9 + effectful-postgresql +enable-otel allow-newer: all From c1e0db474005bad7addde749f863362f40b4432d Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Sat, 10 Jan 2026 20:35:04 +0000 Subject: [PATCH 40/71] bug fixes and cleanups --- src/BackgroundJobs.hs | 119 +-- src/Models/Apis/Issues.hs | 819 +++++------------- src/Models/Apis/Issues/Enhancement.hs | 120 +-- src/Pages/Anomalies.hs | 84 +- src/Pages/Reports.hs | 2 +- .../migrations/0028_rebuild_issues_table.sql | 1 - static/migrations/0032_drop_issue_index.sql | 4 + 7 files changed, 310 insertions(+), 839 deletions(-) create mode 100644 static/migrations/0032_drop_issue_index.sql diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index fe9755d19..c91f52b20 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -90,14 +90,6 @@ data BgJobs = InviteUserToProject Users.UserId Projects.ProjectId Text Text | CreatedProjectSuccessfully 
Users.UserId Projects.ProjectId Text Text | SendDiscordData Users.UserId Projects.ProjectId Text [Text] Text - | -- NewAnomaly Projects.ProjectId Anomalies.AnomalyTypes Anomalies.AnomalyActions TargetHash - NewAnomaly - { projectId :: Projects.ProjectId - , createdAt :: ZonedTime - , anomalyType :: Text - , anomalyAction :: Text - , targetHashes :: [Text] - } | DailyReports Projects.ProjectId | WeeklyReports Projects.ProjectId | DailyJob @@ -187,7 +179,6 @@ processBackgroundJob authCtx bgJob = case bgJob of GenerateOtelFacetsBatch pids timestamp -> generateOtelFacetsBatch pids timestamp QueryMonitorsTriggered queryMonitorIds -> queryMonitorsTriggered queryMonitorIds authCtx - NewAnomaly{projectId, createdAt, anomalyType, anomalyAction, targetHashes} -> newAnomalyJob projectId createdAt anomalyType anomalyAction (V.fromList targetHashes) InviteUserToProject userId projectId reciever projectTitle' -> do userM <- Users.userById userId whenJust userM \user -> do @@ -1097,7 +1088,7 @@ sendReportForProject pid rType = do slowQueriesCount = V.length slowDbQueries slowQueriesList = if slowQueriesCount == 0 then [AE.object ["message" AE..= "No slow queries detected."]] else (\(x, y, z) -> AE.object ["statement" AE..= x, "latency" AE..= z, "total" AE..= y]) <$> slowDbQueries totalAnomalies = length anomalies' - (errTotal, apiTotal, qTotal) = L.foldl (\(e, a, m) (_, _, _, _, t) -> (e + if t == Issues.RuntimeException then 1 else 0, a + if t == Issues.APIChange then 1 else 0, m + if t == Issues.QueryAlert then 1 else 0)) (0, 0, 0) anomalies' + (errTotal, apiTotal, qTotal) = L.foldl (\(e, a, m) (_, _, _, _, t) -> (e + if t == Issues.RuntimeException then 1 else 0, a + if t == Issues.NewEndpoint then 1 else 0, m + if t == Issues.QueryAlert then 1 else 0)) (0, 0, 0) anomalies' runtimeErrorsBarPercentage = if totalAnomalies == 0 then 0 else (fromIntegral errTotal / fromIntegral totalAnomalies) * 99 apiChangesBarPercentage = if totalAnomalies == 0 then 0 else (fromIntegral apiTotal / fromIntegral totalAnomalies) * 99 alertIssuesBarPercentage = if totalAnomalies == 0 then 0 else (fromIntegral qTotal / fromIntegral totalAnomalies) * 99 @@ -1130,114 +1121,6 @@ sendReportForProject pid rType = do emailQueryMonitorAlert :: Monitors.QueryMonitorEvaled -> CI.CI Text -> Maybe Users.User -> ATBackgroundCtx () emailQueryMonitorAlert monitorE@Monitors.QueryMonitorEvaled{alertConfig} email userM = whenJust userM (const pass) - --- | Process new anomalies detected by database triggers --- This job is created by the apis.new_anomaly_proc() stored procedure --- when new entities (endpoints, shapes, fields, formats, errors) are inserted. --- --- Anomaly Processing Strategy: --- 1. API Changes (endpoint/shape/format) -> Group by endpoint into single issue --- 2. Runtime Exceptions -> Create individual issues for each error --- 3. All issues are queued for LLM enhancement if configured --- 4. 
Notifications are sent based on project settings -newAnomalyJob :: Projects.ProjectId -> ZonedTime -> Text -> Text -> V.Vector Text -> ATBackgroundCtx () -newAnomalyJob pid createdAt anomalyTypesT anomalyActionsT targetHashes = do - authCtx <- ask @Config.AuthContext - let anomalyType = fromMaybe (error "parseAnomalyTypes returned Nothing") $ Anomalies.parseAnomalyTypes anomalyTypesT - Log.logInfo "Processing new anomalies" () - case anomalyType of - -- API Change anomalies (endpoint, shape, format) - group into single issue per endpoint - -- This prevents notification spam when multiple related changes occur - Anomalies.ATEndpoint -> processAPIChangeAnomalies pid targetHashes - Anomalies.ATShape -> processAPIChangeAnomalies pid targetHashes - Anomalies.ATFormat -> processAPIChangeAnomalies pid targetHashes - -- Runtime exceptions get individual issues - -- Each unique error pattern gets its own issue for tracking - _ -> pass - - --- | Process API change anomalies (endpoint, shape, format) into unified APIChange issues --- This function groups related anomalies by endpoint to prevent notification spam. --- For example, if a new endpoint is added with 5 fields and 2 formats, instead of --- creating 8 separate issues, we create 1 issue that encompasses all changes. --- --- Grouping Strategy: --- 1. All anomalies are grouped by their endpoint hash --- 2. If an open issue exists for that endpoint, update it with new anomalies --- 3. Otherwise, create a new issue containing all anomalies for that endpoint -processAPIChangeAnomalies :: Projects.ProjectId -> V.Vector Text -> ATBackgroundCtx () -processAPIChangeAnomalies pid targetHashes = do - authCtx <- ask @Config.AuthContext - - -- Get all anomalies - anomaliesList <- Anomalies.getAnomaliesVM pid targetHashes - let anomaliesVM = V.fromList anomaliesList - - -- Group by endpoint hash to consolidate related changes - let anomaliesByEndpoint = groupAnomaliesByEndpointHash anomaliesVM - - -- Process each endpoint group - forM_ anomaliesByEndpoint \(endpointHash, anomalies) -> do - -- Check for existing open issue to avoid duplicates - existingIssueM <- Issues.findOpenIssueForEndpoint pid endpointHash - - case existingIssueM of - Just existingIssue -> do - -- Update existing issue with new anomaly data - let apiChangeData = - Issues.APIChangeData - { endpointMethod = fromMaybe "UNKNOWN" $ viaNonEmpty head $ V.toList $ V.mapMaybe (.endpointMethod) anomalies - , endpointPath = fromMaybe "/" $ viaNonEmpty head $ V.toList $ V.mapMaybe (.endpointUrlPath) anomalies - , endpointHost = "Unknown" - , anomalyHashes = V.map (.targetHash) anomalies - , shapeChanges = V.empty -- Simplified for now - , formatChanges = V.empty -- Simplified for now - , newFields = V.concatMap (.shapeNewUniqueFields) anomalies - , deletedFields = V.concatMap (.shapeDeletedFields) anomalies - , modifiedFields = V.concatMap (.shapeUpdatedFieldFormats) anomalies - } - Issues.updateIssueWithNewAnomaly existingIssue.id apiChangeData - Nothing -> do - -- Create new issue - issue <- liftIO $ Issues.createAPIChangeIssue pid endpointHash anomalies - Issues.insertIssue issue - - -- Queue enhancement job - _ <- liftIO $ withResource authCtx.jobsPool \conn -> - createJob conn "background_jobs" $ BackgroundJobs.EnhanceIssuesWithLLM pid (V.singleton issue.id) - pass - - --- -- Send notifications --- projectM <- Projects.projectById pid --- whenJust projectM \project -> do --- users <- Projects.usersByProjectId pid --- let endpointInfo = --- map --- ( \(_, anoms) -> --- let firstAnom = V.head 
anoms --- in fromMaybe "UNKNOWN" firstAnom.endpointMethod <> " " <> fromMaybe "/" firstAnom.endpointUrlPath --- ) --- anomaliesByEndpoint --- -- Only send notifications if we have valid endpoint info --- Relude.when (project.endpointAlerts && not (null endpointInfo)) do --- let alert = EndpointAlert{project = project.title, endpoints = V.fromList endpointInfo, endpointHash = fromMaybe "" $ viaNonEmpty head $ V.toList targetHashes} - --- forM_ project.notificationsChannel \case --- Projects.NSlack -> sendSlackAlert alert pid project.title Nothing --- Projects.NDiscord -> sendDiscordAlert alert pid project.title Nothing --- Projects.NPhone -> sendWhatsAppAlert alert pid project.title project.whatsappNumbers --- Projects.NEmail -> do --- forM_ users \u -> do --- let templateVars = --- AE.object --- [ "user_name" AE..= u.firstName --- , "project_name" AE..= project.title --- , "anomaly_url" AE..= (authCtx.env.hostUrl <> "p/" <> pid.toText <> "/issues") --- , "endpoint_name" AE..= (method <> " " <> urlPath) --- ] --- sendPostmarkEmail (CI.original u.email) (Just ("anomaly-endpoint-2", templateVars)) Nothing - -- | Group anomalies by endpoint hash groupAnomaliesByEndpointHash :: V.Vector Anomalies.AnomalyVM -> [(Text, V.Vector Anomalies.AnomalyVM)] groupAnomaliesByEndpointHash anomalies = diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index 108c9e875..1c5d3e5d8 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -47,13 +47,9 @@ module Models.Apis.Issues ( updateIssueData, -- * Conversion Functions - createAPIChangeIssue, - createRuntimeExceptionIssue, createQueryAlertIssue, createNewErrorIssue, createErrorSpikeIssue, - createNewLogPatternIssue, - createLogPatternSpikeIssue, createLogPatternIssue, createErrorEscalatingIssue, createErrorRegressedIssue, @@ -129,8 +125,7 @@ issueIdText = idToText -- | Issue types data IssueType - = APIChange - | NewEndpoint + = NewEndpoint | NewShape | FieldChange | RuntimeException @@ -147,12 +142,7 @@ data IssueType deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.ConstructorTagModifier '[DAE.CamelToSnake]] IssueType -instance Default IssueType where - def = APIChange - - issueTypeToText :: IssueType -> Text -issueTypeToText APIChange = "api_change" issueTypeToText NewEndpoint = "new_endpoint" issueTypeToText NewShape = "new_shape" issueTypeToText FieldChange = "field_change" @@ -168,7 +158,6 @@ issueTypeToText EndpointVolumeRateChange = "endpoint_volume_rate_change" parseIssueType :: Text -> Maybe IssueType -parseIssueType "api_change" = Just APIChange parseIssueType "new_endpoint" = Just NewEndpoint parseIssueType "new_shape" = Just NewShape parseIssueType "field_change" = Just FieldChange @@ -451,13 +440,15 @@ data Issue = Issue , updatedAt :: ZonedTime , projectId :: Projects.ProjectId , issueType :: IssueType - , target_hash :: Text + , sourceType :: Text + , targetHash :: Text , endpointHash :: Text , acknowledgedAt :: Maybe ZonedTime , acknowledgedBy :: Maybe Users.UserId , archivedAt :: Maybe ZonedTime , title :: Text - , service :: Text + , service :: Maybe Text + , environment :: Maybe Text , critical :: Bool , severity :: Text -- "critical", "warning", "info" , recommendedAction :: Text @@ -480,14 +471,16 @@ instance Default Issue where , createdAt = error "createdAt must be set" , updatedAt = error "updatedAt must be set" , projectId = def - , issueType = def - , target_hash = "" + , issueType = error "issueType must be set" + , sourceType = "" + , targetHash = "" , endpointHash = "" , acknowledgedAt 
= Nothing , acknowledgedBy = Nothing , archivedAt = Nothing , title = "" - , service = "" + , service = Nothing + , environment = Nothing , critical = False , severity = "info" , recommendedAction = "" @@ -512,7 +505,7 @@ data IssueL = IssueL , acknowledgedBy :: Maybe Users.UserId , archivedAt :: Maybe ZonedTime , title :: Text - , service :: Text + , service :: Maybe Text , critical :: Bool , severity :: Text -- Computed in query , affectedRequests :: Int -- Will be converted from affected_payloads in query @@ -692,106 +685,72 @@ acknowledgeIssue issueId userId = void $ PG.execute q (userId, issueId) |] --- | Create API Change issue from anomalies -createAPIChangeIssue :: Projects.ProjectId -> Text -> V.Vector Anomalies.AnomalyVM -> IO Issue -createAPIChangeIssue projectId endpointHash anomalies = do +-- | Derive source type from issue type +issueTypeToSourceType :: IssueType -> Text +issueTypeToSourceType NewEndpoint = "endpoint" +issueTypeToSourceType NewShape = "shape" +issueTypeToSourceType FieldChange = "shape" +issueTypeToSourceType RuntimeException = "error" +issueTypeToSourceType QueryAlert = "query" +issueTypeToSourceType LogPattern = "log_pattern" +issueTypeToSourceType ErrorEscalating = "error" +issueTypeToSourceType ErrorRegressed = "error" +issueTypeToSourceType LogPatternRateChange = "log_pattern" +issueTypeToSourceType EndpointLatencyDegradation = "endpoint" +issueTypeToSourceType EndpointErrorRateSpike = "endpoint" +issueTypeToSourceType EndpointVolumeRateChange = "endpoint" + + +-- | Helper to create an issue with common defaults +mkIssue + :: AE.ToJSON a + => Projects.ProjectId + -> IssueType + -> Text -- ^ targetHash + -> Text -- ^ endpointHash + -> Maybe Text -- ^ service + -> Bool -- ^ critical + -> Text -- ^ severity + -> Text -- ^ title + -> Text -- ^ recommendedAction + -> Text -- ^ migrationComplexity + -> a -- ^ issueData + -> IO Issue +mkIssue projectId issueType targetHash endpointHash service critical severity title recommendedAction migrationComplexity issueData = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime - - let firstAnomaly = V.head anomalies - apiChangeData = - APIChangeData - { endpointMethod = fromMaybe "UNKNOWN" firstAnomaly.endpointMethod - , endpointPath = fromMaybe "/" firstAnomaly.endpointUrlPath - , endpointHost = "Unknown" - , anomalyHashes = V.map (.targetHash) anomalies - , shapeChanges = V.empty -- Simplified for now - , formatChanges = V.empty -- Simplified for now - , newFields = V.concatMap (.shapeNewUniqueFields) anomalies - , deletedFields = V.concatMap (.shapeDeletedFields) anomalies - , modifiedFields = V.concatMap (.shapeUpdatedFieldFormats) anomalies - } - - breakingChanges = V.length apiChangeData.deletedFields + V.length apiChangeData.modifiedFields - isCritical = breakingChanges > 0 - + zonedNow <- utcToLocalZonedTime now pure Issue { id = issueId - , createdAt = firstAnomaly.createdAt - , updatedAt = firstAnomaly.updatedAt + , createdAt = zonedNow + , updatedAt = zonedNow , projectId = projectId - , issueType = APIChange - , target_hash = endpointHash + , issueType = issueType + , sourceType = issueTypeToSourceType issueType + , targetHash = targetHash , endpointHash = endpointHash , acknowledgedAt = Nothing , acknowledgedBy = Nothing , archivedAt = Nothing - , title = "API structure has changed" - , service = Anomalies.detectService Nothing firstAnomaly.endpointUrlPath - , critical = isCritical - , severity = if isCritical then "critical" else "warning" - , recommendedAction = "Review the API changes and 
update your integration accordingly." - , migrationComplexity = if breakingChanges > 5 then "high" else if breakingChanges > 0 then "medium" else "low" - , issueData = Aeson $ AE.toJSON apiChangeData - , requestPayloads = Aeson [] -- Will be populated during enhancement - , responsePayloads = Aeson [] -- Will be populated during enhancement - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } - - --- | Create Runtime Exception issue -createRuntimeExceptionIssue :: Projects.ProjectId -> RequestDumps.ATError -> IO Issue -createRuntimeExceptionIssue projectId atError = do - issueId <- UUIDId <$> UUID4.nextRandom - errorZonedTime <- utcToLocalZonedTime atError.when - - let exceptionData = - RuntimeExceptionData - { errorType = atError.errorType - , errorMessage = atError.message - , stackTrace = atError.stackTrace - , requestPath = atError.requestPath - , requestMethod = atError.requestMethod - , occurrenceCount = 1 - , firstSeen = atError.when - , lastSeen = atError.when - } - - pure - Issue - { id = issueId - , createdAt = errorZonedTime - , updatedAt = errorZonedTime - , projectId = projectId - , issueType = RuntimeException - , target_hash = fromMaybe "" atError.hash - , endpointHash = "" - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "New Exception: " <> atError.errorType <> " - " <> T.take 80 atError.message - , service = fromMaybe "unknown-service" atError.serviceName - , critical = True - , severity = "critical" - , recommendedAction = "Investigate the error and implement a fix." - , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON exceptionData + , title = title + , service = service + , environment = Nothing + , critical = critical + , severity = severity + , recommendedAction = recommendedAction + , migrationComplexity = migrationComplexity + , issueData = Aeson $ AE.toJSON issueData , requestPayloads = Aeson [] , responsePayloads = Aeson [] , llmEnhancedAt = Nothing , llmEnhancementVersion = Nothing } - -- | Create Query Alert issue createQueryAlertIssue :: Projects.ProjectId -> Text -> Text -> Text -> Double -> Double -> Text -> IO Issue createQueryAlertIssue projectId queryId queryName queryExpr threshold actual thresholdType = do - issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - let alertData = QueryAlertData { queryId = queryId @@ -802,40 +761,24 @@ createQueryAlertIssue projectId queryId queryName queryExpr threshold actual thr , thresholdType = thresholdType , triggeredAt = now } - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = QueryAlert - , target_hash = "" - , endpointHash = "" - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = queryName <> " threshold " <> thresholdType <> " " <> show threshold - , service = "Monitoring" - , critical = True - , severity = "warning" - , recommendedAction = "Review the query results and take appropriate action." - , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON alertData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } + mkIssue + projectId + QueryAlert + "" + "" + (Just "Monitoring") + True + "warning" + (queryName <> " threshold " <> thresholdType <> " " <> show threshold) + "Review the query results and take appropriate action." 
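+    -- Positional args above: projectId, issueType, targetHash, endpointHash,
+    -- service, critical, severity, title, recommendedAction; the two that
+    -- follow are migrationComplexity and issueData (see the mkIssue helper).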
+ "n/a" + alertData -- | Create issue for a new error createNewErrorIssue :: Projects.ProjectId -> Errors.Error -> IO Issue createNewErrorIssue projectId err = do - issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - let exceptionData = RuntimeExceptionData { errorType = err.exceptionType @@ -847,40 +790,24 @@ createNewErrorIssue projectId err = do , firstSeen = now , lastSeen = now } - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = RuntimeException - , target_hash = err.hash - , endpointHash = err.hash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "New Error: " <> err.exceptionType <> " - " <> T.take 80 err.message - , service = fromMaybe "unknown-service" err.service - , critical = True - , severity = "critical" - , recommendedAction = "Investigate the new error and implement a fix." - , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON exceptionData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } + mkIssue + projectId + RuntimeException + err.hash + err.hash + err.service + True + "critical" + ("New Error: " <> err.exceptionType <> " - " <> T.take 80 err.message) + "Investigate the new error and implement a fix." + "n/a" + exceptionData -- | Create issue for an error spike createErrorSpikeIssue :: Projects.ProjectId -> Errors.Error -> Double -> Double -> Double -> IO Issue createErrorSpikeIssue projectId err currentRate baselineMean baselineStddev = do - issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - let zScore = if baselineStddev > 0 then (currentRate - baselineMean) / baselineStddev else 0 increasePercent = if baselineMean > 0 then ((currentRate / baselineMean) - 1) * 100 else 0 exceptionData = @@ -894,141 +821,24 @@ createErrorSpikeIssue projectId err currentRate baselineMean baselineStddev = do , firstSeen = now , lastSeen = now } - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = RuntimeException - , target_hash = err.hash - , endpointHash = err.hash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "Error Spike: " <> err.exceptionType <> " (" <> T.pack (show (round increasePercent :: Int)) <> "% increase)" - , service = fromMaybe "unknown-service" err.service - , critical = True - , severity = "critical" - , recommendedAction = "Error rate has spiked " <> T.pack (show (round zScore :: Int)) <> " standard deviations above baseline. Current: " <> T.pack (show (round currentRate :: Int)) <> "/hr, Baseline: " <> T.pack (show (round baselineMean :: Int)) <> "/hr. Investigate recent deployments or changes." - , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON exceptionData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } + mkIssue + projectId + RuntimeException + err.hash + err.hash + err.service + True + "critical" + ("Error Spike: " <> err.exceptionType <> " (" <> T.pack (show (round increasePercent :: Int)) <> "% increase)") + ("Error rate has spiked " <> T.pack (show (round zScore :: Int)) <> " standard deviations above baseline. 
Current: " <> T.pack (show (round currentRate :: Int)) <> "/hr, Baseline: " <> T.pack (show (round baselineMean :: Int)) <> "/hr. Investigate recent deployments or changes.") + "n/a" + exceptionData -- | Create an issue for a new log pattern -createNewLogPatternIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> IO Issue -createNewLogPatternIssue projectId lp = do - issueId <- UUIDId <$> UUID4.nextRandom - now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - - -- Use RuntimeExceptionData for log patterns (similar to errors) - let exceptionData = - RuntimeExceptionData - { errorType = "LogPattern" - , errorMessage = fromMaybe lp.logPattern lp.sampleMessage - , stackTrace = "" - , requestPath = Nothing - , requestMethod = Nothing - , occurrenceCount = fromIntegral lp.occurrenceCount - , firstSeen = now - , lastSeen = now - } - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = RuntimeException - , target_hash = lp.patternHash - , endpointHash = lp.patternHash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "New Log Pattern: " <> T.take 100 lp.logPattern - , service = fromMaybe "unknown-service" lp.serviceName - , critical = False - , severity = case lp.logLevel of - Just "error" -> "high" - Just "warning" -> "medium" - _ -> "low" - , recommendedAction = "A new log pattern has been detected. Review to ensure it's expected behavior. Pattern first seen at: " <> T.pack (show lp.firstSeenAt) - , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON exceptionData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } - - --- | Create an issue for a log pattern volume spike -createLogPatternSpikeIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> Double -> Double -> Double -> IO Issue -createLogPatternSpikeIssue projectId lp currentRate baselineMean baselineStddev = do - issueId <- UUIDId <$> UUID4.nextRandom - now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - - let zScore = if baselineStddev > 0 then (currentRate - baselineMean) / baselineStddev else 0 - increasePercent = if baselineMean > 0 then ((currentRate / baselineMean) - 1) * 100 else 0 - exceptionData = - RuntimeExceptionData - { errorType = "LogPatternSpike" - , errorMessage = fromMaybe lp.logPattern lp.sampleMessage - , stackTrace = "" - , requestPath = Nothing - , requestMethod = Nothing - , occurrenceCount = round currentRate - , firstSeen = now - , lastSeen = now - } - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = RuntimeException - , target_hash = lp.patternHash - , endpointHash = lp.patternHash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "Log Pattern Spike: " <> T.take 60 lp.logPattern <> " (" <> T.pack (show (round increasePercent :: Int)) <> "% increase)" - , service = fromMaybe "unknown-service" lp.serviceName - , critical = case lp.logLevel of - Just "error" -> True - _ -> False - , severity = case lp.logLevel of - Just "error" -> "critical" - Just "warning" -> "high" - _ -> "medium" - , recommendedAction = "Log pattern volume has spiked " <> T.pack (show (round zScore :: Int)) <> " standard deviations above baseline. Current: " <> T.pack (show (round currentRate :: Int)) <> "/hr, Baseline: " <> T.pack (show (round baselineMean :: Int)) <> "/hr. 
Investigate recent changes." - , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON exceptionData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } - - --- | Create an issue for a new log pattern (using LogPatternData) createLogPatternIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> IO Issue createLogPatternIssue projectId lp = do - issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - let logPatternData = LogPatternData { patternHash = lp.patternHash @@ -1039,43 +849,28 @@ createLogPatternIssue projectId lp = do , firstSeenAt = now , occurrenceCount = fromIntegral lp.occurrenceCount } - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = LogPattern - , target_hash = lp.patternHash - , endpointHash = lp.patternHash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "New Log Pattern: " <> T.take 100 lp.logPattern - , service = fromMaybe "unknown-service" lp.serviceName - , critical = lp.logLevel == Just "error" - , severity = case lp.logLevel of - Just "error" -> "critical" - Just "warning" -> "warning" - _ -> "info" - , recommendedAction = "A new log pattern has been detected. Review to ensure it's expected behavior." - , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON logPatternData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } + severity = case lp.logLevel of + Just "error" -> "critical" + Just "warning" -> "warning" + _ -> "info" + mkIssue + projectId + LogPattern + lp.patternHash + lp.patternHash + lp.serviceName + (lp.logLevel == Just "error") + severity + ("New Log Pattern: " <> T.take 100 lp.logPattern) + "A new log pattern has been detected. Review to ensure it's expected behavior." + "n/a" + logPatternData -- | Create an issue for an escalating error createErrorEscalatingIssue :: Projects.ProjectId -> Errors.Error -> Text -> Double -> Text -> IO Issue createErrorEscalatingIssue projectId err prevState escalationRate escalationWindow = do - issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - let escalatingData = ErrorEscalatingData { errorHash = err.hash @@ -1091,40 +886,24 @@ createErrorEscalatingIssue projectId err prevState escalationRate escalationWind , firstSeenAt = now , lastSeenAt = now } - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = ErrorEscalating - , target_hash = err.hash - , endpointHash = err.hash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "Error Escalating: " <> err.exceptionType <> " (" <> T.pack (show (round (escalationRate * 100) :: Int)) <> "% increase)" - , service = fromMaybe "unknown-service" err.service - , critical = True - , severity = "critical" - , recommendedAction = "Error rate is escalating (" <> T.pack (show escalationRate) <> "x over " <> escalationWindow <> "). Investigate immediately." 
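-- A worked sketch of createErrorEscalatingIssue's escalation figures
-- (hypothetical values: escalationRate = 2.5, escalationWindow = "1h"):
--   title suffix:  show (round (2.5 * 100) :: Int) <> "% increase"  ==> "250% increase"
--   action phrase: show 2.5 <> "x over " <> "1h"                    ==> "2.5x over 1h"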
- , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON escalatingData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } + mkIssue + projectId + ErrorEscalating + err.hash + err.hash + err.service + True + "critical" + ("Error Escalating: " <> err.exceptionType <> " (" <> T.pack (show (round (escalationRate * 100) :: Int)) <> "% increase)") + ("Error rate is escalating (" <> T.pack (show escalationRate) <> "x over " <> escalationWindow <> "). Investigate immediately.") + "n/a" + escalatingData -- | Create an issue for a regressed error createErrorRegressedIssue :: Projects.ProjectId -> Errors.Error -> UTCTime -> Int -> Int -> IO Issue createErrorRegressedIssue projectId err resolvedAtTime quietMins prevOccurrences = do - issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - let regressedData = ErrorRegressedData { errorHash = err.hash @@ -1137,40 +916,24 @@ createErrorRegressedIssue projectId err resolvedAtTime quietMins prevOccurrences , previousOccurrences = prevOccurrences , newOccurrences = 1 } - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = ErrorRegressed - , target_hash = err.hash - , endpointHash = err.hash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "Error Regressed: " <> err.exceptionType <> " (after " <> T.pack (show quietMins) <> " min quiet)" - , service = fromMaybe "unknown-service" err.service - , critical = True - , severity = "critical" - , recommendedAction = "Previously resolved error has returned after " <> T.pack (show quietMins) <> " minutes. The original fix may be incomplete." - , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON regressedData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } + mkIssue + projectId + ErrorRegressed + err.hash + err.hash + err.service + True + "critical" + ("Error Regressed: " <> err.exceptionType <> " (after " <> T.pack (show quietMins) <> " min quiet)") + ("Previously resolved error has returned after " <> T.pack (show quietMins) <> " minutes. 
The original fix may be incomplete.") + "n/a" + regressedData -- | Create an issue for a log pattern rate change createLogPatternRateChangeIssue :: Projects.ProjectId -> LogPatterns.LogPattern -> Double -> Double -> Double -> Text -> IO Issue createLogPatternRateChangeIssue projectId lp currentRate baselineMean baselineStddev direction = do - issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - let zScoreVal = if baselineStddev > 0 then abs (currentRate - baselineMean) / baselineStddev else 0 changePercentVal = if baselineMean > 0 then abs ((currentRate / baselineMean) - 1) * 100 else 0 rateChangeData = @@ -1188,46 +951,31 @@ createLogPatternRateChangeIssue projectId lp currentRate baselineMean baselineSt , changeDirection = direction , detectedAt = now } - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = LogPatternRateChange - , target_hash = lp.patternHash - , endpointHash = lp.patternHash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "Log Pattern " <> T.toTitle direction <> ": " <> T.take 60 lp.logPattern <> " (" <> T.pack (show (round changePercentVal :: Int)) <> "%)" - , service = fromMaybe "unknown-service" lp.serviceName - , critical = direction == "spike" && lp.logLevel == Just "error" - , severity = case (direction, lp.logLevel) of - ("spike", Just "error") -> "critical" - ("spike", _) -> "warning" - ("drop", _) -> "info" - _ -> "info" - , recommendedAction = "Log pattern volume " <> direction <> " detected. Current: " <> T.pack (show (round currentRate :: Int)) <> "/hr, Baseline: " <> T.pack (show (round baselineMean :: Int)) <> "/hr (" <> T.pack (show (round zScoreVal :: Int)) <> " std devs)." - , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON rateChangeData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } + severity = case (direction, lp.logLevel) of + ("spike", Just "error") -> "critical" + ("spike", _) -> "warning" + ("drop", _) -> "info" + _ -> "info" + mkIssue + projectId + LogPatternRateChange + lp.patternHash + lp.patternHash + lp.serviceName + (direction == "spike" && lp.logLevel == Just "error") + severity + ("Log Pattern " <> T.toTitle direction <> ": " <> T.take 60 lp.logPattern <> " (" <> T.pack (show (round changePercentVal :: Int)) <> "%)") + ("Log pattern volume " <> direction <> " detected. 
Current: " <> T.pack (show (round currentRate :: Int)) <> "/hr, Baseline: " <> T.pack (show (round baselineMean :: Int)) <> "/hr (" <> T.pack (show (round zScoreVal :: Int)) <> " std devs).") + "n/a" + rateChangeData -- | Create an issue for endpoint latency degradation createEndpointLatencyDegradationIssue :: Projects.ProjectId -> Text -> Text -> Text -> Maybe Text -> Double -> Double -> Double -> Text -> V.Vector Text -> IO Issue createEndpointLatencyDegradationIssue projectId epHash method path serviceName currentLatency baselineLatency baselineStddev percentile traceIds = do - issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - - let zScoreVal = if baselineStddev > 0 then (currentLatency - baselineLatency) / baselineStddev else 0 - degradationPct = if baselineLatency > 0 then ((currentLatency / baselineLatency) - 1) * 100 else 0 + let degradationPct = if baselineLatency > 0 then ((currentLatency / baselineLatency) - 1) * 100 else 0 + zScoreVal = if baselineStddev > 0 then (currentLatency - baselineLatency) / baselineStddev else 0 latencyData = EndpointLatencyDegradationData { endpointHash = epHash @@ -1243,40 +991,25 @@ createEndpointLatencyDegradationIssue projectId epHash method path serviceName c , sampleTraceIds = traceIds , detectedAt = now } - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = EndpointLatencyDegradation - , target_hash = epHash - , endpointHash = epHash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "Latency Degradation: " <> method <> " " <> path <> " (" <> percentile <> " +" <> T.pack (show (round degradationPct :: Int)) <> "%)" - , service = fromMaybe "unknown-service" serviceName - , critical = degradationPct > 100 - , severity = if degradationPct > 100 then "critical" else if degradationPct > 50 then "warning" else "info" - , recommendedAction = "Endpoint " <> percentile <> " latency increased from " <> T.pack (show (round baselineLatency :: Int)) <> "ms to " <> T.pack (show (round currentLatency :: Int)) <> "ms. Check recent deployments and dependencies." - , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON latencyData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } + severity = if degradationPct > 100 then "critical" else if degradationPct > 50 then "warning" else "info" + mkIssue + projectId + EndpointLatencyDegradation + epHash + epHash + serviceName + (degradationPct > 100) + severity + ("Latency Degradation: " <> method <> " " <> path <> " (" <> percentile <> " +" <> T.pack (show (round degradationPct :: Int)) <> "%)") + ("Endpoint " <> percentile <> " latency increased from " <> T.pack (show (round baselineLatency :: Int)) <> "ms to " <> T.pack (show (round currentLatency :: Int)) <> "ms. 
Check recent deployments and dependencies.") + "n/a" + latencyData -- | Create an issue for endpoint error rate spike createEndpointErrorRateSpikeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Maybe Text -> Double -> Double -> Double -> Int -> Int -> V.Vector Text -> IO Issue createEndpointErrorRateSpikeIssue projectId epHash method path serviceName currentRate baselineRate baselineStddev errorCount totalReqs topErrors = do - issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - let zScoreVal = if baselineStddev > 0 then (currentRate - baselineRate) / baselineStddev else 0 spikePct = if baselineRate > 0 then ((currentRate / baselineRate) - 1) * 100 else currentRate * 100 errorRateData = @@ -1295,39 +1028,25 @@ createEndpointErrorRateSpikeIssue projectId epHash method path serviceName curre , topErrorTypes = topErrors , detectedAt = now } - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = EndpointErrorRateSpike - , target_hash = epHash - , endpointHash = epHash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "Error Rate Spike: " <> method <> " " <> path <> " (" <> T.pack (show (round (currentRate * 100) :: Int)) <> "% errors)" - , service = fromMaybe "unknown-service" serviceName - , critical = currentRate > 0.1 - , severity = if currentRate > 0.1 then "critical" else if currentRate > 0.05 then "warning" else "info" - , recommendedAction = "Error rate spiked from " <> T.pack (show (round (baselineRate * 100) :: Int)) <> "% to " <> T.pack (show (round (currentRate * 100) :: Int)) <> "% (" <> T.pack (show errorCount) <> "/" <> T.pack (show totalReqs) <> " requests failed)." - , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON errorRateData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } - - + severity = if currentRate > 0.1 then "critical" else if currentRate > 0.05 then "warning" else "info" + mkIssue + projectId + EndpointErrorRateSpike + epHash + epHash + serviceName + (currentRate > 0.1) + severity + ("Error Rate Spike: " <> method <> " " <> path <> " (" <> T.pack (show (round (currentRate * 100) :: Int)) <> "% errors)") + ("Error rate spiked from " <> T.pack (show (round (baselineRate * 100) :: Int)) <> "% to " <> T.pack (show (round (currentRate * 100) :: Int)) <> "% (" <> T.pack (show errorCount) <> "/" <> T.pack (show totalReqs) <> " requests failed).") + "n/a" + errorRateData + + +-- | Create an issue for endpoint volume rate change createEndpointVolumeRateChangeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Maybe Text -> Double -> Double -> Double -> Text -> IO Issue createEndpointVolumeRateChangeIssue projectId epHash method path serviceName currentRate baselineRate baselineStddev direction = do - issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - let zScoreVal = if baselineStddev > 0 then abs (currentRate - baselineRate) / baselineStddev else 0 changePct = if baselineRate > 0 then abs ((currentRate / baselineRate) - 1) * 100 else 0 volumeData = @@ -1344,43 +1063,29 @@ createEndpointVolumeRateChangeIssue projectId epHash method path serviceName cur , changeDirection = direction , detectedAt = now } - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = EndpointVolumeRateChange - , 
target_hash = epHash - , endpointHash = epHash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "Traffic " <> T.toTitle direction <> ": " <> method <> " " <> path <> " (" <> T.pack (show (round changePct :: Int)) <> "%)" - , service = fromMaybe "unknown-service" serviceName - , critical = direction == "drop" && changePct > 80 - , severity = case (direction, changePct) of - ("drop", pct) | pct > 80 -> "critical" - ("drop", pct) | pct > 50 -> "warning" - ("spike", pct) | pct > 200 -> "warning" - _ -> "info" - , recommendedAction = "Endpoint traffic " <> direction <> " detected. Current: " <> T.pack (show (round currentRate :: Int)) <> " req/hr, Baseline: " <> T.pack (show (round baselineRate :: Int)) <> " req/hr." - , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON volumeData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } + severity = case (direction, changePct) of + ("drop", pct) | pct > 80 -> "critical" + ("drop", pct) | pct > 50 -> "warning" + ("spike", pct) | pct > 200 -> "warning" + _ -> "info" + mkIssue + projectId + EndpointVolumeRateChange + epHash + epHash + serviceName + (direction == "drop" && changePct > 80) + severity + ("Traffic " <> T.toTitle direction <> ": " <> method <> " " <> path <> " (" <> T.pack (show (round changePct :: Int)) <> "%)") + ("Endpoint traffic " <> direction <> " detected. Current: " <> T.pack (show (round currentRate :: Int)) <> " req/hr, Baseline: " <> T.pack (show (round baselineRate :: Int)) <> " req/hr.") + "n/a" + volumeData -- | Create an issue for a new endpoint createNewEndpointIssue :: Projects.ProjectId -> Text -> Text -> Text -> Text -> IO Issue createNewEndpointIssue projectId epHash method path host = do - issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now let endpointData = NewEndpointData { endpointHash = epHash @@ -1390,39 +1095,24 @@ createNewEndpointIssue projectId epHash method path host = do , firstSeenAt = now , initialShapes = V.empty } - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = NewEndpoint - , target_hash = epHash - , endpointHash = epHash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "New Endpoint detected" - , service = host - , critical = False - , severity = "info" - , recommendedAction = "A new API endpoint has been detected. Review to ensure it matches your API specification." - , migrationComplexity = "n/a" - , issueData = Aeson $ AE.toJSON endpointData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } + mkIssue + projectId + NewEndpoint + epHash + epHash + (Just host) + False + "info" + "New Endpoint detected" + "A new API endpoint has been detected. Review to ensure it matches your API specification." 
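+    -- The endpoint's host doubles as the service label for new-endpoint
+    -- issues; migrationComplexity and issueData follow.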
+ "n/a" + endpointData -- | Create an issue for a new shape createNewShapeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Text -> Int -> AE.Value -> AE.Value -> V.Vector Text -> V.Vector Text -> V.Vector Text -> V.Vector Text -> IO Issue createNewShapeIssue projectId shHash epHash method path statusCode reqPayload respPayload newFlds deletedFlds modifiedFlds fldHashes = do - issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - let shapeData = NewShapeData { shapeHash = shHash @@ -1439,46 +1129,31 @@ createNewShapeIssue projectId shHash epHash method path statusCode reqPayload re , firstSeenAt = now , newShapesAfterIssue = V.empty } - hasBreakingChanges = not (V.null deletedFlds) || not (V.null modifiedFlds) changeCount = V.length newFlds + V.length deletedFlds + V.length modifiedFlds - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = NewShape - , target_hash = shHash - , endpointHash = epHash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = "New Shape: " <> method <> " " <> path <> " (" <> T.pack (show statusCode) <> ") - " <> T.pack (show changeCount) <> " field changes" - , service = "" - , critical = hasBreakingChanges - , severity = if hasBreakingChanges then "critical" else "warning" - , recommendedAction = - if hasBreakingChanges - then "Breaking API changes detected: " <> T.pack (show (V.length deletedFlds)) <> " deleted, " <> T.pack (show (V.length modifiedFlds)) <> " modified fields. Update clients immediately." - else "New API shape detected with " <> T.pack (show (V.length newFlds)) <> " new fields. Review for compatibility." - , migrationComplexity = if V.length deletedFlds > 5 then "high" else if hasBreakingChanges then "medium" else "low" - , issueData = Aeson $ AE.toJSON shapeData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } + severity = if hasBreakingChanges then "critical" else "warning" + complexity = if V.length deletedFlds > 5 then "high" else if hasBreakingChanges then "medium" else "low" + action = + if hasBreakingChanges + then "Breaking API changes detected: " <> T.pack (show (V.length deletedFlds)) <> " deleted, " <> T.pack (show (V.length modifiedFlds)) <> " modified fields. Update clients immediately." + else "New API shape detected with " <> T.pack (show (V.length newFlds)) <> " new fields. Review for compatibility." 
+ mkIssue + projectId + NewShape + shHash + epHash + Nothing + hasBreakingChanges + severity + ("New Shape: " <> method <> " " <> path <> " (" <> T.pack (show statusCode) <> ") - " <> T.pack (show changeCount) <> " field changes") + action + complexity + shapeData -- | Create an issue for a field change createFieldChangeIssue :: Projects.ProjectId -> Text -> Text -> Text -> Text -> Text -> Text -> Maybe Text -> Text -> Text -> IO Issue createFieldChangeIssue projectId fldHash epHash method path keyPath category prevType newType changeType = do - issueId <- UUIDId <$> UUID4.nextRandom - now <- getCurrentTime - zonedNow <- utcToLocalZonedTime now - let fieldData = FieldChangeData { fieldHash = fldHash @@ -1491,7 +1166,6 @@ createFieldChangeIssue projectId fldHash epHash method path keyPath category pre , newType = newType , changeType = changeType } - isBreaking = changeType `elem` ["removed", "type_changed"] titlePrefix = case changeType of "added" -> "New Field" @@ -1499,31 +1173,18 @@ createFieldChangeIssue projectId fldHash epHash method path keyPath category pre "type_changed" -> "Field Type Changed" "format_changed" -> "Field Format Changed" _ -> "Field Changed" - - pure - Issue - { id = issueId - , createdAt = zonedNow - , updatedAt = zonedNow - , projectId = projectId - , issueType = FieldChange - , target_hash = fldHash - , endpointHash = epHash - , acknowledgedAt = Nothing - , acknowledgedBy = Nothing - , archivedAt = Nothing - , title = titlePrefix <> ": " <> keyPath <> " in " <> method <> " " <> path - , service = "" - , critical = isBreaking - , severity = if isBreaking then "critical" else "info" - , recommendedAction = "" - , migrationComplexity = if isBreaking then "medium" else "low" - , issueData = Aeson $ AE.toJSON fieldData - , requestPayloads = Aeson [] - , responsePayloads = Aeson [] - , llmEnhancedAt = Nothing - , llmEnhancementVersion = Nothing - } + mkIssue + projectId + FieldChange + fldHash + epHash + Nothing + isBreaking + (if isBreaking then "critical" else "info") + (titlePrefix <> ": " <> keyPath <> " in " <> method <> " " <> path) + "" + (if isBreaking then "medium" else "low") + fieldData updateIssueData :: DB es => IssueId -> AE.Value -> Eff es () diff --git a/src/Models/Apis/Issues/Enhancement.hs b/src/Models/Apis/Issues/Enhancement.hs index f9f0e4605..816cd3a82 100644 --- a/src/Models/Apis/Issues/Enhancement.hs +++ b/src/Models/Apis/Issues/Enhancement.hs @@ -93,28 +93,28 @@ generateEnhancedDescription authCtx issue = do buildTitlePrompt :: Issues.Issue -> Text buildTitlePrompt issue = let baseContext = case issue.issueType of - Issues.APIChange -> - let Aeson issueDataValue = issue.issueData - in case AE.fromJSON issueDataValue of - AE.Success (apiData :: Issues.APIChangeData) -> - "Generate a concise, descriptive title for this API change.\n" - <> "Endpoint: " - <> apiData.endpointMethod - <> " " - <> apiData.endpointPath - <> "\n" - <> "New fields: " - <> toText (show $ V.length apiData.newFields) - <> "\n" - <> "Deleted fields: " - <> toText (show $ V.length apiData.deletedFields) - <> "\n" - <> "Modified fields: " - <> toText (show $ V.length apiData.modifiedFields) - <> "\n" - <> "Service: " - <> issue.service - _ -> "Generate a concise title for this API change." 
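+ -- For reference, the surviving RuntimeException branch below yields
+ -- prompts of the form (hypothetical issue values):
+ --   Generate a concise, descriptive title for this runtime exception.
+ --   Error type: KeyError
+ --   Error message: 'user_id'
+ --   Service: checkout-api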
+ -- Issues.APIChange -> + -- let Aeson issueDataValue = issue.issueData + -- in case AE.fromJSON issueDataValue of + -- AE.Success (apiData :: Issues.APIChangeData) -> + -- "Generate a concise, descriptive title for this API change.\n" + -- <> "Endpoint: " + -- <> apiData.endpointMethod + -- <> " " + -- <> apiData.endpointPath + -- <> "\n" + -- <> "New fields: " + -- <> toText (show $ V.length apiData.newFields) + -- <> "\n" + -- <> "Deleted fields: " + -- <> toText (show $ V.length apiData.deletedFields) + -- <> "\n" + -- <> "Modified fields: " + -- <> toText (show $ V.length apiData.modifiedFields) + -- <> "\n" + -- <> "Service: " + -- <> issue.service + -- _ -> "Generate a concise title for this API change." Issues.RuntimeException -> let Aeson issueDataValue = issue.issueData in case AE.fromJSON issueDataValue of @@ -127,7 +127,7 @@ buildTitlePrompt issue = <> T.take 100 errorData.errorMessage <> "\n" <> "Service: " - <> issue.service + <> fromMaybe "" issue.service _ -> "Generate a concise title for this runtime exception." Issues.QueryAlert -> let Aeson issueDataValue = issue.issueData @@ -165,31 +165,31 @@ buildTitlePrompt issue = buildDescriptionPrompt :: Issues.Issue -> Text buildDescriptionPrompt issue = let baseContext = case issue.issueType of - Issues.APIChange -> - let Aeson issueDataValue = issue.issueData - in case AE.fromJSON issueDataValue of - AE.Success (apiData :: Issues.APIChangeData) -> - "Describe this API change and its impact.\n" - <> "Endpoint: " - <> apiData.endpointMethod - <> " " - <> apiData.endpointPath - <> "\n" - <> "New fields: " - <> toText (show $ V.toList apiData.newFields) - <> "\n" - <> "Deleted fields: " - <> toText (show $ V.toList apiData.deletedFields) - <> "\n" - <> "Modified fields: " - <> toText (show $ V.toList apiData.modifiedFields) - <> "\n" - <> "Total anomalies grouped: " - <> toText (show $ V.length apiData.anomalyHashes) - <> "\n" - <> "Service: " - <> issue.service - _ -> "Describe this API change and its implications." + -- Issues.APIChange -> + -- let Aeson issueDataValue = issue.issueData + -- in case AE.fromJSON issueDataValue of + -- AE.Success (apiData :: Issues.APIChangeData) -> + -- "Describe this API change and its impact.\n" + -- <> "Endpoint: " + -- <> apiData.endpointMethod + -- <> " " + -- <> apiData.endpointPath + -- <> "\n" + -- <> "New fields: " + -- <> toText (show $ V.toList apiData.newFields) + -- <> "\n" + -- <> "Deleted fields: " + -- <> toText (show $ V.toList apiData.deletedFields) + -- <> "\n" + -- <> "Modified fields: " + -- <> toText (show $ V.toList apiData.modifiedFields) + -- <> "\n" + -- <> "Total anomalies grouped: " + -- <> toText (show $ V.length apiData.anomalyHashes) + -- <> "\n" + -- <> "Service: " + -- <> issue.service + -- _ -> "Describe this API change and its implications." 
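-- issue.service is optional (Maybe Text); prompt branches render a missing
-- service as an empty string via fromMaybe "" issue.service, as in
-- buildTitlePrompt above.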
Issues.RuntimeException -> let Aeson issueDataValue = issue.issueData in case AE.fromJSON issueDataValue of @@ -282,18 +282,18 @@ classifyIssueCriticality authCtx issue = do buildCriticalityPrompt :: Issues.Issue -> Text buildCriticalityPrompt issue = let context = case issue.issueType of - Issues.APIChange -> - let Aeson issueDataValue = issue.issueData - in case AE.fromJSON issueDataValue of - AE.Success (apiData :: Issues.APIChangeData) -> - unlines - [ "API change detected" - , "Endpoint: " <> apiData.endpointMethod <> " " <> apiData.endpointPath - , "New fields: " <> toText (show $ V.length apiData.newFields) - , "Deleted fields: " <> toText (show $ V.length apiData.deletedFields) - , "Modified fields: " <> toText (show $ V.length apiData.modifiedFields) - ] - _ -> "API change: " <> issue.title + -- Issues.APIChange -> + -- let Aeson issueDataValue = issue.issueData + -- in case AE.fromJSON issueDataValue of + -- AE.Success (apiData :: Issues.APIChangeData) -> + -- unlines + -- [ "API change detected" + -- , "Endpoint: " <> apiData.endpointMethod <> " " <> apiData.endpointPath + -- , "New fields: " <> toText (show $ V.length apiData.newFields) + -- , "Deleted fields: " <> toText (show $ V.length apiData.deletedFields) + -- , "Modified fields: " <> toText (show $ V.length apiData.modifiedFields) + -- ] + -- _ -> "API change: " <> issue.title Issues.RuntimeException -> "Runtime exception: " <> issue.title Issues.QueryAlert -> diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index 3d5c18f80..20bb69aaa 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -353,22 +353,6 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do span_ [class_ "text-sm text-textWeak"] "Service:" span_ [class_ "ml-2 text-sm"] $ toHtml $ fromMaybe "Unknown service" err.errorData.serviceName _ -> pass - Issues.APIChange -> do - case AE.fromJSON (getAeson issue.issueData) of - AE.Success (changeData :: Issues.APIChangeData) -> do - div_ [class_ "grid grid-cols-4 lg:grid-cols-8 gap-4"] do - statBox_ (Just pid) Nothing "Affected Requests" "" "0" Nothing Nothing - statBox_ (Just pid) Nothing "New fields" "" (show $ V.length changeData.newFields) Nothing Nothing - statBox_ (Just pid) Nothing "Modified fields" "" (show $ V.length changeData.modifiedFields) Nothing Nothing - statBox_ (Just pid) Nothing "Deleted fields" "" (show $ V.length changeData.deletedFields) Nothing Nothing - widget - div_ [class_ "flex flex-col gap-4"] do - div_ [class_ "w-full"] do - div_ [class_ "flex items-center gap-2"] do - span_ [class_ $ "badge " <> methodFillColor changeData.endpointMethod] $ toHtml changeData.endpointMethod - span_ [class_ "monospace px-2 py-1 rounded text-xs text-textStrong"] $ toHtml changeData.endpointPath - div_ [class_ "mt-4 border border-strokeWeak rounded-lg overflow-hidden bg-bgRaised"] $ renderPayloadChanges issue.id issue.issueType issue.requestPayloads issue.responsePayloads - _ -> pass Issues.QueryAlert -> do case AE.fromJSON (getAeson issue.issueData) of AE.Success (alertData :: Issues.QueryAlertData) -> do @@ -453,7 +437,7 @@ buildAIContext issue errM trDataM spans = , Just $ "- **Title**: " <> issue.title , Just $ "- **Type**: " <> show issue.issueType , Just $ "- **Severity**: " <> issue.severity - , Just $ "- **Service**: " <> issue.service + , Just $ "- **Service**: " <> fromMaybe "" issue.service , Just $ "- **Affected Requests**: 0" , Just $ "- **Affected Clients**: 0" , Just $ "- **Recommended Action**: " <> issue.recommendedAction @@ -949,22 +933,15 @@ 
renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue -- Metadata row (method, endpoint, service, time) div_ [class_ "flex items-center gap-4 text-sm text-textWeak mb-3 flex-wrap"] do -- Method and endpoint (for API changes) - when (issue.issueType == Issues.APIChange) do - case AE.fromJSON (getAeson issue.issueData) of - AE.Success (apiData :: Issues.APIChangeData) -> do - div_ [class_ "flex items-center gap-2"] do - span_ [class_ $ "badge " <> methodFillColor apiData.endpointMethod] $ toHtml apiData.endpointMethod - span_ [class_ "monospace bg-fillWeak px-2 py-1 rounded text-xs text-textStrong"] $ toHtml apiData.endpointPath - _ -> pass -- Service badge span_ [class_ "flex items-center gap-1"] do div_ [class_ "w-3 h-3 bg-fillYellow rounded-sm"] "" - span_ [class_ "text-textStrong"] $ toHtml issue.service + span_ [class_ "text-textStrong"] $ toHtml $ fromMaybe "" issue.service -- Time since span_ [class_ "text-textWeak"] $ toHtml timeSinceString -- Statistics row (only for API changes) - when (issue.issueType == Issues.APIChange) do + let allChanges = getAeson issue.requestPayloads <> getAeson issue.responsePayloads :: [Anomalies.PayloadChange] countChange (!b, !i, !t) c = case c.changeType of Anomalies.Breaking -> (b + 1, i, t + 1) @@ -1011,12 +988,6 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue -- Recommended action div_ [class_ "border-l-4 border-strokeBrand pl-4 mb-4"] $ p_ [class_ "text-sm text-textStrong leading-relaxed"] $ toHtml issue.recommendedAction - -- Collapsible payload changes (only for API changes) - when (issue.issueType == Issues.APIChange) $ details_ [class_ "group mb-4"] do - summary_ [class_ "inline-flex items-center cursor-pointer whitespace-nowrap text-sm font-medium transition-all rounded-md gap-1.5 text-textBrand hover:text-textBrand/80 list-none"] do - faSprite_ "chevron-right" "regular" "h-4 w-4 mr-1 transition-transform group-open:rotate-90" - "View detailed payload changes" - div_ [class_ "mt-4 border border-strokeWeak rounded-lg overflow-hidden bg-bgRaised"] $ renderPayloadChanges issue.id issue.issueType issue.requestPayloads issue.responsePayloads -- Action buttons div_ [class_ "flex items-center gap-3 mt-4 pt-4 border-t border-strokeWeak"] do @@ -1052,53 +1023,6 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue span_ [class_ "leading-none"] $ if isArchived then "Unarchive" else "Archive" --- Render payload changes section -renderPayloadChanges :: Issues.IssueId -> Issues.IssueType -> Aeson [PayloadChange] -> Aeson [PayloadChange] -> Html () -renderPayloadChanges issueId issueType requestPayloads responsePayloads = - when (issueType == Issues.APIChange) do - let requestChanges = getAeson requestPayloads :: [Anomalies.PayloadChange] - let responseChanges = getAeson responsePayloads :: [Anomalies.PayloadChange] - - when (not (null requestChanges) || not (null responseChanges)) do - div_ [class_ "border border-strokeWeak rounded-lg overflow-hidden bg-bgRaised group/payloadtabs"] do - div_ [class_ "flex flex-col gap-2"] do - div_ [role_ "tablist", Aria.orientation_ "horizontal", class_ "text-muted-foreground h-9 items-center justify-center rounded-xl p-[3px] w-full grid grid-cols-2 bg-fillWeak"] do - label_ - [ role_ "tab" - , class_ "h-[calc(100%-1px)] flex-1 justify-center rounded-xl border border-transparent px-2 py-1 text-sm font-medium whitespace-nowrap transition-all flex items-center gap-2 cursor-pointer has-[:checked]:bg-bgRaised 
has-[:checked]:text-textStrong bg-transparent text-textWeak" - ] - do - input_ [type_ "radio", name_ ("payload-tab-" <> Issues.issueIdText issueId), class_ "hidden payload-tab-response", checked_] - faSprite_ "arrow-right" "regular" "w-4 h-4" - span_ [] $ "Response Payloads (" <> show (length responseChanges) <> ")" - - label_ - [ role_ "tab" - , class_ "h-[calc(100%-1px)] flex-1 justify-center rounded-xl border border-transparent px-2 py-1 text-sm font-medium whitespace-nowrap transition-all flex items-center gap-2 cursor-pointer has-[:checked]:bg-bgRaised has-[:checked]:text-textStrong bg-transparent text-textWeak" - ] - do - input_ [type_ "radio", name_ ("payload-tab-" <> Issues.issueIdText issueId), class_ "hidden payload-tab-request"] - faSprite_ "arrow-right" "regular" "w-4 h-4 rotate-180" - span_ [] $ "Request Payloads (" <> show (length requestChanges) <> ")" - - div_ - [ role_ "tabpanel" - , class_ "flex-1 outline-none p-4 space-y-4 hidden group-has-[.payload-tab-response:checked]/payloadtabs:block" - ] - do - if null responseChanges - then div_ [class_ "text-center py-8 text-textWeak"] "No response payload changes" - else forM_ responseChanges (renderPayloadChange True) - div_ - [ role_ "tabpanel" - , class_ "flex-1 outline-none p-4 space-y-4 hidden group-has-[.payload-tab-request:checked]/payloadtabs:block" - ] - do - if null requestChanges - then div_ [class_ "text-center py-8 text-textWeak"] "No request payload changes" - else forM_ requestChanges (renderPayloadChange False) - - -- Render individual payload change renderPayloadChange :: Bool -> Anomalies.PayloadChange -> Html () renderPayloadChange isResponse change = @@ -1264,7 +1188,7 @@ issueTypeBadge issueType critical = badge cls icon txt (cls, icon, txt) = case issueType of Issues.RuntimeException -> ("bg-fillError-strong", "triangle-alert", "ERROR") Issues.QueryAlert -> ("bg-fillWarning-strong", "zap", "ALERT") - Issues.APIChange + _ | critical -> ("bg-fillError-strong", "exclamation-triangle", "BREAKING") | otherwise -> ("bg-fillInformation-strong", "info", "Incremental") badge c i t = span_ [class_ $ "badge " <> c] do faSprite_ i "regular" "w-3 h-3"; t diff --git a/src/Pages/Reports.hs b/src/Pages/Reports.hs index 58cf016f9..7731f4f72 100644 --- a/src/Pages/Reports.hs +++ b/src/Pages/Reports.hs @@ -274,7 +274,7 @@ singleReportPage pid report = span_ [class_ "h-3 w-3 rounded bg-yellow-500"] pass span_ [class_ "text-xs"] "Monitor alerts" let totalAnomalies = length v.issues - (errTotal, apiTotal, qTotal) = L.foldl (\(e, a, m) x -> (e + if x.issueType == Issues.RuntimeException then 1 else 0, a + if x.issueType == Issues.APIChange then 1 else 0, m + if x.issueType == Issues.QueryAlert then 1 else 0)) (0, 0, 0) v.issues + (errTotal, apiTotal, qTotal) = L.foldl (\(e, a, m) x -> (e + if x.issueType == Issues.RuntimeException then 1 else 0, a + if x.issueType == Issues.NewEndpoint then 1 else 0, m + if x.issueType == Issues.QueryAlert then 1 else 0)) (0, 0, 0) v.issues div_ [class_ "w-full h-3 rounded overflow-x-hidden bg-fillWeak"] do when (totalAnomalies > 0) do div_ [class_ "h-full bg-fillError-strong", style_ $ "width: " <> show (errTotal `div` totalAnomalies * 100) <> "%"] pass diff --git a/static/migrations/0028_rebuild_issues_table.sql b/static/migrations/0028_rebuild_issues_table.sql index a56922fcb..cc0b3670b 100644 --- a/static/migrations/0028_rebuild_issues_table.sql +++ b/static/migrations/0028_rebuild_issues_table.sql @@ -31,7 +31,6 @@ CREATE TABLE apis.issues ( last_seen_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), 
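    -- Hedged sketch: with resolved_at removed below, an "open" issue is presumably
    -- one where neither of the remaining lifecycle timestamps is set, e.g.
    --   SELECT id FROM apis.issues
    --   WHERE acknowledged_at IS NULL AND archived_at IS NULL;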
acknowledged_at TIMESTAMPTZ, acknowledged_by UUID REFERENCES users.users(id) ON DELETE SET NULL, - resolved_at TIMESTAMPTZ, recommended_action TEXT NOT NULL DEFAULT '', migration_complexity TEXT NOT NULL DEFAULT 'n/a', -- 'low', 'medium', 'high', 'n/a' request_payloads JSONB NOT NULL DEFAULT '[]'::jsonb, diff --git a/static/migrations/0032_drop_issue_index.sql b/static/migrations/0032_drop_issue_index.sql new file mode 100644 index 000000000..1404b9af4 --- /dev/null +++ b/static/migrations/0032_drop_issue_index.sql @@ -0,0 +1,4 @@ +BEGIN; + DROP INDEX IF EXISTS apis.idx_issues_unresolved; + DROP INDEX IF EXISTS apis.idx_issues_unique_open; +COMMIT; \ No newline at end of file From be5e7a1bed709de54e8f09ad0fe4987b99b9ea04 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sat, 10 Jan 2026 20:35:35 +0000 Subject: [PATCH 41/71] Auto-format code with fourmolu --- src/BackgroundJobs.hs | 1 + src/Models/Apis/Issues.hs | 28 +++++++++++++++++++--------- src/Pages/Anomalies.hs | 5 ++--- 3 files changed, 22 insertions(+), 12 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index c91f52b20..e1b45589e 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -1121,6 +1121,7 @@ sendReportForProject pid rType = do emailQueryMonitorAlert :: Monitors.QueryMonitorEvaled -> CI.CI Text -> Maybe Users.User -> ATBackgroundCtx () emailQueryMonitorAlert monitorE@Monitors.QueryMonitorEvaled{alertConfig} email userM = whenJust userM (const pass) + -- | Group anomalies by endpoint hash groupAnomaliesByEndpointHash :: V.Vector Anomalies.AnomalyVM -> [(Text, V.Vector Anomalies.AnomalyVM)] groupAnomaliesByEndpointHash anomalies = diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index 1c5d3e5d8..f3432f901 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -706,15 +706,24 @@ mkIssue :: AE.ToJSON a => Projects.ProjectId -> IssueType - -> Text -- ^ targetHash - -> Text -- ^ endpointHash - -> Maybe Text -- ^ service - -> Bool -- ^ critical - -> Text -- ^ severity - -> Text -- ^ title - -> Text -- ^ recommendedAction - -> Text -- ^ migrationComplexity - -> a -- ^ issueData + -> Text + -- ^ targetHash + -> Text + -- ^ endpointHash + -> Maybe Text + -- ^ service + -> Bool + -- ^ critical + -> Text + -- ^ severity + -> Text + -- ^ title + -> Text + -- ^ recommendedAction + -> Text + -- ^ migrationComplexity + -> a + -- ^ issueData -> IO Issue mkIssue projectId issueType targetHash endpointHash service critical severity title recommendedAction migrationComplexity issueData = do issueId <- UUIDId <$> UUID4.nextRandom @@ -747,6 +756,7 @@ mkIssue projectId issueType targetHash endpointHash service critical severity ti , llmEnhancementVersion = Nothing } + -- | Create Query Alert issue createQueryAlertIssue :: Projects.ProjectId -> Text -> Text -> Text -> Double -> Double -> Text -> IO Issue createQueryAlertIssue projectId queryId queryName queryExpr threshold actual thresholdType = do diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index 20bb69aaa..9e4c37b11 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -437,7 +437,7 @@ buildAIContext issue errM trDataM spans = , Just $ "- **Title**: " <> issue.title , Just $ "- **Type**: " <> show issue.issueType , Just $ "- **Severity**: " <> issue.severity - , Just $ "- **Service**: " <> fromMaybe "" issue.service + , Just $ "- **Service**: " <> fromMaybe "" issue.service , Just $ "- **Affected Requests**: 0" , Just $ "- **Affected Clients**: 0" , Just $ "- **Recommended Action**: 
" <> issue.recommendedAction @@ -940,7 +940,7 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue -- Time since span_ [class_ "text-textWeak"] $ toHtml timeSinceString - -- Statistics row (only for API changes) + -- Statistics row (only for API changes) let allChanges = getAeson issue.requestPayloads <> getAeson issue.responsePayloads :: [Anomalies.PayloadChange] countChange (!b, !i, !t) c = case c.changeType of @@ -988,7 +988,6 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue -- Recommended action div_ [class_ "border-l-4 border-strokeBrand pl-4 mb-4"] $ p_ [class_ "text-sm text-textStrong leading-relaxed"] $ toHtml issue.recommendedAction - -- Action buttons div_ [class_ "flex items-center gap-3 mt-4 pt-4 border-t border-strokeWeak"] do button_ [class_ "inline-flex items-center justify-center whitespace-nowrap text-sm font-medium transition-all h-8 rounded-md gap-1.5 px-3 text-textBrand hover:text-textBrand/80 hover:bg-fillBrand-weak"] do From 8cfb141e857c34a4e0ef897ac89d236e61d555ad Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Sat, 10 Jan 2026 22:47:20 +0000 Subject: [PATCH 42/71] add .claude to .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 1201ca0e2..e6ae13d42 100644 --- a/.gitignore +++ b/.gitignore @@ -46,3 +46,4 @@ cabal.project.local stack-work tests.log tests_optimized.log +.claude \ No newline at end of file From 02e07c168169ef95890b7ff3ba8f04085cfdb0ea Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Sun, 11 Jan 2026 20:03:14 +0000 Subject: [PATCH 43/71] add extra shape and endpoint fields --- src/Models/Apis/Endpoints.hs | 7 +++++-- src/Models/Apis/Shapes.hs | 20 +++++++++++++++----- src/ProcessMessage.hs | 26 ++++++++++++++++++++++++++ 3 files changed, 46 insertions(+), 7 deletions(-) diff --git a/src/Models/Apis/Endpoints.hs b/src/Models/Apis/Endpoints.hs index 2568fe8f1..6d2922b70 100644 --- a/src/Models/Apis/Endpoints.hs +++ b/src/Models/Apis/Endpoints.hs @@ -91,8 +91,8 @@ bulkInsertEndpoints :: DB es => V.Vector Endpoint -> Eff es () bulkInsertEndpoints endpoints = void $ PG.executeMany q $ V.toList rowsToInsert where q = - [sql| INSERT INTO apis.endpoints (project_id, url_path, url_params, method, host, hash, outgoing) - VALUES (?, ?, ?, ?, ?, ?, ?) ON CONFLICT (hash) DO NOTHING; + [sql| INSERT INTO apis.endpoints (project_id, url_path, url_params, method, host, hash, outgoing, first_trace_id, recent_trace_id, service) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
ON CONFLICT (hash) DO NOTHING; |] rowsToInsert = endpoints <&> \endpoint -> @@ -103,6 +103,9 @@ bulkInsertEndpoints endpoints = void $ PG.executeMany q $ V.toList rowsToInsert , endpoint.host , endpoint.hash , endpoint.outgoing + , endpoint.firstTraceId + , endpoint.recentTraceId + , endpoint.service ) diff --git a/src/Models/Apis/Shapes.hs b/src/Models/Apis/Shapes.hs index c8eff4950..7527bafc5 100644 --- a/src/Models/Apis/Shapes.hs +++ b/src/Models/Apis/Shapes.hs @@ -62,6 +62,11 @@ data Shape = Shape , statusCode :: Int , responseDescription :: Text , requestDescription :: Text + , exampleRequestPayload :: AE.Value + , exampleResponsePayload :: AE.Value + , firstTraceId :: Maybe Text + , recentTraceId :: Maybe Text + , service :: Maybe Text } deriving stock (Generic, Show) deriving anyclass (Default, FromRow, NFData, ToRow) @@ -74,10 +79,10 @@ bulkInsertShapes :: DB es => V.Vector Shape -> Eff es () bulkInsertShapes shapes = void $ PG.executeMany q $ V.toList rowsToInsert where q = - [sql| + [sql| INSERT INTO apis.shapes - (project_id, endpoint_hash, query_params_keypaths, request_body_keypaths, response_body_keypaths, request_headers_keypaths, response_headers_keypaths, field_hashes, hash, status_code, request_description, response_description) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON CONFLICT DO NOTHING|] + (project_id, endpoint_hash, query_params_keypaths, request_body_keypaths, response_body_keypaths, request_headers_keypaths, response_headers_keypaths, field_hashes, hash, status_code, request_description, response_description, example_request_payload, example_response_payload, first_trace_id, recent_trace_id, service) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON CONFLICT DO NOTHING|] rowsToInsert = V.map ( \shape -> @@ -91,8 +96,13 @@ bulkInsertShapes shapes = void $ PG.executeMany q $ V.toList rowsToInsert , shape.fieldHashes , shape.hash , fromIntegral shape.statusCode - , "" - , "" + , shape.requestDescription + , shape.responseDescription + , shape.exampleRequestPayload + , shape.exampleResponsePayload + , shape.firstTraceId + , shape.recentTraceId + , shape.service ) ) shapes diff --git a/src/ProcessMessage.hs b/src/ProcessMessage.hs index 7ca1b5462..8fdd5ac3f 100644 --- a/src/ProcessMessage.hs +++ b/src/ProcessMessage.hs @@ -20,6 +20,7 @@ import Data.ByteString.Lazy.Char8 qualified as BL import Pkg.DeriveUtils (AesonText (..), UUIDId (..), unAesonTextMaybe) import Data.Cache qualified as Cache +import Data.Default (def) import Data.Effectful.UUID (UUIDEff) import Data.Effectful.UUID qualified as UUID import Data.HashMap.Strict qualified as HashMap @@ -290,6 +291,12 @@ processSpanToEntities pjc otelSpan dumpId = -- Determine if request is outgoing based on span kind !outgoing = otelSpan.kind == Just "client" + -- Extract trace ID from context for linking + !traceId = otelSpan.context >>= Telemetry.trace_id + + -- Extract service name from resource + !serviceName = Telemetry.atMapText "service.name" (unAesonTextMaybe otelSpan.resource) + -- Build endpoint if not in cache -- Only create endpoint entity if: -- 1. 
Not already in project cache (prevents duplicate anomalies) @@ -312,6 +319,20 @@ processSpanToEntities pjc otelSpan dumpId = , hash = endpointHash , outgoing = outgoing , description = "" + , firstTraceId = traceId + , recentTraceId = traceId + , service = serviceName + , baselineState = def + , baselineSamples = 0 + , baselineUpdatedAt = Nothing + , baselineErrorRateMean = Nothing + , baselineErrorRateStddev = Nothing + , baselineLatencyMean = Nothing + , baselineLatencyStddev = Nothing + , baselineLatencyP95 = Nothing + , baselineLatencyP99 = Nothing + , baselineVolumeHourlyMean = Nothing + , baselineVolumeHourlyStddev = Nothing } -- Build shape if not in cache @@ -340,6 +361,11 @@ processSpanToEntities pjc otelSpan dumpId = , statusCode = statusCode , responseDescription = "" , requestDescription = "" + , exampleRequestPayload = requestBody + , exampleResponsePayload = responseBody + , firstTraceId = traceId + , recentTraceId = traceId + , service = serviceName } !fields' = if statusCode == 404 then V.empty else fields From 3f115768a8317bde6ecba2f982389cce6a3a52f8 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Wed, 14 Jan 2026 07:59:40 +0000 Subject: [PATCH 44/71] manual enhancement title, desc, and criticality gen --- src/Models/Apis/Issues.hs | 24 +- src/Models/Apis/Issues/Enhancement.hs | 461 ++++++++++++++++++++------ src/Models/Apis/Shapes.hs | 12 +- 3 files changed, 365 insertions(+), 132 deletions(-) diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index f3432f901..bac5b7ed8 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -702,29 +702,7 @@ issueTypeToSourceType EndpointVolumeRateChange = "endpoint" -- | Helper to create an issue with common defaults -mkIssue - :: AE.ToJSON a - => Projects.ProjectId - -> IssueType - -> Text - -- ^ targetHash - -> Text - -- ^ endpointHash - -> Maybe Text - -- ^ service - -> Bool - -- ^ critical - -> Text - -- ^ severity - -> Text - -- ^ title - -> Text - -- ^ recommendedAction - -> Text - -- ^ migrationComplexity - -> a - -- ^ issueData - -> IO Issue +mkIssue :: AE.ToJSON a => Projects.ProjectId -> IssueType -> Text -> Text -> Maybe Text -> Bool -> Text -> Text -> Text -> Text -> a -> IO Issue mkIssue projectId issueType targetHash endpointHash service critical severity title recommendedAction migrationComplexity issueData = do issueId <- UUIDId <$> UUID4.nextRandom now <- getCurrentTime diff --git a/src/Models/Apis/Issues/Enhancement.hs b/src/Models/Apis/Issues/Enhancement.hs index 816cd3a82..0862c58f6 100644 --- a/src/Models/Apis/Issues/Enhancement.hs +++ b/src/Models/Apis/Issues/Enhancement.hs @@ -55,66 +55,54 @@ enhanceIssueWithLLM authCtx issue = do } --- | Generate an enhanced title using LLM +-- | Generate an enhanced title using LLM (or simple generator for supported types) generateEnhancedTitle :: ELLM.LLM :> es => AuthContext -> Issues.Issue -> Eff es (Either Text Text) generateEnhancedTitle authCtx issue = do - let prompt = buildTitlePrompt issue - result <- AI.callOpenAIAPIEff prompt authCtx.config.openaiApiKey - case result of - Left err -> pure $ Left err - Right r -> do - let response' = AI.getNormalTupleReponse r - case response' of - Left e -> pure $ Left e - Right (title, _) -> pure $ Right $ T.take 200 title -- Limit title length + -- First try simple title generation for supported types + case simpleTitle issue of + Just title -> pure $ Right title + Nothing -> do + -- Fall back to LLM for RuntimeException and QueryAlert + let prompt = buildTitlePrompt issue + result <- 
AI.callOpenAIAPIEff prompt authCtx.config.openaiApiKey + case result of + Left err -> pure $ Left err + Right r -> do + let response' = AI.getNormalTupleReponse r + case response' of + Left e -> pure $ Left e + Right (title, _) -> pure $ Right $ T.take 200 title -- Limit title length --- | Generate enhanced description with recommended actions +-- | Generate enhanced description with recommended actions (or simple generator for supported types) generateEnhancedDescription :: ELLM.LLM :> es => AuthContext -> Issues.Issue -> Eff es (Either Text (Text, Text, Text)) generateEnhancedDescription authCtx issue = do - let prompt = buildDescriptionPrompt issue - result <- AI.callOpenAIAPIEff prompt authCtx.config.openaiApiKey - case result of - Left err -> pure $ Left err - Right r -> do - let response' = AI.getNormalTupleReponse r - case response' of - Left e -> pure $ Left e - Right (response, _) -> do - let lines' = lines response - description = fromMaybe "" $ viaNonEmpty head lines' - recommendedAction = fromMaybe "Review the changes and update your integration accordingly." $ lines' !!? 1 - complexity = fromMaybe "medium" $ lines' !!? 2 - -- Note: Classification happens separately in the background job - pure $ Right (description, recommendedAction, complexity) + -- First try simple description generation for supported types + case simpleDescription issue of + Just result -> pure $ Right result + Nothing -> do + -- Fall back to LLM for RuntimeException and QueryAlert + let prompt = buildDescriptionPrompt issue + result <- AI.callOpenAIAPIEff prompt authCtx.config.openaiApiKey + case result of + Left err -> pure $ Left err + Right r -> do + let response' = AI.getNormalTupleReponse r + case response' of + Left e -> pure $ Left e + Right (response, _) -> do + let lines' = lines response + description = fromMaybe "" $ viaNonEmpty head lines' + recommendedAction = fromMaybe "Review the changes and update your integration accordingly." $ lines' !!? 1 + complexity = fromMaybe "medium" $ lines' !!? 2 + -- Note: Classification happens separately in the background job + pure $ Right (description, recommendedAction, complexity) -- | Build prompt for title generation buildTitlePrompt :: Issues.Issue -> Text buildTitlePrompt issue = let baseContext = case issue.issueType of - -- Issues.APIChange -> - -- let Aeson issueDataValue = issue.issueData - -- in case AE.fromJSON issueDataValue of - -- AE.Success (apiData :: Issues.APIChangeData) -> - -- "Generate a concise, descriptive title for this API change.\n" - -- <> "Endpoint: " - -- <> apiData.endpointMethod - -- <> " " - -- <> apiData.endpointPath - -- <> "\n" - -- <> "New fields: " - -- <> toText (show $ V.length apiData.newFields) - -- <> "\n" - -- <> "Deleted fields: " - -- <> toText (show $ V.length apiData.deletedFields) - -- <> "\n" - -- <> "Modified fields: " - -- <> toText (show $ V.length apiData.modifiedFields) - -- <> "\n" - -- <> "Service: " - -- <> issue.service - -- _ -> "Generate a concise title for this API change." 
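 -- With the commented-out APIChange branch gone, only RuntimeException and
 -- QueryAlert should reach this LLM prompt builder; simpleTitle returns Just for
 -- every other constructor. A rough sketch of the split, using hypothetical
 -- issue values for illustration only:
 --   simpleTitle issue{issueType = Issues.NewEndpoint}      -- Just "New endpoint discovered: ..."
 --   simpleTitle issue{issueType = Issues.RuntimeException} -- Nothing: falls through to the LLM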
Issues.RuntimeException -> let Aeson issueDataValue = issue.issueData in case AE.fromJSON issueDataValue of @@ -165,31 +153,6 @@ buildTitlePrompt issue = buildDescriptionPrompt :: Issues.Issue -> Text buildDescriptionPrompt issue = let baseContext = case issue.issueType of - -- Issues.APIChange -> - -- let Aeson issueDataValue = issue.issueData - -- in case AE.fromJSON issueDataValue of - -- AE.Success (apiData :: Issues.APIChangeData) -> - -- "Describe this API change and its impact.\n" - -- <> "Endpoint: " - -- <> apiData.endpointMethod - -- <> " " - -- <> apiData.endpointPath - -- <> "\n" - -- <> "New fields: " - -- <> toText (show $ V.toList apiData.newFields) - -- <> "\n" - -- <> "Deleted fields: " - -- <> toText (show $ V.toList apiData.deletedFields) - -- <> "\n" - -- <> "Modified fields: " - -- <> toText (show $ V.toList apiData.modifiedFields) - -- <> "\n" - -- <> "Total anomalies grouped: " - -- <> toText (show $ V.length apiData.anomalyHashes) - -- <> "\n" - -- <> "Service: " - -- <> issue.service - -- _ -> "Describe this API change and its implications." Issues.RuntimeException -> let Aeson issueDataValue = issue.issueData in case AE.fromJSON issueDataValue of @@ -256,44 +219,37 @@ buildDescriptionPrompt issue = in systemPrompt <> "\n\n" <> baseContext --- | Classify issue as critical/safe and count breaking/incremental changes +-- | Classify issue as critical/safe and count breaking/incremental changes (or simple classifier for supported types) classifyIssueCriticality :: ELLM.LLM :> es => AuthContext -> Issues.Issue -> Eff es (Either Text (Bool, Int, Int)) classifyIssueCriticality authCtx issue = do - let prompt = buildCriticalityPrompt issue - result' <- AI.callOpenAIAPIEff prompt authCtx.config.openaiApiKey - case result' of - Left err -> pure $ Left err - Right res -> do - let r = AI.getNormalTupleReponse res - case r of - Left e -> pure $ Left e - Right (response, _) -> do - let lines' = lines response - case lines' of - [criticalStr, breakingStr, incrementalStr] -> do - let isCritical = T.toLower criticalStr == "critical" - breakingCount = fromMaybe 0 $ readMaybe $ toString breakingStr - incrementalCount = fromMaybe 0 $ readMaybe $ toString incrementalStr - pure $ Right (isCritical, breakingCount, incrementalCount) - _ -> pure $ Left "Invalid response format from LLM" + -- First try simple criticality classification for supported types + case simpleCriticality issue of + Just result -> pure $ Right result + Nothing -> do + -- Fall back to LLM for RuntimeException and QueryAlert + let prompt = buildCriticalityPrompt issue + result' <- AI.callOpenAIAPIEff prompt authCtx.config.openaiApiKey + case result' of + Left err -> pure $ Left err + Right res -> do + let r = AI.getNormalTupleReponse res + case r of + Left e -> pure $ Left e + Right (response, _) -> do + let lines' = lines response + case lines' of + [criticalStr, breakingStr, incrementalStr] -> do + let isCritical = T.toLower criticalStr == "critical" + breakingCount = fromMaybe 0 $ readMaybe $ toString breakingStr + incrementalCount = fromMaybe 0 $ readMaybe $ toString incrementalStr + pure $ Right (isCritical, breakingCount, incrementalCount) + _ -> pure $ Left "Invalid response format from LLM" -- | Build prompt for criticality classification buildCriticalityPrompt :: Issues.Issue -> Text buildCriticalityPrompt issue = let context = case issue.issueType of - -- Issues.APIChange -> - -- let Aeson issueDataValue = issue.issueData - -- in case AE.fromJSON issueDataValue of - -- AE.Success (apiData :: 
Issues.APIChangeData) -> - -- unlines - -- [ "API change detected" - -- , "Endpoint: " <> apiData.endpointMethod <> " " <> apiData.endpointPath - -- , "New fields: " <> toText (show $ V.length apiData.newFields) - -- , "Deleted fields: " <> toText (show $ V.length apiData.deletedFields) - -- , "Modified fields: " <> toText (show $ V.length apiData.modifiedFields) - -- ] - -- _ -> "API change: " <> issue.title Issues.RuntimeException -> "Runtime exception: " <> issue.title Issues.QueryAlert -> @@ -331,3 +287,302 @@ updateIssueClassification issueId isCritical breakingCount incrementalCount = do | breakingCount > 0 = "warning" | otherwise = "info" Issues.updateIssueCriticality issueId isCritical severity + + +-- | Generate a simple title for issue types that don't need LLM +simpleTitle :: Issues.Issue -> Maybe Text +simpleTitle issue = case issue.issueType of + Issues.NewEndpoint -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.NewEndpointData) -> + Just $ "New endpoint discovered: " <> d.endpointMethod <> " " <> d.endpointPath + _ -> Just "New endpoint discovered" + Issues.NewShape -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.NewShapeData) -> + let changes = V.length d.newFields + V.length d.deletedFields + V.length d.modifiedFields + in Just $ "New response shape detected on " <> d.endpointMethod <> " " <> d.endpointPath <> " (" <> toText (show changes) <> " field changes)" + _ -> Just "New response shape detected" + Issues.FieldChange -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.FieldChangeData) -> + Just $ "Field " <> d.changeType <> " at " <> d.keyPath <> " on " <> d.endpointMethod <> " " <> d.endpointPath + _ -> Just "Field change detected" + Issues.LogPattern -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.LogPatternData) -> + let level = fromMaybe "LOG" d.logLevel + in Just $ "New " <> level <> " pattern detected" <> maybe "" (" in " <>) d.serviceName + _ -> Just "New log pattern detected" + Issues.ErrorEscalating -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.ErrorEscalatingData) -> + Just $ "Error escalating: " <> T.take 60 d.exceptionType <> " (" <> toText (show d.escalationRate) <> "x in " <> d.escalationWindow <> ")" + _ -> Just "Error rate escalating" + Issues.ErrorRegressed -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.ErrorRegressedData) -> + Just $ "Error regressed: " <> T.take 80 d.exceptionType + _ -> Just "Previously resolved error has regressed" + Issues.LogPatternRateChange -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.LogPatternRateChangeData) -> + let direction = if d.changeDirection == "spike" then "spike" else "drop" + in Just $ "Log pattern " <> direction <> ": " <> toText (show (round d.changePercent :: Int)) <> "% change" + _ -> Just "Log pattern rate change detected" + Issues.EndpointLatencyDegradation -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointLatencyDegradationData) -> + Just $ "Latency degradation on " <> d.endpointMethod <> " " <> d.endpointPath <> " (" <> d.percentile <> " +" <> toText (show (round d.degradationPercent :: Int)) <> "%)" + _ -> Just "Endpoint latency degradation detected" + Issues.EndpointErrorRateSpike -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointErrorRateSpikeData) -> + Just $ "Error rate spike on " <> d.endpointMethod <> " " <> 
d.endpointPath <> " (" <> toText (show (round (d.currentErrorRate * 100) :: Int)) <> "% error rate)" + _ -> Just "Endpoint error rate spike detected" + Issues.EndpointVolumeRateChange -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointVolumeRateChangeData) -> + let direction = if d.changeDirection == "spike" then "Traffic spike" else "Traffic drop" + in Just $ direction <> " on " <> d.endpointMethod <> " " <> d.endpointPath <> " (" <> toText (show (round d.changePercent :: Int)) <> "%)" + _ -> Just "Endpoint traffic volume change detected" + -- LLM-based types return Nothing to use LLM + Issues.RuntimeException -> Nothing + Issues.QueryAlert -> Nothing + + +-- | Generate a simple description for issue types that don't need LLM +-- Returns (description, recommendedAction, complexity) +simpleDescription :: Issues.Issue -> Maybe (Text, Text, Text) +simpleDescription issue = case issue.issueType of + Issues.NewEndpoint -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.NewEndpointData) -> + Just + ( "A new endpoint " <> d.endpointMethod <> " " <> d.endpointPath <> " was discovered on host " <> d.endpointHost <> ". This endpoint was not previously tracked." + , "Review the endpoint to ensure it's expected and properly documented." + , "low" + ) + _ -> Just ("A new endpoint was discovered.", "Review the endpoint.", "low") + Issues.NewShape -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.NewShapeData) -> + let newCount = V.length d.newFields + deletedCount = V.length d.deletedFields + modifiedCount = V.length d.modifiedFields + complexity + | deletedCount > 0 = "high" + | modifiedCount > 0 = "medium" + | otherwise = "low" + in Just + ( "New response shape detected on " <> d.endpointMethod <> " " <> d.endpointPath <> " (status " <> toText (show d.statusCode) <> "). " + <> "New fields: " + <> toText (show newCount) + <> ", deleted fields: " + <> toText (show deletedCount) + <> ", modified fields: " + <> toText (show modifiedCount) + <> "." + , "Review the schema changes and update API clients if necessary." + , complexity + ) + _ -> Just ("A new response shape was detected.", "Review the schema changes.", "medium") + Issues.FieldChange -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.FieldChangeData) -> + let complexity = case d.changeType of + "deleted" -> "high" + "type_changed" -> "high" + "added" -> "low" + _ -> "medium" + typeInfo = case d.previousType of + Just prev -> " Changed from " <> prev <> " to " <> d.newType <> "." + Nothing -> " New type: " <> d.newType <> "." + in Just + ( "Field " <> d.changeType <> " detected at path '" <> d.keyPath <> "' on " <> d.endpointMethod <> " " <> d.endpointPath <> "." <> typeInfo + , "Update API clients to handle this field change." + , complexity + ) + _ -> Just ("A field change was detected.", "Review the field change.", "medium") + Issues.LogPattern -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.LogPatternData) -> + let level = fromMaybe "LOG" d.logLevel + svc = fromMaybe "unknown service" d.serviceName + in Just + ( "New " <> level <> " log pattern detected in " <> svc <> ". Pattern: " <> T.take 200 d.logPattern <> ". Occurrences: " <> toText (show d.occurrenceCount) <> "." + , "Investigate the log pattern to determine if action is needed." 
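+            -- the triple is (description, recommendedAction, complexity); log
+            -- patterns are informational, hence the assumed "low" below (a
+            -- convention of this module, not enforced by the types)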
+ , "low" + ) + _ -> Just ("A new log pattern was detected.", "Review the log pattern.", "low") + Issues.ErrorEscalating -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.ErrorEscalatingData) -> + let svc = fromMaybe "unknown service" d.serviceName + in Just + ( "Error '" <> T.take 100 d.exceptionType <> "' is escalating in " <> svc <> ". " + <> "Rate increased " + <> toText (show d.escalationRate) + <> "x over the last " + <> d.escalationWindow + <> ". " + <> "Last hour: " + <> toText (show d.occurrences1h) + <> " occurrences, last 24h: " + <> toText (show d.occurrences24h) + <> "." + , "Investigate and fix the root cause urgently to prevent further escalation." + , "high" + ) + _ -> Just ("An error is escalating.", "Investigate immediately.", "high") + Issues.ErrorRegressed -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.ErrorRegressedData) -> + let svc = fromMaybe "unknown service" d.serviceName + quietDays = d.quietPeriodMinutes `div` 1440 + quietHours = (d.quietPeriodMinutes `mod` 1440) `div` 60 + quietStr + | quietDays > 0 = toText (show quietDays) <> " days" + | otherwise = toText (show quietHours) <> " hours" + in Just + ( "Previously resolved error '" <> T.take 100 d.exceptionType <> "' has regressed in " <> svc <> ". " + <> "It was quiet for " + <> quietStr + <> " before reappearing. " + <> "New occurrences: " + <> toText (show d.newOccurrences) + <> "." + , "Investigate why this error has returned after being resolved." + , "high" + ) + _ -> Just ("A previously resolved error has regressed.", "Investigate the regression.", "high") + Issues.LogPatternRateChange -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.LogPatternRateChangeData) -> + let svc = fromMaybe "unknown service" d.serviceName + direction = if d.changeDirection == "spike" then "spiked" else "dropped" + complexity = if d.changeDirection == "spike" then "medium" else "low" + in Just + ( "Log pattern rate " <> direction <> " in " <> svc <> ". " + <> "Current rate: " + <> toText (show (round d.currentRatePerHour :: Int)) + <> "/hour (baseline: " + <> toText (show (round d.baselineMean :: Int)) + <> "/hour). " + <> "Change: " + <> toText (show (round d.changePercent :: Int)) + <> "%, z-score: " + <> toText (show (round d.zScore :: Int)) + <> "." + , "Review the log volume change to determine if it indicates an issue." + , complexity + ) + _ -> Just ("Log pattern rate changed significantly.", "Review the change.", "medium") + Issues.EndpointLatencyDegradation -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointLatencyDegradationData) -> + let svc = maybe "" (" in " <>) d.serviceName + in Just + ( "Latency degradation detected on " <> d.endpointMethod <> " " <> d.endpointPath <> svc <> ". " + <> d.percentile + <> " latency increased from " + <> toText (show (round d.baselineLatencyMs :: Int)) + <> "ms to " + <> toText (show (round d.currentLatencyMs :: Int)) + <> "ms (+" + <> toText (show (round d.degradationPercent :: Int)) + <> "%)." + , "Profile the endpoint to identify performance bottlenecks." + , "medium" + ) + _ -> Just ("Endpoint latency has degraded.", "Investigate performance issues.", "medium") + Issues.EndpointErrorRateSpike -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointErrorRateSpikeData) -> + let svc = maybe "" (" in " <>) d.serviceName + in Just + ( "Error rate spike on " <> d.endpointMethod <> " " <> d.endpointPath <> svc <> ". 
" + <> "Current error rate: " + <> toText (show (round (d.currentErrorRate * 100) :: Int)) + <> "% (" + <> toText (show d.errorCount) + <> " errors out of " + <> toText (show d.totalRequests) + <> " requests). " + <> "Baseline: " + <> toText (show (round (d.baselineErrorRate * 100) :: Int)) + <> "%." + , "Investigate the error spike and fix the underlying issues." + , "high" + ) + _ -> Just ("Endpoint error rate has spiked.", "Investigate errors immediately.", "high") + Issues.EndpointVolumeRateChange -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointVolumeRateChangeData) -> + let svc = maybe "" (" in " <>) d.serviceName + direction = if d.changeDirection == "spike" then "spiked" else "dropped" + complexity = if d.changeDirection == "spike" then "medium" else "medium" + in Just + ( "Traffic volume " <> direction <> " on " <> d.endpointMethod <> " " <> d.endpointPath <> svc <> ". " + <> "Current rate: " + <> toText (show (round d.currentRatePerHour :: Int)) + <> " req/hour (baseline: " + <> toText (show (round d.baselineRatePerHour :: Int)) + <> " req/hour). " + <> "Change: " + <> toText (show (round d.changePercent :: Int)) + <> "%." + , "Review traffic patterns to determine if this is expected or indicates an issue." + , complexity + ) + _ -> Just ("Endpoint traffic volume changed significantly.", "Review traffic patterns.", "medium") + -- LLM-based types return Nothing to use LLM + Issues.RuntimeException -> Nothing + Issues.QueryAlert -> Nothing + + +-- | Simple criticality classification for non-LLM issue types +-- Returns (isCritical, breakingCount, incrementalCount) +simpleCriticality :: Issues.Issue -> Maybe (Bool, Int, Int) +simpleCriticality issue = case issue.issueType of + Issues.NewEndpoint -> Just (False, 0, 1) + Issues.NewShape -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.NewShapeData) -> + let deletedCount = V.length d.deletedFields + modifiedCount = V.length d.modifiedFields + newCount = V.length d.newFields + isCritical = deletedCount > 0 || modifiedCount > 0 + in Just (isCritical, deletedCount + modifiedCount, newCount) + _ -> Just (False, 0, 1) + Issues.FieldChange -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.FieldChangeData) -> + let isCritical = d.changeType `elem` ["deleted", "type_changed"] + in Just (isCritical, if isCritical then 1 else 0, if isCritical then 0 else 1) + _ -> Just (False, 0, 1) + Issues.LogPattern -> Just (False, 0, 1) + Issues.ErrorEscalating -> Just (True, 1, 0) + Issues.ErrorRegressed -> Just (True, 1, 0) + Issues.LogPatternRateChange -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.LogPatternRateChangeData) -> + let isCritical = d.changeDirection == "spike" && d.zScore > 5 + in Just (isCritical, if isCritical then 1 else 0, 1) + _ -> Just (False, 0, 1) + Issues.EndpointLatencyDegradation -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointLatencyDegradationData) -> + let isCritical = d.degradationPercent > 100 || d.zScore > 5 + in Just (isCritical, if isCritical then 1 else 0, 1) + _ -> Just (False, 0, 1) + Issues.EndpointErrorRateSpike -> Just (True, 1, 0) + Issues.EndpointVolumeRateChange -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointVolumeRateChangeData) -> + let isCritical = d.changeDirection == "drop" && d.changePercent < -50 + in Just (isCritical, if isCritical then 1 else 0, 1) + _ -> Just (False, 0, 1) + -- LLM-based types return 
+  Issues.RuntimeException -> Nothing
+  Issues.QueryAlert -> Nothing
diff --git a/src/Models/Apis/Shapes.hs b/src/Models/Apis/Shapes.hs
index 7527bafc5..9b895ba59 100644
--- a/src/Models/Apis/Shapes.hs
+++ b/src/Models/Apis/Shapes.hs
@@ -157,12 +157,12 @@ getShapeForIssue pid hash = listToMaybe <$> PG.query q (Only hash)
          COALESCE(e.method, 'UNKNOWN'),
          COALESCE(e.url_path, '/'),
          s.status_code,
-          COALESCE(s.example_request_payload, '{}'::jsonb),
-          COALESCE(s.example_response_payload, '{}'::jsonb),
-          COALESCE(s.new_unique_fields, '{}'::TEXT[]),
-          COALESCE(s.deleted_fields, '{}'::TEXT[]),
-          COALESCE(s.updated_field_formats, '{}'::TEXT[]),
-          COALESCE(s.field_hashes, '{}'::TEXT[])
+          s.example_request_payload,
+          s.example_response_payload,
+          s.new_unique_fields,
+          s.deleted_fields,
+          s.updated_field_formats,
+          s.field_hashes
        FROM apis.shapes s
        LEFT JOIN apis.endpoints e ON e.hash = s.endpoint_hash
        WHERE s.project_id = ? AND s.hash = ?

From dc38fc4968800aaa555d7fa37998f51f3b6da60a Mon Sep 17 00:00:00 2001
From: Yussif Mohammed
Date: Wed, 14 Jan 2026 11:22:38 +0000
Subject: [PATCH 45/71] handle all anomaly types in ui

---
 src/Pages/Anomalies.hs | 305 ++++++++++++++++++++++++++++++++++++++---
 src/Pages/Reports.hs   |  11 +-
 2 files changed, 296 insertions(+), 20 deletions(-)

diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs
index 9e4c37b11..f6ff73036 100644
--- a/src/Pages/Anomalies.hs
+++ b/src/Pages/Anomalies.hs
@@ -361,6 +361,169 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do
              div_ [class_ "bg-fillInformation-weak border border-strokeInformation-weak rounded-lg p-3 text-sm font-mono text-fillInformation-strong max-w-2xl overflow-x-auto"] $ toHtml alertData.queryExpression
          _ -> pass
+      Issues.NewEndpoint ->
+        case AE.fromJSON (getAeson issue.issueData) of
+          AE.Success (d :: Issues.NewEndpointData) -> do
+            div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do
+              statBox_ (Just pid) Nothing "Method" "" d.endpointMethod Nothing Nothing
+              statBox_ (Just pid) Nothing "Path" "" d.endpointPath Nothing Nothing
+              statBox_ (Just pid) Nothing "Host" "" d.endpointHost Nothing Nothing
+              timeStatBox_ "First Seen" $ prettyTimeAuto now d.firstSeenAt
+          _ -> pass
+      Issues.NewShape ->
+        case AE.fromJSON (getAeson issue.issueData) of
+          AE.Success (d :: Issues.NewShapeData) -> do
+            div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do
+              statBox_ (Just pid) Nothing "Endpoint" "" (d.endpointMethod <> " " <> d.endpointPath) Nothing Nothing
+              statBox_ (Just pid) Nothing "Status Code" "" (show d.statusCode) Nothing Nothing
+              statBox_ (Just pid) Nothing "New Fields" "" (show $ V.length d.newFields) Nothing Nothing
+              statBox_ (Just pid) Nothing "Deleted Fields" "" (show $ V.length d.deletedFields) Nothing Nothing
+            unless (V.null d.newFields) do
+              div_ [class_ "surface-raised rounded-2xl overflow-hidden mb-4"] do
+                div_ [class_ "px-4 py-3 border-b border-strokeWeak"] do
+                  span_ [class_ "text-sm font-medium text-textStrong"] "New Fields"
+                div_ [class_ "p-4"] do
+                  ul_ [class_ "list-disc list-inside text-sm text-textWeak"] do
+                    forM_ d.newFields $ \f -> li_ [] $ toHtml f
+            unless (V.null d.deletedFields) do
+              div_ [class_ "surface-raised rounded-2xl overflow-hidden mb-4"] do
+                div_ [class_ "px-4 py-3 border-b border-strokeWeak"] do
+                  span_ [class_ "text-sm font-medium text-textError"] "Deleted Fields"
+                div_ [class_ "p-4"] do
+                  ul_ [class_ "list-disc list-inside text-sm text-textWeak"] do
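+                    -- renders one <li> per deleted field, mirroring the New Fields list above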
forM_ d.deletedFields $ \f -> li_ [] $ toHtml f + _ -> pass + Issues.FieldChange -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.FieldChangeData) -> do + div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do + statBox_ (Just pid) Nothing "Endpoint" "" (d.endpointMethod <> " " <> d.endpointPath) Nothing Nothing + statBox_ (Just pid) Nothing "Field Path" "" d.keyPath Nothing Nothing + statBox_ (Just pid) Nothing "Change Type" "" d.changeType Nothing Nothing + statBox_ (Just pid) Nothing "New Type" "" d.newType Nothing Nothing + whenJust d.previousType $ \prev -> do + div_ [class_ "mb-4 p-4 bg-fillWeaker rounded-lg border border-strokeWeak"] do + span_ [class_ "text-sm text-textWeak"] "Previous type: " + span_ [class_ "text-sm font-mono text-textStrong"] $ toHtml prev + _ -> pass + Issues.LogPattern -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.LogPatternData) -> do + div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do + statBox_ (Just pid) Nothing "Log Level" "" (fromMaybe "Unknown" d.logLevel) Nothing Nothing + statBox_ (Just pid) Nothing "Service" "" (fromMaybe "Unknown" d.serviceName) Nothing Nothing + statBox_ (Just pid) Nothing "Occurrences" "" (show d.occurrenceCount) Nothing Nothing + timeStatBox_ "First Seen" $ prettyTimeAuto now d.firstSeenAt + div_ [class_ "surface-raised rounded-2xl overflow-hidden mb-4"] do + div_ [class_ "px-4 py-3 border-b border-strokeWeak"] do + span_ [class_ "text-sm font-medium text-textStrong"] "Log Pattern" + div_ [class_ "p-4"] do + pre_ [class_ "text-sm text-textWeak font-mono whitespace-pre-wrap"] $ toHtml d.logPattern + whenJust d.sampleMessage $ \msg -> do + div_ [class_ "surface-raised rounded-2xl overflow-hidden mb-4"] do + div_ [class_ "px-4 py-3 border-b border-strokeWeak"] do + span_ [class_ "text-sm font-medium text-textStrong"] "Sample Message" + div_ [class_ "p-4"] do + pre_ [class_ "text-sm text-textWeak font-mono whitespace-pre-wrap"] $ toHtml msg + _ -> pass + Issues.ErrorEscalating -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.ErrorEscalatingData) -> do + div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do + statBox_ (Just pid) Nothing "Escalation Rate" "" (show d.escalationRate <> "x") Nothing Nothing + statBox_ (Just pid) Nothing "Window" "" d.escalationWindow Nothing Nothing + statBox_ (Just pid) Nothing "Last Hour" "" (show d.occurrences1h) Nothing Nothing + statBox_ (Just pid) Nothing "Last 24h" "" (show d.occurrences24h) Nothing Nothing + div_ [class_ "surface-raised rounded-2xl overflow-hidden mb-4"] do + div_ [class_ "px-4 py-3 border-b border-strokeWeak"] do + span_ [class_ "text-sm font-medium text-textStrong"] "Error Details" + div_ [class_ "p-4 flex flex-col gap-2"] do + div_ [] do + span_ [class_ "text-sm text-textWeak"] "Exception: " + span_ [class_ "text-sm font-mono text-textStrong"] $ toHtml d.exceptionType + div_ [] do + span_ [class_ "text-sm text-textWeak"] "Message: " + span_ [class_ "text-sm text-textStrong"] $ toHtml d.errorMessage + whenJust d.serviceName $ \svc -> do + div_ [] do + span_ [class_ "text-sm text-textWeak"] "Service: " + span_ [class_ "text-sm text-textStrong"] $ toHtml svc + _ -> pass + Issues.ErrorRegressed -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.ErrorRegressedData) -> do + let quietDays = d.quietPeriodMinutes `div` 1440 + quietHours = (d.quietPeriodMinutes `mod` 1440) `div` 60 + quietStr = if quietDays > 0 then show quietDays <> " days" else 
show quietHours <> " hours" + div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do + statBox_ (Just pid) Nothing "Quiet Period" "" quietStr Nothing Nothing + statBox_ (Just pid) Nothing "Previous Occurrences" "" (show d.previousOccurrences) Nothing Nothing + statBox_ (Just pid) Nothing "New Occurrences" "" (show d.newOccurrences) Nothing Nothing + timeStatBox_ "Regressed At" $ prettyTimeAuto now d.regressedAt + div_ [class_ "surface-raised rounded-2xl overflow-hidden mb-4"] do + div_ [class_ "px-4 py-3 border-b border-strokeWeak"] do + span_ [class_ "text-sm font-medium text-textStrong"] "Error Details" + div_ [class_ "p-4 flex flex-col gap-2"] do + div_ [] do + span_ [class_ "text-sm text-textWeak"] "Exception: " + span_ [class_ "text-sm font-mono text-textStrong"] $ toHtml d.exceptionType + div_ [] do + span_ [class_ "text-sm text-textWeak"] "Message: " + span_ [class_ "text-sm text-textStrong"] $ toHtml d.errorMessage + _ -> pass + Issues.LogPatternRateChange -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.LogPatternRateChangeData) -> do + div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do + statBox_ (Just pid) Nothing "Direction" "" d.changeDirection Nothing Nothing + statBox_ (Just pid) Nothing "Change" "" (show (round d.changePercent :: Int) <> "%") Nothing Nothing + statBox_ (Just pid) Nothing "Current Rate" "" (show (round d.currentRatePerHour :: Int) <> "/hr") Nothing Nothing + statBox_ (Just pid) Nothing "Baseline" "" (show (round d.baselineMean :: Int) <> "/hr") Nothing Nothing + div_ [class_ "surface-raised rounded-2xl overflow-hidden mb-4"] do + div_ [class_ "px-4 py-3 border-b border-strokeWeak"] do + span_ [class_ "text-sm font-medium text-textStrong"] "Log Pattern" + div_ [class_ "p-4"] do + pre_ [class_ "text-sm text-textWeak font-mono whitespace-pre-wrap"] $ toHtml d.logPattern + _ -> pass + Issues.EndpointLatencyDegradation -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointLatencyDegradationData) -> do + div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do + statBox_ (Just pid) Nothing "Endpoint" "" (d.endpointMethod <> " " <> d.endpointPath) Nothing Nothing + statBox_ (Just pid) Nothing "Percentile" "" d.percentile Nothing Nothing + statBox_ (Just pid) Nothing "Current Latency" "" (show (round d.currentLatencyMs :: Int) <> "ms") Nothing Nothing + statBox_ (Just pid) Nothing "Baseline" "" (show (round d.baselineLatencyMs :: Int) <> "ms") Nothing Nothing + div_ [class_ "p-4 bg-fillWarning-weak rounded-lg border border-strokeWarning-weak mb-4"] do + span_ [class_ "text-sm text-fillWarning-strong font-medium"] $ toHtml $ "Latency increased by " <> show (round d.degradationPercent :: Int) <> "% (z-score: " <> show (round d.zScore :: Int) <> ")" + _ -> pass + Issues.EndpointErrorRateSpike -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointErrorRateSpikeData) -> do + div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do + statBox_ (Just pid) Nothing "Endpoint" "" (d.endpointMethod <> " " <> d.endpointPath) Nothing Nothing + statBox_ (Just pid) Nothing "Error Rate" "" (show (round (d.currentErrorRate * 100) :: Int) <> "%") Nothing Nothing + statBox_ (Just pid) Nothing "Errors" "" (show d.errorCount <> "/" <> show d.totalRequests) Nothing Nothing + statBox_ (Just pid) Nothing "Baseline" "" (show (round (d.baselineErrorRate * 100) :: Int) <> "%") Nothing Nothing + unless (V.null d.topErrorTypes) do + div_ [class_ "surface-raised rounded-2xl 
overflow-hidden mb-4"] do + div_ [class_ "px-4 py-3 border-b border-strokeWeak"] do + span_ [class_ "text-sm font-medium text-textStrong"] "Top Error Types" + div_ [class_ "p-4"] do + ul_ [class_ "list-disc list-inside text-sm text-textWeak"] do + forM_ d.topErrorTypes $ \e -> li_ [class_ "font-mono"] $ toHtml e + _ -> pass + Issues.EndpointVolumeRateChange -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointVolumeRateChangeData) -> do + div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do + statBox_ (Just pid) Nothing "Endpoint" "" (d.endpointMethod <> " " <> d.endpointPath) Nothing Nothing + statBox_ (Just pid) Nothing "Direction" "" d.changeDirection Nothing Nothing + statBox_ (Just pid) Nothing "Current Rate" "" (show (round d.currentRatePerHour :: Int) <> "/hr") Nothing Nothing + statBox_ (Just pid) Nothing "Baseline" "" (show (round d.baselineRatePerHour :: Int) <> "/hr") Nothing Nothing + let alertClass = if d.changeDirection == "drop" then "bg-fillWarning-weak border-strokeWarning-weak text-fillWarning-strong" else "bg-fillInformation-weak border-strokeInformation-weak text-fillInformation-strong" + div_ [class_ $ "p-4 rounded-lg border mb-4 " <> alertClass] do + span_ [class_ "text-sm font-medium"] $ toHtml $ "Traffic " <> d.changeDirection <> " by " <> show (round (abs d.changePercent) :: Int) <> "%" + _ -> pass div_ [class_ "surface-raised rounded-2xl overflow-hidden", id_ "error-details-container"] do div_ [class_ "px-4 border-b border-b-strokeWeak flex items-center justify-between"] do @@ -538,6 +701,14 @@ aiChatPostH pid issueId form Just issue -> do errorM <- case issue.issueType of Issues.RuntimeException -> Anomalies.errorByHash pid issue.endpointHash + Issues.ErrorEscalating -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.ErrorEscalatingData) -> Anomalies.errorByHash pid d.errorHash + _ -> pure Nothing + Issues.ErrorRegressed -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.ErrorRegressedData) -> Anomalies.errorByHash pid d.errorHash + _ -> pure Nothing _ -> pure Nothing (traceDataM, spans) <- case errorM of @@ -553,7 +724,23 @@ aiChatPostH pid issueId form pure (Just trData, V.fromList spanRecs) Nothing -> pure (Nothing, V.empty) Nothing -> pure (Nothing, V.empty) - Nothing -> pure (Nothing, V.empty) + Nothing -> + -- For EndpointLatencyDegradation, try to fetch trace from sampleTraceIds + case issue.issueType of + Issues.EndpointLatencyDegradation -> + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointLatencyDegradationData) -> + case V.headM d.sampleTraceIds of + Just tId -> do + trM <- Telemetry.getTraceDetails pid tId Nothing now + case trM of + Just trData -> do + spanRecs <- Telemetry.getSpanRecordsByTraceId pid trData.traceId (Just trData.traceStartTime) now + pure (Just trData, V.fromList spanRecs) + Nothing -> pure (Nothing, V.empty) + Nothing -> pure (Nothing, V.empty) + _ -> pure (Nothing, V.empty) + _ -> pure (Nothing, V.empty) let context = buildAIContext issue errorM traceDataM spans anomalyContext = unlines [anomalySystemPrompt, "", "--- ISSUE CONTEXT ---", context] @@ -685,19 +872,7 @@ anomalyAIChat_ pid issueId = do $ toHtml txt -anomalyListGetH - :: Projects.ProjectId - -> Maybe Text - -> Maybe Text - -> Maybe Text - -> Maybe Text - -> Maybe Text - -> Maybe Text - -> Maybe Text - -> Maybe Endpoints.EndpointId - -> Maybe Text - -> Maybe Text - -> ATAuthCtx (RespHeaders AnomalyListGet) +anomalyListGetH :: Projects.ProjectId 
-> Maybe Text -> Maybe Text -> Maybe Text -> Maybe Text -> Maybe Text -> Maybe Text -> Maybe Text -> Maybe Endpoints.EndpointId -> Maybe Text -> Maybe Text -> ATAuthCtx (RespHeaders AnomalyListGet) anomalyListGetH pid layoutM filterTM sortM timeFilter pageM perPageM loadM endpointM hxRequestM hxBoostedM = do (sess, project) <- Sessions.sessionAndProject pid appCtx <- ask @AuthContext @@ -966,7 +1141,7 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue strong_ [class_ "text-textBrand"] $ toHtml $ show totalChanges " payloads affected" - -- Stack trace for runtime exceptions or Query for alerts + -- Issue-specific details case issue.issueType of Issues.RuntimeException -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (exceptionData :: Issues.RuntimeExceptionData) -> @@ -983,7 +1158,88 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue span_ [class_ "text-sm text-textWeak mb-2 block font-medium"] "Query:" div_ [class_ "bg-fillInformation-weak border border-strokeInformation-weak rounded-lg p-3 text-sm monospace text-fillInformation-strong max-w-2xl overflow-x-auto"] $ toHtml alertData.queryExpression _ -> pass - _ -> pass + Issues.NewEndpoint -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.NewEndpointData) -> + div_ [class_ "mb-4 p-3 bg-fillInformation-weak border border-strokeInformation-weak rounded-lg"] do + div_ [class_ "flex items-center gap-2 text-sm"] do + span_ [class_ "font-medium text-fillInformation-strong"] $ toHtml d.endpointMethod + span_ [class_ "text-fillInformation-strong"] $ toHtml d.endpointPath + div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml $ "Host: " <> d.endpointHost + _ -> pass + Issues.NewShape -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.NewShapeData) -> do + div_ [class_ "mb-4"] do + div_ [class_ "flex items-center gap-2 text-sm mb-2"] do + span_ [class_ "font-medium text-textStrong"] $ toHtml d.endpointMethod + span_ [class_ "text-textWeak"] $ toHtml d.endpointPath + span_ [class_ "text-xs px-2 py-0.5 rounded bg-fillWeak"] $ toHtml $ "Status " <> show d.statusCode + unless (V.null d.newFields) do + div_ [class_ "text-xs text-textWeak"] $ toHtml $ "+" <> show (V.length d.newFields) <> " new fields" + unless (V.null d.deletedFields) do + div_ [class_ "text-xs text-textError"] $ toHtml $ "-" <> show (V.length d.deletedFields) <> " deleted fields" + _ -> pass + Issues.FieldChange -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.FieldChangeData) -> + div_ [class_ "mb-4 p-3 bg-fillWeak border border-strokeWeak rounded-lg"] do + div_ [class_ "text-sm"] do + span_ [class_ "text-textWeak"] "Field: " + span_ [class_ "font-mono text-textStrong"] $ toHtml d.keyPath + div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml $ d.changeType <> " on " <> d.endpointMethod <> " " <> d.endpointPath + _ -> pass + Issues.LogPattern -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.LogPatternData) -> + div_ [class_ "border border-strokeWeak rounded-lg group/lp mb-4"] do + label_ [class_ "text-sm text-textWeak font-semibold rounded-lg p-2 flex gap-2 items-center cursor-pointer"] do + faSprite_ "chevron-right" "regular" "h-3 w-3 group-has-[.lp-input:checked]/lp:rotate-90" + toHtml $ fromMaybe "LOG" d.logLevel <> " pattern (" <> show d.occurrenceCount <> " occurrences)" + input_ [class_ "lp-input w-0 h-0 opacity-0", type_ "checkbox"] + div_ [class_ "bg-fillWeak p-4 overflow-x-scroll hidden 
group-has-[.lp-input:checked]/lp:block text-sm monospace text-textStrong"] $ pre_ [class_ "whitespace-pre-wrap"] $ toHtml d.logPattern + _ -> pass + Issues.ErrorEscalating -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.ErrorEscalatingData) -> + div_ [class_ "mb-4 p-3 bg-fillError-weak border border-strokeError-weak rounded-lg"] do + div_ [class_ "text-sm text-fillError-strong font-medium"] $ toHtml d.exceptionType + div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml $ show d.escalationRate <> "x increase in " <> d.escalationWindow + _ -> pass + Issues.ErrorRegressed -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.ErrorRegressedData) -> + div_ [class_ "mb-4 p-3 bg-fillError-weak border border-strokeError-weak rounded-lg"] do + div_ [class_ "text-sm text-fillError-strong font-medium"] $ toHtml d.exceptionType + div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml $ "Regressed after " <> show (d.quietPeriodMinutes `div` 60) <> " hours quiet" + _ -> pass + Issues.LogPatternRateChange -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.LogPatternRateChangeData) -> + div_ [class_ "border border-strokeWeak rounded-lg group/lpr mb-4"] do + label_ [class_ "text-sm text-textWeak font-semibold rounded-lg p-2 flex gap-2 items-center cursor-pointer"] do + faSprite_ "chevron-right" "regular" "h-3 w-3 group-has-[.lpr-input:checked]/lpr:rotate-90" + toHtml $ "Rate " <> d.changeDirection <> " (" <> show (round d.changePercent :: Int) <> "%)" + input_ [class_ "lpr-input w-0 h-0 opacity-0", type_ "checkbox"] + div_ [class_ "bg-fillWeak p-4 overflow-x-scroll hidden group-has-[.lpr-input:checked]/lpr:block text-sm monospace text-textStrong"] $ pre_ [class_ "whitespace-pre-wrap"] $ toHtml d.logPattern + _ -> pass + Issues.EndpointLatencyDegradation -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointLatencyDegradationData) -> + div_ [class_ "mb-4 p-3 bg-fillWarning-weak border border-strokeWarning-weak rounded-lg"] do + div_ [class_ "flex items-center gap-2 text-sm"] do + span_ [class_ "font-medium text-fillWarning-strong"] $ toHtml d.endpointMethod + span_ [class_ "text-fillWarning-strong"] $ toHtml d.endpointPath + div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml $ d.percentile <> ": " <> show (round d.baselineLatencyMs :: Int) <> "ms → " <> show (round d.currentLatencyMs :: Int) <> "ms" + _ -> pass + Issues.EndpointErrorRateSpike -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointErrorRateSpikeData) -> + div_ [class_ "mb-4 p-3 bg-fillError-weak border border-strokeError-weak rounded-lg"] do + div_ [class_ "flex items-center gap-2 text-sm"] do + span_ [class_ "font-medium text-fillError-strong"] $ toHtml d.endpointMethod + span_ [class_ "text-fillError-strong"] $ toHtml d.endpointPath + div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml $ show (round (d.currentErrorRate * 100) :: Int) <> "% error rate (" <> show d.errorCount <> " errors)" + _ -> pass + Issues.EndpointVolumeRateChange -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointVolumeRateChangeData) -> + div_ [class_ $ "mb-4 p-3 rounded-lg border " <> if d.changeDirection == "drop" then "bg-fillWarning-weak border-strokeWarning-weak" else "bg-fillInformation-weak border-strokeInformation-weak"] do + div_ [class_ "flex items-center gap-2 text-sm"] do + span_ [class_ "font-medium text-textStrong"] $ toHtml d.endpointMethod + span_ [class_ "text-textStrong"] $ toHtml 
d.endpointPath
+ div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml $ "Traffic " <> d.changeDirection <> ": " <> show (round d.changePercent :: Int) <> "%"
+ _ -> pass
 -- Recommended action
 div_ [class_ "border-l-4 border-strokeBrand pl-4 mb-4"]
 $ p_ [class_ "text-sm text-textStrong leading-relaxed"]
 $ toHtml issue.recommendedAction
@@ -1187,7 +1443,18 @@ issueTypeBadge issueType critical = badge cls icon txt
 (cls, icon, txt) = case issueType of
 Issues.RuntimeException -> ("bg-fillError-strong", "triangle-alert", "ERROR")
 Issues.QueryAlert -> ("bg-fillWarning-strong", "zap", "ALERT")
- _
- | critical -> ("bg-fillError-strong", "exclamation-triangle", "BREAKING")
- | otherwise -> ("bg-fillInformation-strong", "info", "Incremental")
+ Issues.NewEndpoint -> ("bg-fillInformation-strong", "plus-circle", "NEW ENDPOINT")
+ Issues.NewShape
+ | critical -> ("bg-fillError-strong", "exclamation-triangle", "BREAKING CHANGE")
+ | otherwise -> ("bg-fillInformation-strong", "shapes", "NEW SHAPE")
+ Issues.FieldChange
+ | critical -> ("bg-fillError-strong", "exclamation-triangle", "BREAKING CHANGE")
+ | otherwise -> ("bg-fillWarning-strong", "pen-to-square", "FIELD CHANGE")
+ Issues.LogPattern -> ("bg-fillInformation-strong", "file-lines", "LOG PATTERN")
+ Issues.ErrorEscalating -> ("bg-fillError-strong", "arrow-trend-up", "ESCALATING")
+ Issues.ErrorRegressed -> ("bg-fillError-strong", "rotate-left", "REGRESSED")
+ Issues.LogPatternRateChange -> ("bg-fillWarning-strong", "chart-line", "RATE CHANGE")
+ Issues.EndpointLatencyDegradation -> ("bg-fillWarning-strong", "clock", "LATENCY")
+ Issues.EndpointErrorRateSpike -> ("bg-fillError-strong", "chart-line-up", "ERROR SPIKE")
+ Issues.EndpointVolumeRateChange -> ("bg-fillWarning-strong", "arrows-up-down", "TRAFFIC")
 badge c i t = span_ [class_ $ "badge " <> c] do
 faSprite_ i "regular" "w-3 h-3"; t
diff --git a/src/Pages/Reports.hs b/src/Pages/Reports.hs
index 7731f4f72..6eebd585c 100644
--- a/src/Pages/Reports.hs
+++ b/src/Pages/Reports.hs
@@ -288,7 +288,16 @@ singleReportPage pid report =
 let titleCls = case iss.issueType of
 Issues.RuntimeException -> "text-textError"
 Issues.QueryAlert -> "text-yellow-500"
- _ -> "text-textBrand-strong"
+ Issues.NewEndpoint -> "text-textBrand-strong"
+ Issues.NewShape -> "text-textBrand-strong"
+ Issues.FieldChange -> "text-yellow-500"
+ Issues.LogPattern -> "text-textBrand-strong"
+ Issues.ErrorEscalating -> "text-textError"
+ Issues.ErrorRegressed -> "text-textError"
+ Issues.LogPatternRateChange -> "text-yellow-500"
+ Issues.EndpointLatencyDegradation -> "text-yellow-500"
+ Issues.EndpointErrorRateSpike -> "text-textError"
+ Issues.EndpointVolumeRateChange -> "text-yellow-500"
 span_ [class_ $ "text-sm font-medium " <> titleCls] $ toHtml iss.title
 -- span_ [] $ toHtml iss.severity
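The hunks above repeat one decode-then-render shape for every issue type: AE.fromJSON (getAeson issue.issueData), an AE.Success branch, and a `_ -> pass` fallback. A helper could collapse that boilerplate. The sketch below is illustrative only; it assumes the module's existing imports, and withIssueData is a hypothetical name, not something these patches introduce:

    -- Hypothetical helper, not part of this patch series: decode issue_data
    -- once and render only when the payload parses; render nothing otherwise.
    withIssueData :: AE.FromJSON a => Issues.Issue -> (a -> Html ()) -> Html ()
    withIssueData issue render =
      case AE.fromJSON (getAeson issue.issueData) of
        AE.Success d -> render d
        AE.Error _ -> pass

Each branch would then reduce to, e.g., Issues.NewEndpoint -> withIssueData issue \(d :: Issues.NewEndpointData) -> ..., keeping the per-type markup while the decoding lives in one place.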
From c102eebf8177ec7ea940275bfac880ea6f08d1e1 Mon Sep 17 00:00:00 2001
From: Yussif Mohammed
Date: Wed, 14 Jan 2026 11:41:43 +0000
Subject: [PATCH 46/71] fix issues select

---
 src/Models/Apis/Issues.hs | 21 ++++++++-------------
 1 file changed, 8 insertions(+), 13 deletions(-)

diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs
index bac5b7ed8..473f5bffe 100644
--- a/src/Models/Apis/Issues.hs
+++ b/src/Models/Apis/Issues.hs
@@ -536,24 +536,20 @@ insertIssue issue = void $ PG.execute q issue
 q =
 [sql|
 INSERT INTO apis.issues (
- id, created_at, updated_at, project_id, issue_type, endpoint_hash,
+ id, created_at, updated_at, project_id, issue_type, source_type, target_hash, endpoint_hash,
 acknowledged_at, acknowledged_by, archived_at,
- title, service, critical, severity,
- affected_requests, affected_clients, error_rate,
+ title, service, environment, critical, severity,
 recommended_action, migration_complexity,
 issue_data, request_payloads, response_payloads,
 llm_enhanced_at, llm_enhancement_version
 ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-ON CONFLICT (project_id, endpoint_hash)
- WHERE issue_type = 'api_change'
- AND acknowledged_at IS NULL
+ON CONFLICT (project_id, target_hash)
+ WHERE acknowledged_at IS NULL
 AND archived_at IS NULL
- AND endpoint_hash != ''
+ AND target_hash != ''
 DO UPDATE SET
 updated_at = EXCLUDED.updated_at,
- affected_requests = issues.affected_requests + EXCLUDED.affected_requests,
- affected_clients = GREATEST(issues.affected_clients, EXCLUDED.affected_clients),
- issue_data = issues.issue_data || EXCLUDED.issue_data
+ issue_data = EXCLUDED.issue_data
 |]
@@ -587,7 +583,7 @@ selectIssues pid _typeM isAcknowledged isArchived limit offset timeRangeM sortM
 q =
 [text|
 SELECT id, created_at, updated_at, project_id, issue_type::text, endpoint_hash,
 acknowledged_at, acknowledged_by, archived_at, title, service, critical,
- CASE WHEN critical THEN 'critical' ELSE 'info' END, affected_requests, affected_clients, NULL::double precision,
+ CASE WHEN critical THEN 'critical' ELSE 'info' END, 0::int, 0::int, NULL::double precision,
 recommended_action, migration_complexity, issue_data, request_payloads, response_payloads,
 NULL::timestamp with time zone, NULL::int, 0::bigint, updated_at
 FROM apis.issues WHERE project_id = ? $timefilter $ackF $archF $orderBy LIMIT ? OFFSET ? |]
@@ -633,9 +629,8 @@ updateIssueWithNewAnomaly issueId newData = void $ PG.execute q (Aeson newData,
 q =
 [sql|
 UPDATE apis.issues
- SET
+ SET
 issue_data = issue_data || ?::jsonb,
- affected_requests = affected_requests + 1,
 updated_at = NOW()
 WHERE id = ?
 |]

From ac3ac104642edb9bdc4a071e14e0e72f61985634 Mon Sep 17 00:00:00 2001
From: Yussif Mohammed
Date: Wed, 14 Jan 2026 11:42:03 +0000
Subject: [PATCH 47/71] update proto-lens fork

---
 cabal.project | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/cabal.project b/cabal.project
index 361bb2cf5..9eb59cf2c 100644
--- a/cabal.project
+++ b/cabal.project
@@ -39,8 +39,8 @@ source-repository-package

 source-repository-package
 type: git
- location: https://github.com/tonyalaribe/proto-lens
- tag: da3a3c7d8f43b7b22a3325a6706eb2aad98f41be
+ location: https://github.com/dawkaka/proto-lens
+ tag: cf4e060ea3376b68b65a9e207115f55b38cf1a12
 subdir: proto-lens-setup
 subdir: proto-lens-protoc
 subdir: proto-lens-runtime

From 6d7bb6f41e1f49c99f880eb1c332e8eb191656f4 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Wed, 14 Jan 2026 11:42:40 +0000
Subject: [PATCH 48/71] Auto-format code with fourmolu

---
 src/Models/Apis/Issues/Enhancement.hs | 49 +++++++++++++++++++++++----
 1 file changed, 42 insertions(+), 7 deletions(-)

diff --git a/src/Models/Apis/Issues/Enhancement.hs b/src/Models/Apis/Issues/Enhancement.hs
index 0862c58f6..7fc0eefac 100644
--- a/src/Models/Apis/Issues/Enhancement.hs
+++ b/src/Models/Apis/Issues/Enhancement.hs
@@ -375,7 +375,13 @@ simpleDescription issue = case issue.issueType of
 | modifiedCount > 0 = "medium"
 | otherwise = "low"
 in Just
- ( "New response shape detected on " <> d.endpointMethod <> " " <> d.endpointPath <> " (status " <> toText (show d.statusCode) <> "). 
" + ( "New response shape detected on " + <> d.endpointMethod + <> " " + <> d.endpointPath + <> " (status " + <> toText (show d.statusCode) + <> "). " <> "New fields: " <> toText (show newCount) <> ", deleted fields: " @@ -420,7 +426,11 @@ simpleDescription issue = case issue.issueType of AE.Success (d :: Issues.ErrorEscalatingData) -> let svc = fromMaybe "unknown service" d.serviceName in Just - ( "Error '" <> T.take 100 d.exceptionType <> "' is escalating in " <> svc <> ". " + ( "Error '" + <> T.take 100 d.exceptionType + <> "' is escalating in " + <> svc + <> ". " <> "Rate increased " <> toText (show d.escalationRate) <> "x over the last " @@ -445,7 +455,11 @@ simpleDescription issue = case issue.issueType of | quietDays > 0 = toText (show quietDays) <> " days" | otherwise = toText (show quietHours) <> " hours" in Just - ( "Previously resolved error '" <> T.take 100 d.exceptionType <> "' has regressed in " <> svc <> ". " + ( "Previously resolved error '" + <> T.take 100 d.exceptionType + <> "' has regressed in " + <> svc + <> ". " <> "It was quiet for " <> quietStr <> " before reappearing. " @@ -463,7 +477,11 @@ simpleDescription issue = case issue.issueType of direction = if d.changeDirection == "spike" then "spiked" else "dropped" complexity = if d.changeDirection == "spike" then "medium" else "low" in Just - ( "Log pattern rate " <> direction <> " in " <> svc <> ". " + ( "Log pattern rate " + <> direction + <> " in " + <> svc + <> ". " <> "Current rate: " <> toText (show (round d.currentRatePerHour :: Int)) <> "/hour (baseline: " @@ -483,7 +501,12 @@ simpleDescription issue = case issue.issueType of AE.Success (d :: Issues.EndpointLatencyDegradationData) -> let svc = maybe "" (" in " <>) d.serviceName in Just - ( "Latency degradation detected on " <> d.endpointMethod <> " " <> d.endpointPath <> svc <> ". " + ( "Latency degradation detected on " + <> d.endpointMethod + <> " " + <> d.endpointPath + <> svc + <> ". " <> d.percentile <> " latency increased from " <> toText (show (round d.baselineLatencyMs :: Int)) @@ -501,7 +524,12 @@ simpleDescription issue = case issue.issueType of AE.Success (d :: Issues.EndpointErrorRateSpikeData) -> let svc = maybe "" (" in " <>) d.serviceName in Just - ( "Error rate spike on " <> d.endpointMethod <> " " <> d.endpointPath <> svc <> ". " + ( "Error rate spike on " + <> d.endpointMethod + <> " " + <> d.endpointPath + <> svc + <> ". " <> "Current error rate: " <> toText (show (round (d.currentErrorRate * 100) :: Int)) <> "% (" @@ -523,7 +551,14 @@ simpleDescription issue = case issue.issueType of direction = if d.changeDirection == "spike" then "spiked" else "dropped" complexity = if d.changeDirection == "spike" then "medium" else "medium" in Just - ( "Traffic volume " <> direction <> " on " <> d.endpointMethod <> " " <> d.endpointPath <> svc <> ". " + ( "Traffic volume " + <> direction + <> " on " + <> d.endpointMethod + <> " " + <> d.endpointPath + <> svc + <> ". 
" <> "Current rate: " <> toText (show (round d.currentRatePerHour :: Int)) <> " req/hour (baseline: " From 6bdba4379a0e9385df094403d06c01df26d57935 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Wed, 14 Jan 2026 17:14:53 +0000 Subject: [PATCH 49/71] error insert bug fixes --- src/BackgroundJobs.hs | 2 +- src/Models/Apis/Errors.hs | 44 +++++++++++-------- src/Models/Apis/Issues.hs | 26 +++++------ src/Models/Apis/Issues/Enhancement.hs | 8 ++-- src/Pages/Anomalies.hs | 9 ++-- .../migrations/0030_error_events_trigger.sql | 4 +- static/migrations/0032_drop_issue_index.sql | 7 +++ 7 files changed, 55 insertions(+), 45 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index e1b45589e..1200a52c8 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -1556,7 +1556,7 @@ detectErrorSpikes pid authCtx = do isSpike = zScore > 3.0 && currentRate > mean + 5 Relude.when isSpike $ do - Log.logInfo "Error spike detected" (errRate.errorId, errRate.exceptionType, currentRate, mean, zScore) + Log.logInfo "Error spike detected" (errRate.errorId, errRate.errorType, currentRate, mean, zScore) -- Get full error record for issue creation errorM <- Errors.getErrorById errRate.errorId diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index cf6164d8f..00ab07814 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -122,15 +122,16 @@ data Error = Error , projectId :: Projects.ProjectId , createdAt :: ZonedTime , updatedAt :: ZonedTime - , exceptionType :: Text + , errorType :: Text , message :: Text , stacktrace :: Text , hash :: Text - , environment :: Text + , environment :: Maybe Text , service :: Maybe Text , runtime :: Maybe Text , errorData :: ATError - , representativeMessage :: Maybe Text + , firstTraceId :: Maybe Text + , recentTraceId :: Maybe Text , firstEventId :: Maybe ErrorEventId , lastEventId :: Maybe ErrorEventId , state :: ErrorState @@ -199,7 +200,7 @@ data ErrorEvent = ErrorEvent , errorId :: ErrorId , occurredAt :: ZonedTime , targetHash :: Text - , exceptionType :: Text + , errorType :: Text , message :: Text , stackTrace :: Text , serviceName :: Text @@ -234,9 +235,9 @@ getErrors pid mstate limit offset = PG.query q (pid, maybe "%" errorStateToText q = [sql| SELECT id, project_id, created_at, updated_at, - exception_type, message, stacktrace, hash, + error_type, message, stacktrace, hash, environment, service, runtime, error_data, - representative_message, first_event_id, last_event_id, + first_trace_id, recent_trace_id, first_event_id, last_event_id, state, assignee_id, assigned_at, resolved_at, regressed_at, occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h, quiet_minutes, resolution_threshold_minutes, @@ -259,9 +260,9 @@ getErrorById eid = do q = [sql| SELECT id, project_id, created_at, updated_at, - exception_type, message, stacktrace, hash, + error_type, message, stacktrace, hash, environment, service, runtime, error_data, - representative_message, first_event_id, last_event_id, + first_trace_id, recent_trace_id, first_event_id, last_event_id, state, assignee_id, assigned_at, resolved_at, regressed_at, occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h, quiet_minutes, resolution_threshold_minutes, @@ -282,9 +283,9 @@ getErrorByHash pid hash = do q = [sql| SELECT id, project_id, created_at, updated_at, - exception_type, message, stacktrace, hash, + error_type, message, stacktrace, hash, environment, service, runtime, error_data, - representative_message, first_event_id, last_event_id, + 
first_trace_id, recent_trace_id, first_event_id, last_event_id, state, assignee_id, assigned_at, resolved_at, regressed_at, occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h, quiet_minutes, resolution_threshold_minutes, @@ -303,9 +304,9 @@ getActiveErrors pid = PG.query q (Only pid) q = [sql| SELECT id, project_id, created_at, updated_at, - exception_type, message, stacktrace, hash, + error_type, message, stacktrace, hash, environment, service, runtime, error_data, - representative_message, first_event_id, last_event_id, + first_trace_id, recent_trace_id, first_event_id, last_event_id, state, assignee_id, assigned_at, resolved_at, regressed_at, occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h, quiet_minutes, resolution_threshold_minutes, @@ -480,7 +481,7 @@ checkErrorSpike err = do data ErrorWithCurrentRate = ErrorWithCurrentRate { errorId :: ErrorId , projectId :: Projects.ProjectId - , exceptionType :: Text + , errorType :: Text , message :: Text , service :: Maybe Text , baselineState :: BaselineState @@ -501,7 +502,7 @@ getErrorsWithCurrentRates pid = SELECT e.id, e.project_id, - e.exception_type, + e.error_type, e.message, e.service, e.baseline_state, @@ -528,12 +529,15 @@ upsertErrorQueryAndParam pid err = (q, params) q = [sql| INSERT INTO apis.errors ( - project_id, exception_type, message, stacktrace, hash, - environment, service, runtime, error_data, occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, 1, 1, 1, 1) - ON CONFLICT (hash) DO UPDATE SET + project_id, error_type, message, stacktrace, hash, + environment, service, runtime, error_data, + first_trace_id, recent_trace_id, + occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 1, 1, 1, 1) + ON CONFLICT (project_id, hash) DO UPDATE SET updated_at = NOW(), - representative_message = EXCLUDED.message, + message = EXCLUDED.message, + recent_trace_id = EXCLUDED.recent_trace_id, occurrences_1m = apis.errors.occurrences_1m + 1, occurrences_5m = apis.errors.occurrences_5m + 1, occurrences_1h = apis.errors.occurrences_1h + 1, @@ -558,6 +562,8 @@ upsertErrorQueryAndParam pid err = (q, params) , MkDBField err.serviceName , MkDBField err.runtime , MkDBField err + , MkDBField err.traceId + , MkDBField err.traceId ] diff --git a/src/Models/Apis/Issues.hs b/src/Models/Apis/Issues.hs index 473f5bffe..3601ce8ea 100644 --- a/src/Models/Apis/Issues.hs +++ b/src/Models/Apis/Issues.hs @@ -258,7 +258,7 @@ data LogPatternData = LogPatternData -- | Error Escalating issue data (error rate increasing over time) data ErrorEscalatingData = ErrorEscalatingData { errorHash :: Text - , exceptionType :: Text + , errorType :: Text , errorMessage :: Text , serviceName :: Maybe Text , currentState :: Text -- "escalating" @@ -279,7 +279,7 @@ data ErrorEscalatingData = ErrorEscalatingData -- | Error Regressed issue data (previously resolved error returned) data ErrorRegressedData = ErrorRegressedData { errorHash :: Text - , exceptionType :: Text + , errorType :: Text , errorMessage :: Text , serviceName :: Maybe Text , resolvedAt :: UTCTime -- when it was previously resolved @@ -543,10 +543,8 @@ INSERT INTO apis.issues ( issue_data, request_payloads, response_payloads, llm_enhanced_at, llm_enhancement_version ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
-ON CONFLICT (project_id, target_hash) - WHERE acknowledged_at IS NULL - AND archived_at IS NULL - AND target_hash != '' +ON CONFLICT (project_id, target_hash, issue_type) + WHERE acknowledged_at IS NULL AND archived_at IS NULL DO UPDATE SET updated_at = EXCLUDED.updated_at, issue_data = EXCLUDED.issue_data @@ -764,7 +762,7 @@ createNewErrorIssue projectId err = do now <- getCurrentTime let exceptionData = RuntimeExceptionData - { errorType = err.exceptionType + { errorType = err.errorType , errorMessage = err.message , stackTrace = err.stacktrace , requestPath = Nothing @@ -781,7 +779,7 @@ createNewErrorIssue projectId err = do err.service True "critical" - ("New Error: " <> err.exceptionType <> " - " <> T.take 80 err.message) + ("New Error: " <> err.errorType <> " - " <> T.take 80 err.message) "Investigate the new error and implement a fix." "n/a" exceptionData @@ -795,7 +793,7 @@ createErrorSpikeIssue projectId err currentRate baselineMean baselineStddev = do increasePercent = if baselineMean > 0 then ((currentRate / baselineMean) - 1) * 100 else 0 exceptionData = RuntimeExceptionData - { errorType = err.exceptionType + { errorType = err.errorType , errorMessage = err.message , stackTrace = err.stacktrace , requestPath = Nothing @@ -812,7 +810,7 @@ createErrorSpikeIssue projectId err currentRate baselineMean baselineStddev = do err.service True "critical" - ("Error Spike: " <> err.exceptionType <> " (" <> T.pack (show (round increasePercent :: Int)) <> "% increase)") + ("Error Spike: " <> err.errorType <> " (" <> T.pack (show (round increasePercent :: Int)) <> "% increase)") ("Error rate has spiked " <> T.pack (show (round zScore :: Int)) <> " standard deviations above baseline. Current: " <> T.pack (show (round currentRate :: Int)) <> "/hr, Baseline: " <> T.pack (show (round baselineMean :: Int)) <> "/hr. Investigate recent deployments or changes.") "n/a" exceptionData @@ -857,7 +855,7 @@ createErrorEscalatingIssue projectId err prevState escalationRate escalationWind let escalatingData = ErrorEscalatingData { errorHash = err.hash - , exceptionType = err.exceptionType + , errorType = err.errorType , errorMessage = err.message , serviceName = err.service , currentState = "escalating" @@ -877,7 +875,7 @@ createErrorEscalatingIssue projectId err prevState escalationRate escalationWind err.service True "critical" - ("Error Escalating: " <> err.exceptionType <> " (" <> T.pack (show (round (escalationRate * 100) :: Int)) <> "% increase)") + ("Error Escalating: " <> err.errorType <> " (" <> T.pack (show (round (escalationRate * 100) :: Int)) <> "% increase)") ("Error rate is escalating (" <> T.pack (show escalationRate) <> "x over " <> escalationWindow <> "). Investigate immediately.") "n/a" escalatingData @@ -890,7 +888,7 @@ createErrorRegressedIssue projectId err resolvedAtTime quietMins prevOccurrences let regressedData = ErrorRegressedData { errorHash = err.hash - , exceptionType = err.exceptionType + , errorType = err.errorType , errorMessage = err.message , serviceName = err.service , resolvedAt = resolvedAtTime @@ -907,7 +905,7 @@ createErrorRegressedIssue projectId err resolvedAtTime quietMins prevOccurrences err.service True "critical" - ("Error Regressed: " <> err.exceptionType <> " (after " <> T.pack (show quietMins) <> " min quiet)") + ("Error Regressed: " <> err.errorType <> " (after " <> T.pack (show quietMins) <> " min quiet)") ("Previously resolved error has returned after " <> T.pack (show quietMins) <> " minutes. 
The original fix may be incomplete.") "n/a" regressedData diff --git a/src/Models/Apis/Issues/Enhancement.hs b/src/Models/Apis/Issues/Enhancement.hs index 7fc0eefac..8feeb4785 100644 --- a/src/Models/Apis/Issues/Enhancement.hs +++ b/src/Models/Apis/Issues/Enhancement.hs @@ -317,12 +317,12 @@ simpleTitle issue = case issue.issueType of Issues.ErrorEscalating -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.ErrorEscalatingData) -> - Just $ "Error escalating: " <> T.take 60 d.exceptionType <> " (" <> toText (show d.escalationRate) <> "x in " <> d.escalationWindow <> ")" + Just $ "Error escalating: " <> T.take 60 d.errorType <> " (" <> toText (show d.escalationRate) <> "x in " <> d.escalationWindow <> ")" _ -> Just "Error rate escalating" Issues.ErrorRegressed -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.ErrorRegressedData) -> - Just $ "Error regressed: " <> T.take 80 d.exceptionType + Just $ "Error regressed: " <> T.take 80 d.errorType _ -> Just "Previously resolved error has regressed" Issues.LogPatternRateChange -> case AE.fromJSON (getAeson issue.issueData) of @@ -427,7 +427,7 @@ simpleDescription issue = case issue.issueType of let svc = fromMaybe "unknown service" d.serviceName in Just ( "Error '" - <> T.take 100 d.exceptionType + <> T.take 100 d.errorType <> "' is escalating in " <> svc <> ". " @@ -456,7 +456,7 @@ simpleDescription issue = case issue.issueType of | otherwise = toText (show quietHours) <> " hours" in Just ( "Previously resolved error '" - <> T.take 100 d.exceptionType + <> T.take 100 d.errorType <> "' has regressed in " <> svc <> ". " diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index f6ff73036..397caab49 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -334,7 +334,6 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do div_ [] do span_ [class_ "text-xs text-textWeak"] "First seen:" span_ [class_ "ml-2 text-xs"] $ toHtml $ compactTimeAgo $ toText $ prettyTimeAuto now (zonedTimeToUTC err.createdAt) - div_ [class_ "flex items-center gap-2"] do faSprite_ "calendar" "regular" "w-3 h-3" div_ [] do @@ -440,7 +439,7 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do div_ [class_ "p-4 flex flex-col gap-2"] do div_ [] do span_ [class_ "text-sm text-textWeak"] "Exception: " - span_ [class_ "text-sm font-mono text-textStrong"] $ toHtml d.exceptionType + span_ [class_ "text-sm font-mono text-textStrong"] $ toHtml d.errorType div_ [] do span_ [class_ "text-sm text-textWeak"] "Message: " span_ [class_ "text-sm text-textStrong"] $ toHtml d.errorMessage @@ -466,7 +465,7 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do div_ [class_ "p-4 flex flex-col gap-2"] do div_ [] do span_ [class_ "text-sm text-textWeak"] "Exception: " - span_ [class_ "text-sm font-mono text-textStrong"] $ toHtml d.exceptionType + span_ [class_ "text-sm font-mono text-textStrong"] $ toHtml d.errorType div_ [] do span_ [class_ "text-sm text-textWeak"] "Message: " span_ [class_ "text-sm text-textStrong"] $ toHtml d.errorMessage @@ -1198,13 +1197,13 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue Issues.ErrorEscalating -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.ErrorEscalatingData) -> div_ [class_ "mb-4 p-3 bg-fillError-weak border border-strokeError-weak rounded-lg"] do - div_ [class_ "text-sm text-fillError-strong font-medium"] $ toHtml d.exceptionType + div_ [class_ "text-sm text-fillError-strong font-medium"] $ toHtml 
d.errorType div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml $ show d.escalationRate <> "x increase in " <> d.escalationWindow _ -> pass Issues.ErrorRegressed -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.ErrorRegressedData) -> div_ [class_ "mb-4 p-3 bg-fillError-weak border border-strokeError-weak rounded-lg"] do - div_ [class_ "text-sm text-fillError-strong font-medium"] $ toHtml d.exceptionType + div_ [class_ "text-sm text-fillError-strong font-medium"] $ toHtml d.errorType div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml $ "Regressed after " <> show (d.quietPeriodMinutes `div` 60) <> " hours quiet" _ -> pass Issues.LogPatternRateChange -> case AE.fromJSON (getAeson issue.issueData) of diff --git a/static/migrations/0030_error_events_trigger.sql b/static/migrations/0030_error_events_trigger.sql index 2e3805b7f..86b55f3b1 100644 --- a/static/migrations/0030_error_events_trigger.sql +++ b/static/migrations/0030_error_events_trigger.sql @@ -33,8 +33,8 @@ BEGIN NEW.project_id, COALESCE((NEW.error_data->>'when')::timestamptz, NOW()), NEW.hash, - COALESCE(NEW.error_data->>'root_error_type', NEW.error_data->>'error_type', NEW.exception_type), - COALESCE(NEW.error_data->>'root_error_message', NEW.error_data->>'message', NEW.message), + COALESCE(NEW.error_data->>'root_exception_type', NEW.error_data->>'error_type', NEW.error_type), + COALESCE(NEW.error_data->>'root_exception_message', NEW.error_data->>'message', NEW.message), COALESCE(NEW.error_data->>'stack_trace', NEW.stacktrace), COALESCE(NEW.error_data->>'service_name', NEW.service, 'unknown'), COALESCE(NEW.error_data->>'environment', NEW.environment), diff --git a/static/migrations/0032_drop_issue_index.sql b/static/migrations/0032_drop_issue_index.sql index 1404b9af4..847bbb59b 100644 --- a/static/migrations/0032_drop_issue_index.sql +++ b/static/migrations/0032_drop_issue_index.sql @@ -1,4 +1,11 @@ BEGIN; DROP INDEX IF EXISTS apis.idx_issues_unresolved; DROP INDEX IF EXISTS apis.idx_issues_unique_open; + CREATE UNIQUE INDEX issues_project_target_type_open_idx + ON apis.issues (project_id, target_hash, issue_type) + WHERE acknowledged_at IS NULL AND archived_at IS NULL; + ALTER TABLE apis.errors RENAME COLUMN exception_type TO error_type; + ALTER TABLE apis.errors + ADD COLUMN first_trace_id TEXT, + ADD COLUMN recent_trace_id TEXT; COMMIT; \ No newline at end of file From 0042692a440bf62341dcb02950f55a9eff15b434 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Wed, 14 Jan 2026 17:38:02 +0000 Subject: [PATCH 50/71] fix anomaly trace view --- src/Pages/Anomalies.hs | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index 397caab49..c0d81c4b0 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -257,7 +257,7 @@ anomalyDetailPage :: Projects.ProjectId -> Issues.Issue -> Maybe Telemetry.Trace anomalyDetailPage pid issue tr otellogs errM now isFirst = do let spanRecs = V.catMaybes $ Telemetry.convertOtelLogsAndSpansToSpanRecord <$> otellogs issueId = UUID.toText issue.id.unUUIDId - div_ [class_ "pt-8 mx-auto px-4 w-full flex flex-col gap-4 h-full overflow-auto pb-32"] do + div_ [class_ "pt-8 mx-auto px-4 w-full flex flex-col gap-4 overflow-auto pb-32"] do -- Header div_ [class_ "flex flex-col gap-3"] do div_ [class_ "flex gap-2 flex-wrap items-center"] do @@ -268,16 +268,6 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do _ -> pass h3_ [class_ "text-textStrong text-2xl font-semibold"] $ toHtml 
issue.title p_ [class_ "text-sm text-textWeak max-w-3xl"] $ toHtml issue.recommendedAction - - -- -- Metrics & Timeline Row (8-column grid: 4 stats + chart) - -- div_ [class_ "grid grid-cols-4 lg:grid-cols-8 gap-4"] do - -- -- Stats (1 column each) - -- statBox_ (Just pid) Nothing "Affected Requests" "" (show issue.affectedRequests) Nothing Nothing - -- statBox_ (Just pid) Nothing "Affected Clients" "" (show issue.affectedClients) Nothing Nothing - -- whenJust errM $ \err -> do - -- timeStatBox_ "First Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.createdAt - -- timeStatBox_ "Last Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.updatedAt - -- Timeline (4 columns) let widget = div_ [class_ "col-span-4"] $ Widget.widget_ @@ -524,7 +514,7 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do span_ [class_ "text-sm font-medium"] $ toHtml $ "Traffic " <> d.changeDirection <> " by " <> show (round (abs d.changePercent) :: Int) <> "%" _ -> pass - div_ [class_ "surface-raised rounded-2xl overflow-hidden", id_ "error-details-container"] do + div_ [class_ "surface-raised h-max rounded-2xl overflow-hidden", id_ "error-details-container"] do div_ [class_ "px-4 border-b border-b-strokeWeak flex items-center justify-between"] do div_ [class_ "flex items-center gap-2"] do faSprite_ "magnifying-glass-chart" "regular" "w-4 h-4 text-iconNeutral" From a9c8d7970711e72b1fea2198dfe86309f2262775 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Wed, 14 Jan 2026 22:45:16 +0000 Subject: [PATCH 51/71] log pattern extraction improvements --- src/BackgroundJobs.hs | 35 +++++------ src/Models/Apis/Endpoints.hs | 4 +- src/Models/Apis/Errors.hs | 80 +++++++++++++++++++++++ src/Pages/Anomalies.hs | 119 +++++++++++++++++++++++++++++++---- src/Pkg/Drain.hs | 35 ++++++----- src/Web/Routes.hs | 7 +++ 6 files changed, 232 insertions(+), 48 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 1200a52c8..8e6201bac 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -524,59 +524,56 @@ logsPatternExtraction scheduledTime pid = do limitVal = 250 paginate :: Int -> UTCTime -> ATBackgroundCtx () paginate offset startTime = do - otelEvents <- PG.query [sql| SELECT kind, id::text, coalesce(body::text,''), coalesce(summary::text,'') FROM otel_logs_and_spans WHERE project_id = ? AND timestamp >= ? AND timestamp < ? AND (summary_pattern IS NULL OR log_pattern IS NULL) OFFSET ? LIMIT ?|] (pid, startTime, scheduledTime, offset, limitVal) + otelEvents <- PG.query [sql| SELECT kind, id::text, coalesce(body::text,''), coalesce(summary::text,''), context___trace_id, resource___service___name FROM otel_logs_and_spans WHERE project_id = ? AND timestamp >= ? AND timestamp < ? AND (summary_pattern IS NULL OR log_pattern IS NULL) OFFSET ? 
LIMIT ?|] (pid, startTime, scheduledTime, offset, limitVal) unless (null otelEvents) do Log.logInfo "Fetching events for pattern extraction" ("offset", AE.toJSON offset, "count", AE.toJSON (length otelEvents)) - let (logs, summaries) = L.partition (\(k, _, _, _) -> k == "log") otelEvents - processPatterns "log" "log_pattern" (V.fromList [(i, body) | (_, i, body, _) <- logs]) pid scheduledTime startTime - processPatterns "summary" "summary_pattern" (V.fromList [(i, s) | (_, i, _, s) <- summaries]) pid scheduledTime startTime + let (logs, summaries) = L.partition (\(k, _, _, _, _, _) -> k == "log") otelEvents + processPatterns "log" "log_pattern" (V.fromList [(i, body) | (_, i, body, _, trId, serviceName) <- logs]) pid scheduledTime startTime + processPatterns "summary" "summary_pattern" (V.fromList [(i, s) | (_, i, _, s, _, _) <- summaries]) pid scheduledTime startTime Log.logInfo "Completed events pattern extraction for page" ("offset", AE.toJSON offset) Relude.when (length otelEvents == limitVal) $ paginate (offset + limitVal) startTime -- | Generic pattern extraction for logs or summaries -processPatterns :: Text -> Text -> V.Vector (Text, Text) -> Projects.ProjectId -> UTCTime -> UTCTime -> ATBackgroundCtx () +processPatterns :: Text -> Text -> V.Vector (Text, Text, Text, Text) -> Projects.ProjectId -> UTCTime -> UTCTime -> ATBackgroundCtx () processPatterns kind fieldName events pid scheduledTime since = do Relude.when (not $ V.null events) $ do let qq = [text| select $fieldName from otel_logs_and_spans where project_id= ? AND timestamp >= now() - interval '1 hour' and $fieldName is not null GROUP BY $fieldName ORDER BY count(*) desc limit 20|] - existingPatterns <- if kind == "summary" then coerce @[Only Text] @[Text] <$> PG.query (Query $ encodeUtf8 qq) pid else LogPatterns.getLogPatternTexts pid - let known = V.fromList $ map ("",) existingPatterns - combined = known <> events + existingPatterns <- LogPatterns.getLogPatternTexts pid + let known = V.fromList $ map ("",False,) existingPatterns + combined = known <> ((\(dd,smp) -> (dd, True, smp)) <$> events) drainTree = processBatch (kind == "summary") combined scheduledTime Drain.emptyDrainTree newPatterns = Drain.getAllLogGroups drainTree -- Only log if patterns were extracted Relude.when (V.length newPatterns > 0) $ Log.logInfo ("Extracted " <> kind <> " patterns") ("count", AE.toJSON $ V.length newPatterns) - forM_ newPatterns \(patternTxt, ids) -> do + forM_ newPatterns \(sampleMsg, patternTxt, ids) -> do let q = [text|UPDATE otel_logs_and_spans SET $fieldName = ? WHERE project_id = ? AND timestamp > ? 
AND id::text = ANY(?)|]
 unless (V.null ids) $ do
 -- Update otel_logs_and_spans with pattern
 void $ PG.execute (Query $ encodeUtf8 q) (patternTxt, pid, since, V.filter (/= "") ids)
-
 Relude.when (kind == "log" && not (T.null patternTxt)) $ do
 let patternHash = toXXHash patternTxt
- sampleMsg = case V.find (/= "") (V.map snd events) of
- Just msg -> Just (T.take 500 msg)
- Nothing -> Nothing
- void $ LogPatterns.upsertLogPattern pid patternTxt patternHash Nothing Nothing sampleMsg
+ traceShowM ("Upserting log pattern" , pid, patternTxt, patternHash, sampleMsg)
+ void $ LogPatterns.upsertLogPattern pid patternTxt patternHash Nothing Nothing (Just sampleMsg)


 -- | Process a batch of (id, content) pairs through Drain
-processBatch :: Bool -> V.Vector (Text, Text) -> UTCTime -> Drain.DrainTree -> Drain.DrainTree
+processBatch :: Bool -> V.Vector (Text,Bool, Text) -> UTCTime -> Drain.DrainTree -> Drain.DrainTree
 processBatch isSummary batch now inTree =
- V.foldl' (\tree (logId, content) -> processNewLog isSummary logId content now tree) inTree batch
+ V.foldl' (\tree (logId, isSampleLog, content) -> processNewLog isSummary logId isSampleLog content now tree) inTree batch


-processNewLog :: Bool -> Text -> Text -> UTCTime -> Drain.DrainTree -> Drain.DrainTree
-processNewLog isSummary logId content now tree =
+processNewLog :: Bool -> Text -> Bool -> Text -> UTCTime -> Drain.DrainTree -> Drain.DrainTree
+processNewLog isSummary logId isSampleLog content now tree =
 let tokens = Drain.generateDrainTokens content
 in if V.null tokens
 then tree
 else
 let tokenCount = V.length tokens
 firstToken = V.head tokens
- in Drain.updateTreeWithLog tree tokenCount firstToken tokens logId content now
+ in Drain.updateTreeWithLog tree tokenCount firstToken tokens logId isSampleLog content now


 -- | Process errors from OpenTelemetry spans to detect runtime exceptions
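For context on the Drain usage above: groups are bucketed by token count and first token, a new log joins a group when its similarity clears the configured threshold, and the group's template is then merged position-wise, wildcarding every token position where the two sides disagree. A standalone illustration of that merge step (the "<*>" wildcard is an assumption about the configured wildcard token, not something these patches pin down):

    import Data.Text (Text)
    import qualified Data.Vector as V

    -- Illustrative only: position-wise merge of two equal-length token
    -- vectors, replacing each position where the tokens differ with a
    -- wildcard, as Drain.mergeTemplates does for matching log groups.
    mergeLike :: V.Vector Text -> V.Vector Text -> V.Vector Text
    mergeLike = V.zipWith (\a b -> if a == b then a else "<*>")

    -- mergeLike on the token vectors of "user 42 logged in" and
    -- "user 7 logged in" yields ["user", "<*>", "logged", "in"]

Drain.mergeTemplates only runs when the token counts match, which is why updateOrCreateLogGroup compares lengths before merging.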
diff --git a/src/Models/Apis/Endpoints.hs b/src/Models/Apis/Endpoints.hs
index 6d2922b70..bafa62692 100644
--- a/src/Models/Apis/Endpoints.hs
+++ b/src/Models/Apis/Endpoints.hs
@@ -389,8 +389,8 @@ getEndpointStats pid endpointHash hours = listToMaybe <$> PG.query q (pid, endpo
 -- | Update endpoint baseline values
 updateEndpointBaseline :: DB es => EndpointId -> BaselineState -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Int -> Eff es ()
-updateEndpointBaseline eid state errMean errStddev latMean latStddev latP95 latP99 volMean volStddev samples =
- void $ PG.execute q (state, errMean, errStddev, latMean, latStddev, latP95, latP99, volMean, volStddev, samples, eid)
+updateEndpointBaseline eid bState errMean errStddev latMean latStddev latP95 latP99 volMean volStddev samples =
+ void $ PG.execute q (bState, errMean, errStddev, latMean, latStddev, latP95, latP99, volMean, volStddev, samples, eid)
 where
 q =
 [sql|
diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs
index 00ab07814..adf2df111 100644
--- a/src/Models/Apis/Errors.hs
+++ b/src/Models/Apis/Errors.hs
@@ -5,6 +5,7 @@ module Models.Apis.Errors (
 ErrorEvent (..),
 ErrorEventId,
 ATError (..),
+ ErrorL (..),
 -- Queries
 getErrors,
 getErrorById,
@@ -12,6 +13,8 @@ module Models.Apis.Errors (
 getActiveErrors,
 updateOccurrenceCounts,
 updateErrorState,
+ updateErrorStateByProjectAndHash,
+ getErrorLByHash,
 updateBaseline,
 resolveError,
 upsertErrorQueryAndParam,
@@ -162,6 +165,55 @@ data Error = Error
 (AE.FromJSON, AE.ToJSON)
 via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] Error

+-- error aggregated with number of occurrences and affected users
+data ErrorL = ErrorL
+ { id :: ErrorId
+ , projectId :: Projects.ProjectId
+ , createdAt :: ZonedTime
+ , updatedAt :: ZonedTime
+ , errorType :: Text
+ , message :: Text
+ , stacktrace :: Text
+ , hash :: Text
+ , environment :: Maybe Text
+ , service :: Maybe Text
+ , runtime :: Maybe Text
+ , errorData :: ATError
+ , firstTraceId :: Maybe Text
+ , recentTraceId :: Maybe Text
+ , firstEventId :: Maybe ErrorEventId
+ , lastEventId :: Maybe ErrorEventId
+ , state :: ErrorState
+ , assigneeId :: Maybe Users.UserId
+ , assignedAt :: Maybe ZonedTime
+ , resolvedAt :: Maybe ZonedTime
+ , regressedAt :: Maybe ZonedTime
+ , occurrences1m :: Int
+ , occurrences5m :: Int
+ , occurrences1h :: Int
+ , occurrences24h :: Int
+ , quietMinutes :: Int
+ , resolutionThresholdMinutes :: Int
+ , baselineState :: BaselineState
+ , baselineSamples :: Int
+ , baselineErrorRateMean :: Maybe Double
+ , baselineErrorRateStddev :: Maybe Double
+ , baselineUpdatedAt :: Maybe ZonedTime
+ , isIgnored :: Bool
+ , ignoredUntil :: Maybe ZonedTime
+ , occurrences :: Int
+ , affectedUsers :: Int
+ , lastOccurredAt :: Maybe ZonedTime
+ }
+ deriving stock (Generic, Show)
+ deriving anyclass (FromRow, NFData, ToRow)
+ deriving
+ (Entity)
+ via (GenericEntity '[Schema "apis", TableName "errors", PrimaryKey "id", FieldModifiers '[CamelToSnake]] ErrorL)
+ deriving
+ (AE.FromJSON, AE.ToJSON)
+ via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] ErrorL

 data ATError = ATError
 { projectId :: Maybe Projects.ProjectId
@@ -296,6 +348,29 @@ getErrorByHash pid hash = do
 WHERE project_id = ? AND hash = ?
 |]

+getErrorLByHash :: DB es => Projects.ProjectId -> Text -> Eff es (Maybe ErrorL)
+getErrorLByHash pid hash = do
+ results <- PG.query q (pid, hash)
+ return $ listToMaybe results
+ where
+ q =
+ [sql|
+ SELECT id, project_id, created_at, updated_at,
+ error_type, message, stacktrace, hash,
+ environment, service, runtime, error_data,
+ first_trace_id, recent_trace_id, first_event_id, last_event_id,
+ state, assignee_id, assigned_at, resolved_at, regressed_at,
+ occurrences_1m, occurrences_5m, occurrences_1h, occurrences_24h,
+ quiet_minutes, resolution_threshold_minutes,
+ baseline_state, baseline_samples,
+ baseline_error_rate_mean, baseline_error_rate_stddev, baseline_updated_at,
+ is_ignored, ignored_until,
+ (SELECT COUNT(*) FROM apis.error_events WHERE target_hash = e.hash) AS occurrences,
+ (SELECT COUNT(DISTINCT user_id) FROM apis.error_events WHERE target_hash = e.hash) AS affected_users,
+ (SELECT MAX(occurred_at) FROM apis.error_events WHERE target_hash = e.hash) AS last_occurred_at
+ FROM apis.errors e
+ WHERE project_id = ? AND hash = ?
+ |]

 -- | Get active (non-resolved) errors
 getActiveErrors :: DB es => Projects.ProjectId -> Eff es [Error]
@@ -350,6 +425,11 @@ updateErrorState eid newState = PG.execute q (errorStateToText newState, eid)
 q =
 [sql| UPDATE apis.errors SET state = ?, updated_at = NOW() WHERE id = ? |]

+updateErrorStateByProjectAndHash :: DB es => Projects.ProjectId -> Text -> ErrorState -> Eff es Int64
+updateErrorStateByProjectAndHash pid hash newState = PG.execute q (errorStateToText newState, pid, hash)
+ where
+ q =
+ [sql| UPDATE apis.errors SET state = ?, updated_at = NOW() WHERE project_id = ? AND hash = ? 
|] resolveError :: DB es => ErrorId -> Eff es Int64 resolveError eid = PG.execute q (Only eid) diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index c0d81c4b0..570aa214f 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -22,6 +22,10 @@ module Pages.Anomalies ( AIChatForm (..), aiChatPostH, aiChatHistoryGetH, + -- Error actions + ErrorAssignForm (..), + errorActionPostH, + errorAssignPostH, ) where @@ -48,6 +52,7 @@ import Lucid.Hyperscript (__) import Models.Apis.Anomalies (FieldChange (..), PayloadChange (..)) import Models.Apis.Anomalies qualified as Anomalies import Models.Apis.Endpoints qualified as Endpoints +import Models.Apis.Errors qualified as Errors import Models.Apis.Fields.Facets qualified as Facets import Models.Apis.Issues qualified as Issues import Models.Apis.RequestDumps qualified as RequestDump @@ -55,7 +60,7 @@ import Models.Projects.Projects qualified as Projects import Models.Telemetry.Schema qualified as Schema import Models.Telemetry.Telemetry qualified as Telemetry import Models.Users.Sessions qualified as Sessions -import Models.Users.Users (User (id)) +import Models.Users.Users (User (id), UserId) import NeatInterpolation (text) import Pages.BodyWrapper (BWConfig (..), PageCtx (..)) import Pages.Components (emptyState_, resizer_, statBox_) @@ -188,16 +193,21 @@ anomalyDetailCore pid firstM fetchIssue = do $ PageCtx baseBwconf $ toHtml ("Issue not found" :: Text) Just issue -> do - let bwconf = + errorM <- + issue.issueType & \case + Issues.RuntimeException -> Errors.getErrorLByHash pid issue.targetHash + _ -> pure Nothing + let isErrorRelated = issue.issueType `elem` [Issues.RuntimeException, Issues.ErrorEscalating, Issues.ErrorRegressed] + isResolved = maybe False (\e -> e.state == Errors.ESResolved) errorM + bwconf = baseBwconf { pageActions = Just $ div_ [class_ "flex gap-2"] do anomalyAcknowledgeButton pid (UUIDId issue.id.unUUIDId) (isJust issue.acknowledgedAt) "" anomalyArchiveButton pid (UUIDId issue.id.unUUIDId) (isJust issue.archivedAt) + -- Add error-specific buttons for runtime error issues + when isErrorRelated $ do + errorResolveButton_ pid issue.targetHash isResolved } - errorM <- - issue.issueType & \case - Issues.RuntimeException -> Anomalies.errorByHash pid issue.endpointHash - _ -> pure Nothing (trItem, spanRecs) <- case errorM of Just err -> do let targetTIdM = maybe err.recentTraceId (const err.firstTraceId) firstM @@ -253,7 +263,7 @@ timeStatBox_ title timeStr | otherwise = pass -anomalyDetailPage :: Projects.ProjectId -> Issues.Issue -> Maybe Telemetry.Trace -> V.Vector Telemetry.OtelLogsAndSpans -> Maybe Anomalies.ATError -> UTCTime -> Bool -> Html () +anomalyDetailPage :: Projects.ProjectId -> Issues.Issue -> Maybe Telemetry.Trace -> V.Vector Telemetry.OtelLogsAndSpans -> Maybe Errors.ErrorL -> UTCTime -> Bool -> Html () anomalyDetailPage pid issue tr otellogs errM now isFirst = do let spanRecs = V.catMaybes $ Telemetry.convertOtelLogsAndSpansToSpanRecord <$> otellogs issueId = UUID.toText issue.id.unUUIDId @@ -289,12 +299,12 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do case AE.fromJSON (getAeson issue.issueData) of AE.Success (exceptionData :: Issues.RuntimeExceptionData) -> do div_ [class_ "grid grid-cols-4 lg:grid-cols-8 gap-4"] do - -- Stats (1 column each) - statBox_ (Just pid) Nothing "Affected Requests" "" "0" Nothing Nothing - statBox_ (Just pid) Nothing "Affected Clients" "" "0" Nothing Nothing + -- Stats (1 column each) whenJust errM $ \err -> do + statBox_ (Just pid) Nothing 
"Affected Requests" "" (show err.occurrences) Nothing Nothing + statBox_ (Just pid) Nothing "Affected Clients" "" (show err.affectedUsers) Nothing Nothing timeStatBox_ "First Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.createdAt - timeStatBox_ "Last Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.updatedAt + timeStatBox_ "Last Seen" $ prettyTimeAuto now $ zonedTimeToUTC (fromMaybe err.updatedAt err.lastOccurredAt) widget div_ [class_ "flex flex-col gap-4"] do div_ [class_ "grid grid-cols-2 gap-4 w-full"] do @@ -570,6 +580,12 @@ newtype AIChatForm = AIChatForm {query :: Text} deriving anyclass (FromForm) +-- | Form for error assignment +newtype ErrorAssignForm = ErrorAssignForm {assigneeId :: UserId} + deriving stock (Generic, Show) + deriving anyclass (FromForm) + + -- | AI response structure from OpenAI data AIInvestigationResponse = AIInvestigationResponse { explanation :: Text @@ -772,6 +788,47 @@ aiChatHistoryGetH pid issueId = do addRespHeaders $ aiChatHistoryView_ pid messages +-- ============================================================================= +-- Error-specific action handlers +-- ============================================================================= + +errorActionPostH :: Projects.ProjectId -> Text -> Text -> ATAuthCtx (RespHeaders (Html ())) +errorActionPostH pid errorHash action = + case action of + "resolve" -> do + _ <- Errors.updateErrorStateByProjectAndHash pid errorHash Errors.ESResolved + addSuccessToast "Error resolved" Nothing + addRespHeaders $ errorResolveButton_ pid errorHash True + "unresolve" -> do + _ <- Errors.updateErrorStateByProjectAndHash pid errorHash Errors.ESOngoing + addSuccessToast "Error unresolved" Nothing + addRespHeaders $ errorResolveButton_ pid errorHash False + _ -> do + addErrorToast "Unknown action" Nothing + addRespHeaders pass + +-- | Assign an error to a user +errorAssignPostH :: Projects.ProjectId -> Text -> ErrorAssignForm -> ATAuthCtx (RespHeaders (Html ())) +errorAssignPostH pid errorHash form = + withError pid errorHash "Error assigned" (`Errors.assignError` form.assigneeId) $ + errorAssignButton_ pid errorHash (Just form.assigneeId) + + +-- | Helper for error actions +withError :: Projects.ProjectId -> Text -> Text -> (Errors.ErrorId -> ATAuthCtx Int64) -> Html () -> ATAuthCtx (RespHeaders (Html ())) +withError pid errorHash successMsg action responseHtml = do + _ <- Sessions.sessionAndProject pid + errorM <- Errors.getErrorByHash pid errorHash + case errorM of + Nothing -> do + addErrorToast "Error not found" Nothing + addRespHeaders pass + Just err -> do + _ <- action err.id + addSuccessToast successMsg Nothing + addRespHeaders responseHtml + + -- | Render a single chat response (user question + AI answer) aiChatResponse_ :: Projects.ProjectId -> Text -> Text -> Maybe [Widget.Widget] -> Html () aiChatResponse_ pid userQuery explanation widgetsM = @@ -1426,6 +1483,46 @@ anomalyArchiveButton pid aid archived = do if archived then "Unarchive" else "Archive" +-- | Resolve/Unresolve error button +errorResolveButton_ :: Projects.ProjectId -> Text -> Bool -> Html () +errorResolveButton_ pid errorHash isResolved = do + let endpoint = "/p/" <> pid.toText <> "/anomalies/errors/" <> errorHash <> "/actions/" <> if isResolved then "unresolve" else "resolve" + a_ + [ class_ + $ "inline-flex items-center gap-2 cursor-pointer py-2 px-3 rounded-xl " + <> (if isResolved then "bg-fillSuccess-weak text-textSuccess" else "btn-primary") + , term "data-tippy-content" $ if isResolved then "Reopen error" else "Resolve error" + , hxPost_ 
endpoint + , hxSwap_ "outerHTML" + ] + do + faSprite_ (if isResolved then "rotate-left" else "circle-check") "regular" "w-4 h-4" + if isResolved then "Resolved" else "Resolve" + + +-- | Assign error button (dropdown) +errorAssignButton_ :: Projects.ProjectId -> Text -> Maybe UserId -> Html () +errorAssignButton_ pid errorHash assigneeM = do + let isAssigned = isJust assigneeM + div_ [class_ "relative group/assign"] do + button_ + [ class_ + $ "inline-flex items-center gap-2 cursor-pointer py-2 px-3 rounded-xl " + <> (if isAssigned then "bg-fillBrand-weak text-textBrand" else "btn-primary") + , type_ "button" + ] + do + faSprite_ "user-plus" "regular" "w-4 h-4" + if isAssigned then "Assigned" else "Assign" + -- Dropdown will be populated with team members via HTMX if needed + div_ + [ class_ "absolute right-0 top-full mt-1 bg-fill rounded-lg shadow-lg border border-strokeWeak hidden group-hover/assign:block min-w-[200px] z-10" + , id_ $ "assign-dropdown-" <> errorHash + ] + do + div_ [class_ "p-2 text-xs text-textWeak"] "Click to see assignees" + + issueTypeBadge :: Issues.IssueType -> Bool -> Html () issueTypeBadge issueType critical = badge cls icon txt where diff --git a/src/Pkg/Drain.hs b/src/Pkg/Drain.hs index cca3908b2..3a5cb9626 100644 --- a/src/Pkg/Drain.hs +++ b/src/Pkg/Drain.hs @@ -18,6 +18,7 @@ import RequestMessages (replaceAllFormats) data LogGroup = LogGroup { template :: V.Vector Text , templateStr :: Text + , exampleLog :: Text , logIds :: V.Vector Text , frequency :: Int , firstSeen :: UTCTime @@ -88,6 +89,7 @@ createLogGroup templateTokens templateString logId now = { template = templateTokens , templateStr = templateString , logIds = V.singleton logId + , exampleLog = templateString , frequency = 1 , firstSeen = now , lastSeen = now @@ -105,9 +107,9 @@ calculateSimilarity tokens1 tokens2 in fromIntegral matches / fromIntegral total -updateTreeWithLog :: DrainTree -> Int -> Text -> V.Vector Text -> Text -> Text -> UTCTime -> DrainTree -updateTreeWithLog tree tokenCount firstToken tokensVec logId logContent now = - let (updatedChildren, wasUpdated) = updateOrCreateLevelOne (children tree) tokenCount firstToken tokensVec logId logContent now (config tree) +updateTreeWithLog :: DrainTree -> Int -> Text -> V.Vector Text -> Text -> Bool -> Text -> UTCTime -> DrainTree +updateTreeWithLog tree tokenCount firstToken tokensVec logId isSampleLog logContent now = + let (updatedChildren, wasUpdated) = updateOrCreateLevelOne (children tree) tokenCount firstToken tokensVec logId isSampleLog logContent now (config tree) newTotalLogs = totalLogs tree + 1 newTotalPatterns = if wasUpdated then totalPatterns tree else totalPatterns tree + 1 in tree @@ -117,12 +119,12 @@ updateTreeWithLog tree tokenCount firstToken tokensVec logId logContent now = } -updateOrCreateLevelOne :: V.Vector DrainLevelOne -> Int -> Text -> V.Vector Text -> Text -> Text -> UTCTime -> DrainConfig -> (V.Vector DrainLevelOne, Bool) -updateOrCreateLevelOne levelOnes targetCount firstToken tokensVec logId logContent now config = +updateOrCreateLevelOne :: V.Vector DrainLevelOne -> Int -> Text -> V.Vector Text -> Text -> Bool -> Text -> UTCTime -> DrainConfig -> (V.Vector DrainLevelOne, Bool) +updateOrCreateLevelOne levelOnes targetCount firstToken tokensVec logId isSampleLog logContent now config = case V.findIndex (\level -> tokenCount level == targetCount) levelOnes of Just index -> let existingLevel = levelOnes V.! 
index - (updatedChildren, wasUpdated) = updateOrCreateLevelTwo (nodes existingLevel) firstToken tokensVec logId logContent now config + (updatedChildren, wasUpdated) = updateOrCreateLevelTwo (nodes existingLevel) firstToken tokensVec logId isSampleLog logContent now config updatedLevel = existingLevel{nodes = updatedChildren} updatedLevelOnes = levelOnes V.// [(index, updatedLevel)] in (updatedLevelOnes, wasUpdated) @@ -134,12 +136,12 @@ updateOrCreateLevelOne levelOnes targetCount firstToken tokensVec logId logConte in (updatedLevelOnes, False) -updateOrCreateLevelTwo :: V.Vector DrainLevelTwo -> Text -> V.Vector Text -> Text -> Text -> UTCTime -> DrainConfig -> (V.Vector DrainLevelTwo, Bool) -updateOrCreateLevelTwo levelTwos targetToken tokensVec logId logContent now config = +updateOrCreateLevelTwo :: V.Vector DrainLevelTwo -> Text -> V.Vector Text -> Text -> Bool -> Text -> UTCTime -> DrainConfig -> (V.Vector DrainLevelTwo, Bool) +updateOrCreateLevelTwo levelTwos targetToken tokensVec logId isSampleLog logContent now config = case V.findIndex (\level -> firstToken level == targetToken) levelTwos of Just index -> let existingLevel = levelTwos V.! index - (updatedLogGroups, wasUpdated) = updateOrCreateLogGroup (logGroups existingLevel) tokensVec logId logContent now config + (updatedLogGroups, wasUpdated) = updateOrCreateLogGroup (logGroups existingLevel) tokensVec logId isSampleLog logContent now config updatedLevel = existingLevel{logGroups = updatedLogGroups} updatedLevelTwos = levelTwos V.// [(index, updatedLevel)] in (updatedLevelTwos, wasUpdated) @@ -166,15 +168,15 @@ leastRecentlyUsedIndex logGroups = & maybe 0 fst -updateOrCreateLogGroup :: V.Vector LogGroup -> V.Vector Text -> Text -> Text -> UTCTime -> DrainConfig -> (V.Vector LogGroup, Bool) -updateOrCreateLogGroup logGroups tokensVec logId logContent now config = +updateOrCreateLogGroup :: V.Vector LogGroup -> V.Vector Text -> Text -> Bool -> Text -> UTCTime -> DrainConfig -> (V.Vector LogGroup, Bool) +updateOrCreateLogGroup logGroups tokensVec logId isSampleLog logContent now config = case findBestMatch logGroups tokensVec (similarityThreshold config) of Just (index, bestGroup) -> let updatedTemplate = if V.length tokensVec == V.length (template bestGroup) then mergeTemplates (template bestGroup) tokensVec (wildcardToken config) else template bestGroup - updatedGroup = updateLogGroupWithTemplate bestGroup updatedTemplate logId logContent now + updatedGroup = updateLogGroupWithTemplate bestGroup updatedTemplate logId isSampleLog logContent now updatedGroups = logGroups V.// [(index, updatedGroup)] in (updatedGroups, True) Nothing -> @@ -213,23 +215,24 @@ mergeTemplates template1 template2 wildcardToken = -- Update log group with new template and log information -updateLogGroupWithTemplate :: LogGroup -> V.Vector Text -> Text -> Text -> UTCTime -> LogGroup -updateLogGroupWithTemplate group' newTemplate logId originalLog now = +updateLogGroupWithTemplate :: LogGroup -> V.Vector Text -> Text -> Bool -> Text -> UTCTime -> LogGroup +updateLogGroupWithTemplate group' newTemplate logId isSampleLog originalLog now = group' { template = newTemplate , templateStr = unwords $ V.toList newTemplate + , exampleLog = if isSampleLog then originalLog else exampleLog group' , logIds = V.cons logId (logIds group') , frequency = frequency group' + 1 , lastSeen = now } -getAllLogGroups :: DrainTree -> V.Vector (Text, V.Vector Text) +getAllLogGroups :: DrainTree -> V.Vector (Text, Text, V.Vector Text) getAllLogGroups tree = let levelOnes = 
children tree levelTwos = V.concatMap nodes levelOnes allLogGroups = V.concatMap logGroups levelTwos - in V.map (\grp -> (templateStr grp, logIds grp)) allLogGroups + in V.map (\grp -> (grp.exampleLog, templateStr grp, logIds grp)) allLogGroups looksLikeJson :: T.Text -> Bool diff --git a/src/Web/Routes.hs b/src/Web/Routes.hs index fcb9d1b7d..8f55f0bba 100644 --- a/src/Web/Routes.hs +++ b/src/Web/Routes.hs @@ -57,6 +57,7 @@ import Data.CaseInsensitive qualified as CI import Data.Effectful.Wreq qualified as Wreq import Data.Text qualified as T import Models.Apis.Anomalies qualified as Anomalies +import Models.Apis.Errors qualified as Errors import Models.Apis.Monitors qualified as Monitors import Models.Apis.Reports qualified as ReportsM import Models.Projects.Dashboards qualified as Dashboards @@ -300,6 +301,9 @@ data AnomaliesRoutes' mode = AnomaliesRoutes' , anomalyHashGet :: mode :- "by_hash" :> Capture "anomalyHash" Text :> QPT "first_occurrence" :> Get '[HTML] (RespHeaders (PageCtx (Html ()))) , aiChatPost :: mode :- Capture "issueID" Anomalies.IssueId :> "ai_chat" :> ReqBody '[FormUrlEncoded] AnomalyList.AIChatForm :> Post '[HTML] (RespHeaders (Html ())) , aiChatHistoryGet :: mode :- Capture "issueID" Anomalies.IssueId :> "ai_chat" :> "history" :> Get '[HTML] (RespHeaders (Html ())) + , -- Error-specific actions + errorActionPost :: mode :- "errors" :> Capture "errorHash" Text :> "actions" :> Capture "action" Text :> Post '[HTML] (RespHeaders (Html ())) + , errorAssignPost :: mode :- "errors" :> Capture "errorHash" Text :> "assign" :> ReqBody '[FormUrlEncoded] AnomalyList.ErrorAssignForm :> Post '[HTML] (RespHeaders (Html ())) } deriving stock (Generic) @@ -518,6 +522,9 @@ anomaliesServer pid = , anomalyHashGet = AnomalyList.anomalyDetailHashGetH pid , aiChatPost = AnomalyList.aiChatPostH pid , aiChatHistoryGet = AnomalyList.aiChatHistoryGetH pid + , -- Error-specific handlers + errorActionPost = AnomalyList.errorActionPostH pid + , errorAssignPost = AnomalyList.errorAssignPostH pid } From 51e55b76ec163e3e2ef8d7051b8c02b2b6d67362 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Thu, 15 Jan 2026 08:19:09 +0000 Subject: [PATCH 52/71] log pattern bug fixes --- src/BackgroundJobs.hs | 66 +++++++++++---------- src/Models/Apis/LogPatterns.hs | 14 +++-- static/migrations/0032_drop_issue_index.sql | 1 + 3 files changed, 44 insertions(+), 37 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 8e6201bac..a5011146c 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -524,24 +524,25 @@ logsPatternExtraction scheduledTime pid = do limitVal = 250 paginate :: Int -> UTCTime -> ATBackgroundCtx () paginate offset startTime = do - otelEvents <- PG.query [sql| SELECT kind, id::text, coalesce(body::text,''), coalesce(summary::text,''), context___trace_id, resource___service___name FROM otel_logs_and_spans WHERE project_id = ? AND timestamp >= ? AND timestamp < ? AND (summary_pattern IS NULL OR log_pattern IS NULL) OFFSET ? LIMIT ?|] (pid, startTime, scheduledTime, offset, limitVal) + otelEvents :: [(Text, Text, Text, Text, Maybe Text, Maybe Text, Maybe Text)] <- PG.query [sql| SELECT kind, id::text, coalesce(body::text,''), coalesce(summary::text,''), context___trace_id, resource___service___name, level FROM otel_logs_and_spans WHERE project_id = ? AND timestamp >= ? AND timestamp < ? AND (summary_pattern IS NULL OR log_pattern IS NULL) OFFSET ? 
LIMIT ?|] (pid, startTime, scheduledTime, offset, limitVal) unless (null otelEvents) do Log.logInfo "Fetching events for pattern extraction" ("offset", AE.toJSON offset, "count", AE.toJSON (length otelEvents)) - let (logs, summaries) = L.partition (\(k, _, _, _, _, _) -> k == "log") otelEvents - processPatterns "log" "log_pattern" (V.fromList [(i, body) | (_, i, body, _, trId, serviceName) <- logs]) pid scheduledTime startTime - processPatterns "summary" "summary_pattern" (V.fromList [(i, s) | (_, i, _, s, _, _) <- summaries]) pid scheduledTime startTime + let (logs, summaries) = L.partition (\(k, _, _, _, _, _, _) -> k == "log") otelEvents + processPatterns "log" "log_pattern" (V.fromList [(i, body, trId, serviceName, level) | (_, i, body, _, trId, serviceName, level) <- logs]) pid scheduledTime startTime + processPatterns "summary" "summary_pattern" (V.fromList [(i, s, trId, serviceName, level) | (_, i, _, s, trId, serviceName, level) <- summaries]) pid scheduledTime startTime Log.logInfo "Completed events pattern extraction for page" ("offset", AE.toJSON offset) Relude.when (length otelEvents == limitVal) $ paginate (offset + limitVal) startTime -- | Generic pattern extraction for logs or summaries -processPatterns :: Text -> Text -> V.Vector (Text, Text, Text, Text) -> Projects.ProjectId -> UTCTime -> UTCTime -> ATBackgroundCtx () +-- events: (id, content, traceId, serviceName, level) +processPatterns :: Text -> Text -> V.Vector (Text, Text, Maybe Text, Maybe Text, Maybe Text) -> Projects.ProjectId -> UTCTime -> UTCTime -> ATBackgroundCtx () processPatterns kind fieldName events pid scheduledTime since = do Relude.when (not $ V.null events) $ do - let qq = [text| select $fieldName from otel_logs_and_spans where project_id= ? AND timestamp >= now() - interval '1 hour' and $fieldName is not null GROUP BY $fieldName ORDER BY count(*) desc limit 20|] existingPatterns <- LogPatterns.getLogPatternTexts pid - let known = V.fromList $ map ("",False,) existingPatterns - combined = known <> ((\(dd,smp) -> (dd, True, smp)) <$> events) + let known = V.fromList $ map (\pat -> ("", False, pat, Nothing, Nothing, Nothing)) existingPatterns + -- Include level in content for pattern matching so different levels create different patterns + combined = known <> ((\(logId, content, trId, serviceName, level) -> (logId, True, content, trId, serviceName, level)) <$> events) drainTree = processBatch (kind == "summary") combined scheduledTime Drain.emptyDrainTree newPatterns = Drain.getAllLogGroups drainTree -- Only log if patterns were extracted @@ -554,19 +555,24 @@ processPatterns kind fieldName events pid scheduledTime since = do -- Update otel_logs_and_spans with pattern void $ PG.execute (Query $ encodeUtf8 q) (patternTxt, pid, since, V.filter (/= "") ids) Relude.when (kind == "log" && not (T.null patternTxt)) $ do + let (serviceName, logLevel, traceId) = case V.head ids of + logId | logId /= "" -> case V.find (\(i, _, _, sName, lvl) -> i == logId) events of + Just (_, _, trId, sName, lvl) -> (sName, lvl, trId) + Nothing -> (Nothing, Nothing, Nothing) + _ -> (Nothing, Nothing, Nothing) let patternHash = toXXHash patternTxt traceShowM ("Upserting log pattern" , pid, patternTxt, patternHash, sampleMsg) - void $ LogPatterns.upsertLogPattern pid patternTxt patternHash Nothing Nothing (Just sampleMsg) + void $ LogPatterns.upsertLogPattern pid patternTxt patternHash serviceName logLevel traceId (Just sampleMsg) --- | Process a batch of (id, content) pairs through Drain -processBatch :: Bool -> V.Vector 
(Text,Bool, Text) -> UTCTime -> Drain.DrainTree -> Drain.DrainTree +-- | Process a batch of (id, isSampleLog, content, serviceName, level) tuples through Drain +processBatch :: Bool -> V.Vector (Text, Bool, Text, Maybe Text, Maybe Text, Maybe Text) -> UTCTime -> Drain.DrainTree -> Drain.DrainTree processBatch isSummary batch now inTree = - V.foldl' (\tree (logId, isSampleLog, content) -> processNewLog isSummary logId isSampleLog content now tree) inTree batch + V.foldl' (\tree (logId, isSampleLog, content, _, _, _) -> processNewLog isSummary logId isSampleLog content now tree) inTree batch processNewLog :: Bool -> Text -> Bool -> Text -> UTCTime -> Drain.DrainTree -> Drain.DrainTree -processNewLog isSummary logId isSampleLog content now tree = +processNewLog _isSummary logId isSampleLog content now tree = let tokens = Drain.generateDrainTokens content in if V.null tokens then tree @@ -1692,24 +1698,22 @@ detectLogPatternSpikes pid authCtx = do processNewLogPattern :: Projects.ProjectId -> Text -> Config.AuthContext -> ATBackgroundCtx () processNewLogPattern pid patternHash authCtx = do Log.logInfo "Processing new log pattern" (pid, patternHash) - - -- Get the pattern by hash - patternM <- LogPatterns.getLogPatternByHash pid patternHash - - case patternM of - Nothing -> Log.logAttention "Log pattern not found for new pattern processing" (pid, patternHash) - Just lp -> do - -- Only create issue for truly new patterns (state = 'new') - Relude.when (lp.state == LogPatterns.LPSNew) $ do - -- Create a new log pattern issue - issue <- liftIO $ Issues.createLogPatternIssue pid lp - Issues.insertIssue issue - - -- Queue LLM enhancement - liftIO $ withResource authCtx.jobsPool \conn -> - void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) - - Log.logInfo "Created issue for new log pattern" (pid, lp.id, issue.id) + totalEvents <- do + res <- PG.query [sql| SELECT count(5000) from otel_logs_and_spans WHERE project_id = ? 
AND timestamp >= now() - interval '7 days' |] (Only pid) + case res of + [Only cnt] -> return cnt + _ -> return 0 + if totalEvents < 5000 + then Log.logInfo "Skipping new endpoint issue creation due to low event volume" (pid, patternHash, totalEvents) + else do + patternM <- LogPatterns.getLogPatternByHash pid patternHash + whenJust patternM \lp -> do + Relude.when (lp.state == LogPatterns.LPSNew) $ do + issue <- liftIO $ Issues.createLogPatternIssue pid lp + Issues.insertIssue issue + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + Log.logInfo "Created issue for new log pattern" (pid, lp.id, issue.id) calculateEndpointBaselines :: Projects.ProjectId -> ATBackgroundCtx () diff --git a/src/Models/Apis/LogPatterns.hs b/src/Models/Apis/LogPatterns.hs index 5e1124634..7d81296c9 100644 --- a/src/Models/Apis/LogPatterns.hs +++ b/src/Models/Apis/LogPatterns.hs @@ -173,18 +173,20 @@ acknowledgeLogPatterns uid patternHashes |] -upsertLogPattern :: DB es => Projects.ProjectId -> Text -> Text -> Maybe Text -> Maybe Text -> Maybe Text -> Eff es Int64 -upsertLogPattern pid pat patHash serviceName logLevel sampleMsg = - PG.execute q (pid, pat, patHash, serviceName, logLevel, sampleMsg) +upsertLogPattern :: DB es => Projects.ProjectId -> Text -> Text -> Maybe Text -> Maybe Text -> Maybe Text -> Maybe Text -> Eff es Int64 +upsertLogPattern pid pat patHash serviceName logLevel traceId sampleMsg = + PG.execute q (pid, pat, patHash, serviceName, logLevel, traceId, sampleMsg) where q = [sql| - INSERT INTO apis.log_patterns (project_id, log_pattern, pattern_hash, service_name, log_level, sample_message) - VALUES (?, ?, ?, ?, ?, ?) + INSERT INTO apis.log_patterns (project_id, log_pattern, pattern_hash, service_name, log_level, trace_id, sample_message) + VALUES (?, ?, ?, ?, ?, ?, ?) 
ON CONFLICT (project_id, pattern_hash) DO UPDATE SET last_seen_at = NOW(), occurrence_count = apis.log_patterns.occurrence_count + 1, - sample_message = COALESCE(EXCLUDED.sample_message, apis.log_patterns.sample_message) + sample_message = COALESCE(EXCLUDED.sample_message, apis.log_patterns.sample_message), + service_name = COALESCE(EXCLUDED.service_name, apis.log_patterns.service_name), + trace_id = COALESCE(EXCLUDED.trace_id, apis.log_patterns.trace_id) |] diff --git a/static/migrations/0032_drop_issue_index.sql b/static/migrations/0032_drop_issue_index.sql index 847bbb59b..a8f75b4ab 100644 --- a/static/migrations/0032_drop_issue_index.sql +++ b/static/migrations/0032_drop_issue_index.sql @@ -8,4 +8,5 @@ BEGIN; ALTER TABLE apis.errors ADD COLUMN first_trace_id TEXT, ADD COLUMN recent_trace_id TEXT; + ALTER TABLE apis.log_patterns ADD COLUMN trace_id TEXT; COMMIT; \ No newline at end of file From 75d27ab4ad360ab7ee3ec75a61ca6327dc16b794 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 15 Jan 2026 08:19:49 +0000 Subject: [PATCH 53/71] Auto-format code with fourmolu --- src/BackgroundJobs.hs | 18 +++++++++--------- src/Models/Apis/Errors.hs | 13 +++++++++---- src/Pages/Anomalies.hs | 9 +++++---- 3 files changed, 23 insertions(+), 17 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index a5011146c..6fabd6262 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -561,7 +561,7 @@ processPatterns kind fieldName events pid scheduledTime since = do Nothing -> (Nothing, Nothing, Nothing) _ -> (Nothing, Nothing, Nothing) let patternHash = toXXHash patternTxt - traceShowM ("Upserting log pattern" , pid, patternTxt, patternHash, sampleMsg) + traceShowM ("Upserting log pattern", pid, patternTxt, patternHash, sampleMsg) void $ LogPatterns.upsertLogPattern pid patternTxt patternHash serviceName logLevel traceId (Just sampleMsg) @@ -1706,14 +1706,14 @@ processNewLogPattern pid patternHash authCtx = do if totalEvents < 5000 then Log.logInfo "Skipping new endpoint issue creation due to low event volume" (pid, patternHash, totalEvents) else do - patternM <- LogPatterns.getLogPatternByHash pid patternHash - whenJust patternM \lp -> do - Relude.when (lp.state == LogPatterns.LPSNew) $ do - issue <- liftIO $ Issues.createLogPatternIssue pid lp - Issues.insertIssue issue - liftIO $ withResource authCtx.jobsPool \conn -> - void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) - Log.logInfo "Created issue for new log pattern" (pid, lp.id, issue.id) + patternM <- LogPatterns.getLogPatternByHash pid patternHash + whenJust patternM \lp -> do + Relude.when (lp.state == LogPatterns.LPSNew) $ do + issue <- liftIO $ Issues.createLogPatternIssue pid lp + Issues.insertIssue issue + liftIO $ withResource authCtx.jobsPool \conn -> + void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + Log.logInfo "Created issue for new log pattern" (pid, lp.id, issue.id) calculateEndpointBaselines :: Projects.ProjectId -> ATBackgroundCtx () diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index adf2df111..c7de6b21f 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -5,7 +5,7 @@ module Models.Apis.Errors ( ErrorEvent (..), ErrorEventId, ATError (..), - ErrorL (..), + ErrorL (..), -- Queries getErrors, getErrorById, @@ -165,10 +165,10 @@ data Error = Error (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] Error --- 
error aggreted with number of occurrences and affected users
+
+-- error aggregated with number of occurrences and affected users
 data ErrorL = ErrorL
-  {
-    id :: ErrorId
+  { id :: ErrorId
   , projectId :: Projects.ProjectId
   , createdAt :: ZonedTime
   , updatedAt :: ZonedTime
@@ -215,6 +215,7 @@ data ErrorL = ErrorL
     (AE.FromJSON, AE.ToJSON)
     via DAE.CustomJSON '[DAE.OmitNothingFields, DAE.FieldLabelModifier '[DAE.CamelToSnake]] ErrorL
 
+
 data ATError = ATError
   { projectId :: Maybe Projects.ProjectId
   , when :: UTCTime
@@ -348,6 +349,7 @@ getErrorByHash pid hash = do
       WHERE project_id = ? AND hash = ?
     |]
 
+
 getErrorLByHash :: DB es => Projects.ProjectId -> Text -> Eff es (Maybe ErrorL)
 getErrorLByHash pid hash = do
   results <- PG.query q (pid, hash)
@@ -372,6 +374,7 @@ getErrorLByHash pid hash = do
       WHERE project_id = ? AND hash = ?
     |]
 
+
 -- | Get active (non-resolved) errors
 getActiveErrors :: DB es => Projects.ProjectId -> Eff es [Error]
 getActiveErrors pid = PG.query q (Only pid)
@@ -425,12 +428,14 @@ updateErrorState eid newState = PG.execute q (errorStateToText newState, eid)
     q = [sql| UPDATE apis.errors SET state = ?, updated_at = NOW() WHERE id = ? |]
 
+
 updateErrorStateByProjectAndHash :: DB es => Projects.ProjectId -> Text -> ErrorState -> Eff es Int64
 updateErrorStateByProjectAndHash pid hash newState = PG.execute q (errorStateToText newState, pid, hash)
   where
     q = [sql| UPDATE apis.errors SET state = ?, updated_at = NOW() WHERE project_id = ? AND hash = ? |]
 
+
 resolveError :: DB es => ErrorId -> Eff es Int64
 resolveError eid = PG.execute q (Only eid)
   where
diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs
index 570aa214f..e154e00de 100644
--- a/src/Pages/Anomalies.hs
+++ b/src/Pages/Anomalies.hs
@@ -299,7 +299,7 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do
         case AE.fromJSON (getAeson issue.issueData) of
           AE.Success (exceptionData :: Issues.RuntimeExceptionData) -> do
             div_ [class_ "grid grid-cols-4 lg:grid-cols-8 gap-4"] do
-              -- Stats (1 column each) 
+              -- Stats (1 column each)
               whenJust errM $ \err -> do
                 statBox_ (Just pid) Nothing "Affected Requests" "" (show err.occurrences) Nothing Nothing
                 statBox_ (Just pid) Nothing "Affected Clients" "" (show err.affectedUsers) Nothing Nothing
@@ -795,7 +795,7 @@ aiChatHistoryGetH pid issueId = do
 errorActionPostH :: Projects.ProjectId -> Text -> Text -> ATAuthCtx (RespHeaders (Html ()))
 errorActionPostH pid errorHash action =
   case action of
-    "resolve" -> do 
+    "resolve" -> do
       _ <- Errors.updateErrorStateByProjectAndHash pid errorHash Errors.ESResolved
       addSuccessToast "Error resolved" Nothing
       addRespHeaders $ errorResolveButton_ pid errorHash True
@@ -807,11 +807,12 @@ errorActionPostH pid errorHash action =
       addErrorToast "Unknown action" Nothing
       addRespHeaders pass
 
+
 -- | Assign an error to a user
 errorAssignPostH :: Projects.ProjectId -> Text -> ErrorAssignForm -> ATAuthCtx (RespHeaders (Html ()))
 errorAssignPostH pid errorHash form =
-  withError pid errorHash "Error assigned" (`Errors.assignError` form.assigneeId) $
-    errorAssignButton_ pid errorHash (Just form.assigneeId)
+  withError pid errorHash "Error assigned" (`Errors.assignError` form.assigneeId)
+    $ errorAssignButton_ pid errorHash (Just form.assigneeId)
 
 -- | Helper for error actions
From 574756bcd5d9a034023456ad97538e20ff7dad66 Mon Sep 17 00:00:00 2001
From: Yussif Mohammed
Date: Thu, 15 Jan 2026 10:38:49 +0000
Subject: [PATCH 54/71] fix new shape and new field processing

---
 src/Models/Apis/Endpoints.hs | 14 ++++++++++++--
 src/Models/Apis/Shapes.hs | 2 +-
.../0033_fix_api_change_trigger_payload.sql | 17 +++++++++++++++++ 3 files changed, 30 insertions(+), 3 deletions(-) create mode 100644 static/migrations/0033_fix_api_change_trigger_payload.sql diff --git a/src/Models/Apis/Endpoints.hs b/src/Models/Apis/Endpoints.hs index bafa62692..2a23b750d 100644 --- a/src/Models/Apis/Endpoints.hs +++ b/src/Models/Apis/Endpoints.hs @@ -416,7 +416,12 @@ getEndpointByHash pid hash = listToMaybe <$> PG.query q (pid, hash) where q = [sql| - SELECT id, created_at, updated_at, project_id, url_path, url_params, method, host, hash, outgoing, description + SELECT id, created_at, updated_at, project_id, url_path, url_params, method, host, hash, outgoing, description, + first_trace_id, recent_trace_id, service, + baseline_state, baseline_samples, baseline_updated_at, + baseline_error_rate_mean, baseline_error_rate_stddev, + baseline_latency_mean, baseline_latency_stddev, baseline_latency_p95, baseline_latency_p99, + baseline_volume_hourly_mean, baseline_volume_hourly_stddev FROM apis.endpoints WHERE project_id = ? AND hash = ? |] @@ -428,7 +433,12 @@ getActiveEndpoints pid = PG.query q (Only pid) where q = [sql| - SELECT id, created_at, updated_at, project_id, url_path, url_params, method, host, hash, outgoing, description + SELECT id, created_at, updated_at, project_id, url_path, url_params, method, host, hash, outgoing, description, + first_trace_id, recent_trace_id, service, + baseline_state, baseline_samples, baseline_updated_at, + baseline_error_rate_mean, baseline_error_rate_stddev, + baseline_latency_mean, baseline_latency_stddev, baseline_latency_p95, baseline_latency_p99, + baseline_volume_hourly_mean, baseline_volume_hourly_stddev FROM apis.endpoints WHERE project_id = ? |] diff --git a/src/Models/Apis/Shapes.hs b/src/Models/Apis/Shapes.hs index 9b895ba59..157fe57c9 100644 --- a/src/Models/Apis/Shapes.hs +++ b/src/Models/Apis/Shapes.hs @@ -147,7 +147,7 @@ data ShapeForIssue = ShapeForIssue getShapeForIssue :: DB es => Projects.ProjectId -> Text -> Eff es (Maybe ShapeForIssue) -getShapeForIssue pid hash = listToMaybe <$> PG.query q (Only hash) +getShapeForIssue pid hash = listToMaybe <$> PG.query q (pid, hash) where q = [sql| diff --git a/static/migrations/0033_fix_api_change_trigger_payload.sql b/static/migrations/0033_fix_api_change_trigger_payload.sql new file mode 100644 index 000000000..6b51ed7ba --- /dev/null +++ b/static/migrations/0033_fix_api_change_trigger_payload.sql @@ -0,0 +1,17 @@ +BEGIN; + +CREATE OR REPLACE FUNCTION apis.api_change_detected_proc() RETURNS trigger AS $$ +DECLARE + job_tag TEXT; +BEGIN + IF TG_WHEN <> 'AFTER' THEN + RAISE EXCEPTION 'apis.api_change_detected_proc() may only run as an AFTER trigger'; + END IF; + job_tag := TG_ARGV[0]; + INSERT INTO background_jobs (run_at, status, payload) + VALUES (now(), 'queued', jsonb_build_object('tag', job_tag, 'contents', jsonb_build_array(NEW.project_id, NEW.hash))); + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + +COMMIT; From 9e66d4cf79612148fe8a7547173fe697ebe2946a Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Fri, 16 Jan 2026 08:03:15 +0000 Subject: [PATCH 55/71] fix shape issue insertion --- src/BackgroundJobs.hs | 2 +- src/Models/Apis/Shapes.hs | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 6fabd6262..955768ac1 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -1704,7 +1704,7 @@ processNewLogPattern pid patternHash authCtx = do [Only cnt] -> return cnt _ -> return 0 if 
totalEvents < 5000
-    then Log.logInfo "Skipping new endpoint issue creation due to low event volume" (pid, patternHash, totalEvents)
+    then Log.logInfo "Skipping new log pattern issue creation due to low event volume" (pid, patternHash, totalEvents)
     else do
       patternM <- LogPatterns.getLogPatternByHash pid patternHash
diff --git a/src/Models/Apis/Shapes.hs b/src/Models/Apis/Shapes.hs
index 157fe57c9..fb108f48c 100644
--- a/src/Models/Apis/Shapes.hs
+++ b/src/Models/Apis/Shapes.hs
@@ -157,12 +157,12 @@ getShapeForIssue pid hash = listToMaybe <$> PG.query q (pid, hash)
         COALESCE(e.method, 'UNKNOWN'),
         COALESCE(e.url_path, '/'),
         s.status_code,
-        s.example_request_payload, '{}'::jsonb,
-        s.example_response_payload, '{}'::jsonb,
-        s.new_unique_fields, '{}'::TEXT[],
-        s.deleted_fields, '{}'::TEXT[],
-        s.updated_field_formats, '{}'::TEXT[],
-        s.field_hashes, '{}'::TEXT[]
+        COALESCE(s.example_request_payload, '{}'::jsonb),
+        COALESCE(s.example_response_payload, '{}'::jsonb),
+        COALESCE(s.new_unique_fields, '{}'::TEXT[]),
+        COALESCE(s.deleted_fields, '{}'::TEXT[]),
+        COALESCE(s.updated_field_formats, '{}'::TEXT[]),
+        COALESCE(s.field_hashes, '{}'::TEXT[])
       FROM apis.shapes s
       LEFT JOIN apis.endpoints e ON e.hash = s.endpoint_hash
       WHERE s.project_id = ? AND s.hash = ?
From 23ba849fba289d96390cf603c3fb3747d4275e9f Mon Sep 17 00:00:00 2001
From: Yussif Mohammed
Date: Fri, 16 Jan 2026 08:52:13 +0000
Subject: [PATCH 56/71] gate endpoint metadata and change stats by issue type

---
 src/Pages/Anomalies.hs | 54 +++++++++++++++++++++++-------------------
 1 file changed, 30 insertions(+), 24 deletions(-)

diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs
index e154e00de..c37670e9f 100644
--- a/src/Pages/Anomalies.hs
+++ b/src/Pages/Anomalies.hs
@@ -1155,6 +1155,12 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue
     -- Metadata row (method, endpoint, service, time)
     div_ [class_ "flex items-center gap-4 text-sm text-textWeak mb-3 flex-wrap"] do
       -- Method and endpoint (for API changes)
+      when (issue.issueType `elem` [Issues.NewShape, Issues.NewEndpoint, Issues.EndpointLatencyDegradation]) do
+        case AE.fromJSON (getAeson issue.issueData) of
+          AE.Success (d :: Issues.EndpointRelatedData) -> do
+            span_ [class_ "font-monospace"] $ toHtml d.endpointMethod
+            span_ [class_ "font-monospace"] $ toHtml d.endpointPath
+          _ -> pass
       -- Service badge
       span_ [class_ "flex items-center gap-1"] do
        div_ [class_ "w-3 h-3 bg-fillYellow rounded-sm"] ""
@@ -1163,30 +1169,30 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue
       span_ [class_ "text-textWeak"] $ toHtml timeSinceString
 
     -- Statistics row (only for API changes)
-
-    let allChanges = getAeson issue.requestPayloads <> getAeson issue.responsePayloads :: [Anomalies.PayloadChange]
-        countChange (!b, !i, !t) c = case c.changeType of
-          Anomalies.Breaking -> (b + 1, i, t + 1)
-          Anomalies.Incremental -> (b, i + 1, t + 1)
-          _ -> (b, i, t + 1)
-        (breakingChanges, incrementalChanges, totalChanges) = foldl' countChange (0, 0, 0) allChanges
-    div_ [class_ "flex items-center gap-4 text-sm mb-4 p-3 bg-fillWeak rounded-lg"] do
-      span_ [class_ "text-textWeak"] do
-        strong_ [class_ "text-textStrong"] $ toHtml $ show totalChanges
-        " total changes"
-      div_ [class_ "w-px h-4 bg-strokeWeak"] ""
-      span_ [class_ "text-textWeak"] do
-        strong_ [class_ "text-fillError-strong"] $ toHtml $ show breakingChanges
-        " breaking"
-      when (breakingChanges > 0 && totalChanges > 0) $ span_ [class_ "text-xs ml-1 bg-fillError-weak text-fillError-strong px-1.5 py-0.5 
rounded"] $ toHtml $ show (round (fromIntegral breakingChanges / fromIntegral totalChanges * 100 :: Float) :: Int) <> "%" - div_ [class_ "w-px h-4 bg-strokeWeak"] "" - span_ [class_ "text-textWeak"] do - strong_ [class_ "text-fillSuccess-strong"] $ toHtml $ show incrementalChanges - " incremental" - div_ [class_ "w-px h-4 bg-strokeWeak"] "" - span_ [class_ "text-textWeak"] do - strong_ [class_ "text-textBrand"] $ toHtml $ show totalChanges - " payloads affected" + when (issue.issueType `elem` [Issues.NewShape]) do + let allChanges = getAeson issue.requestPayloads <> getAeson issue.responsePayloads :: [Anomalies.PayloadChange] + countChange (!b, !i, !t) c = case c.changeType of + Anomalies.Breaking -> (b + 1, i, t + 1) + Anomalies.Incremental -> (b, i + 1, t + 1) + _ -> (b, i, t + 1) + (breakingChanges, incrementalChanges, totalChanges) = foldl' countChange (0, 0, 0) allChanges + div_ [class_ "flex items-center gap-4 text-sm mb-4 p-3 bg-fillWeak rounded-lg"] do + span_ [class_ "text-textWeak"] do + strong_ [class_ "text-textStrong"] $ toHtml $ show totalChanges + " total changes" + div_ [class_ "w-px h-4 bg-strokeWeak"] "" + span_ [class_ "text-textWeak"] do + strong_ [class_ "text-fillError-strong"] $ toHtml $ show breakingChanges + " breaking" + when (breakingChanges > 0 && totalChanges > 0) $ span_ [class_ "text-xs ml-1 bg-fillError-weak text-fillError-strong px-1.5 py-0.5 rounded"] $ toHtml $ show (round (fromIntegral breakingChanges / fromIntegral totalChanges * 100 :: Float) :: Int) <> "%" + div_ [class_ "w-px h-4 bg-strokeWeak"] "" + span_ [class_ "text-textWeak"] do + strong_ [class_ "text-fillSuccess-strong"] $ toHtml $ show incrementalChanges + " incremental" + div_ [class_ "w-px h-4 bg-strokeWeak"] "" + span_ [class_ "text-textWeak"] do + strong_ [class_ "text-textBrand"] $ toHtml $ show totalChanges + " payloads affected" -- Issue-specific details case issue.issueType of From 7d3da4a966ca583b704af470ad349f4aea2ad8bd Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Fri, 16 Jan 2026 19:57:41 +0000 Subject: [PATCH 57/71] improve anomaly display ui --- src/BackgroundJobs.hs | 5 +- src/Models/Apis/Issues/Enhancement.hs | 15 +- src/Models/Apis/Shapes.hs | 16 +++ src/Pages/Anomalies.hs | 191 +++++++++++++++++++------- 4 files changed, 160 insertions(+), 67 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 955768ac1..512a0bbee 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -1867,8 +1867,6 @@ processNewEndpoint pid hash authCtx = do Just ep -> do issue <- liftIO $ Issues.createNewEndpointIssue pid ep.hash ep.method ep.urlPath ep.host Issues.insertIssue issue - liftIO $ withResource authCtx.jobsPool \conn -> - void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) Log.logInfo "Created issue for new endpoint" (pid, hash, issue.id) -- Send notifications only if project exists and has alerts enabled Relude.when project.endpointAlerts $ do @@ -1930,8 +1928,7 @@ processNewShape pid hash authCtx = do shape.modifiedFields shape.fieldHashes Issues.insertIssue issue - liftIO $ withResource authCtx.jobsPool \conn -> - void $ createJob conn "background_jobs" $ EnhanceIssuesWithLLM pid (V.singleton issue.id) + -- llm enhancement job is not needed Log.logInfo "Created issue for new shape" (pid, hash, issue.id) _ -> pass diff --git a/src/Models/Apis/Issues/Enhancement.hs b/src/Models/Apis/Issues/Enhancement.hs index 8feeb4785..4c204e47a 100644 --- a/src/Models/Apis/Issues/Enhancement.hs +++ 
b/src/Models/Apis/Issues/Enhancement.hs @@ -292,17 +292,8 @@ updateIssueClassification issueId isCritical breakingCount incrementalCount = do -- | Generate a simple title for issue types that don't need LLM simpleTitle :: Issues.Issue -> Maybe Text simpleTitle issue = case issue.issueType of - Issues.NewEndpoint -> - case AE.fromJSON (getAeson issue.issueData) of - AE.Success (d :: Issues.NewEndpointData) -> - Just $ "New endpoint discovered: " <> d.endpointMethod <> " " <> d.endpointPath - _ -> Just "New endpoint discovered" - Issues.NewShape -> - case AE.fromJSON (getAeson issue.issueData) of - AE.Success (d :: Issues.NewShapeData) -> - let changes = V.length d.newFields + V.length d.deletedFields + V.length d.modifiedFields - in Just $ "New response shape detected on " <> d.endpointMethod <> " " <> d.endpointPath <> " (" <> toText (show changes) <> " field changes)" - _ -> Just "New response shape detected" + Issues.NewEndpoint -> Just $ "New endpoint detected" + Issues.NewShape -> Just $ "New response shape detected" Issues.FieldChange -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.FieldChangeData) -> @@ -312,7 +303,7 @@ simpleTitle issue = case issue.issueType of case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.LogPatternData) -> let level = fromMaybe "LOG" d.logLevel - in Just $ "New " <> level <> " pattern detected" <> maybe "" (" in " <>) d.serviceName + in Just $ "New " <> level <> " pattern detected" _ -> Just "New log pattern detected" Issues.ErrorEscalating -> case AE.fromJSON (getAeson issue.issueData) of diff --git a/src/Models/Apis/Shapes.hs b/src/Models/Apis/Shapes.hs index fb108f48c..8a3c3b4f7 100644 --- a/src/Models/Apis/Shapes.hs +++ b/src/Models/Apis/Shapes.hs @@ -6,6 +6,7 @@ module Models.Apis.Shapes ( ShapeForIssue (..), bulkInsertShapes, getShapeForIssue, + getShapeByHash, ) where @@ -167,3 +168,18 @@ getShapeForIssue pid hash = listToMaybe <$> PG.query q (pid, hash) LEFT JOIN apis.endpoints e ON e.hash = s.endpoint_hash WHERE s.project_id = ? AND s.hash = ? |] + + +getShapeByHash :: DB es => Projects.ProjectId -> Text -> Eff es (Maybe Shape) +getShapeByHash pid hash = listToMaybe <$> PG.query q (pid, hash) + where + q = + [sql| + SELECT id, created_at, updated_at, approved_on, project_id, endpoint_hash, + query_params_keypaths, request_body_keypaths, response_body_keypaths, + request_headers_keypaths, response_headers_keypaths, field_hashes, + hash, status_code, response_description, request_description, + example_request_payload, example_response_payload, first_trace_id, recent_trace_id, service + FROM apis.shapes + WHERE project_id = ? AND hash = ? 
+ |] diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index c37670e9f..a39ee2464 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -31,6 +31,7 @@ where import Data.Aeson qualified as AE import Data.Aeson.Types (parseMaybe) +import Deriving.Aeson qualified as DAE import Data.Default (def) import Data.Map qualified as Map import Data.Text qualified as T @@ -52,6 +53,7 @@ import Lucid.Hyperscript (__) import Models.Apis.Anomalies (FieldChange (..), PayloadChange (..)) import Models.Apis.Anomalies qualified as Anomalies import Models.Apis.Endpoints qualified as Endpoints +import Models.Apis.Shapes qualified as Shapes import Models.Apis.Errors qualified as Errors import Models.Apis.Fields.Facets qualified as Facets import Models.Apis.Issues qualified as Issues @@ -87,6 +89,15 @@ newtype AnomalyBulkForm = AnomalyBulk deriving anyclass (FromForm) +-- | Helper type to extract common endpoint fields from any issue data type +data IssueEndpointInfo = IssueEndpointInfo + { endpointMethod :: Text + , endpointPath :: Text + } + deriving stock (Generic) + deriving (AE.FromJSON) via DAE.CustomJSON '[DAE.FieldLabelModifier '[DAE.CamelToSnake]] IssueEndpointInfo + + acknowledgeAnomalyGetH :: Projects.ProjectId -> Anomalies.AnomalyId -> Maybe Text -> ATAuthCtx (RespHeaders AnomalyAction) acknowledgeAnomalyGetH pid aid hostM = do (sess, project) <- Sessions.sessionAndProject pid @@ -208,21 +219,58 @@ anomalyDetailCore pid firstM fetchIssue = do when isErrorRelated $ do errorResolveButton_ pid issue.targetHash isResolved } - (trItem, spanRecs) <- case errorM of - Just err -> do - let targetTIdM = maybe err.recentTraceId (const err.firstTraceId) firstM - targetTme = maybe (zonedTimeToUTC err.updatedAt) (const $ zonedTimeToUTC err.createdAt) firstM - case targetTIdM of - Just x -> do - trM <- Telemetry.getTraceDetails pid x (Just targetTme) now + -- Helper to fetch trace and spans given a trace ID + let fetchTraceData traceIdM timeHint = case traceIdM of + Just traceId -> do + trM <- Telemetry.getTraceDetails pid traceId timeHint now case trM of Just traceItem -> do - spanRecords' <- - Telemetry.getSpanRecordsByTraceId pid traceItem.traceId (Just traceItem.traceStartTime) now + spanRecords' <- Telemetry.getSpanRecordsByTraceId pid traceItem.traceId (Just traceItem.traceStartTime) now pure (Just traceItem, V.fromList spanRecords') Nothing -> pure (Nothing, V.empty) Nothing -> pure (Nothing, V.empty) - Nothing -> pure (Nothing, V.empty) + + (trItem, spanRecs) <- case errorM of + Just err -> do + let targetTIdM = maybe err.recentTraceId (const err.firstTraceId) firstM + targetTme = maybe (zonedTimeToUTC err.updatedAt) (const $ zonedTimeToUTC err.createdAt) firstM + fetchTraceData targetTIdM (Just targetTme) + Nothing -> case issue.issueType of + Issues.NewShape -> do + shapeM <- Shapes.getShapeByHash pid issue.targetHash + case shapeM of + Just shape -> do + let targetTIdM = maybe shape.recentTraceId (const shape.firstTraceId) firstM + fetchTraceData targetTIdM (Just $ shape.createdAt) + Nothing -> pure (Nothing, V.empty) + Issues.NewEndpoint -> do + endpointM <- Endpoints.getEndpointByHash pid issue.targetHash + case endpointM of + Just endpoint -> do + let targetTIdM = maybe endpoint.recentTraceId (const endpoint.firstTraceId) firstM + fetchTraceData targetTIdM (Just endpoint.createdAt) + Nothing -> pure (Nothing, V.empty) + Issues.EndpointLatencyDegradation -> do + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.EndpointLatencyDegradationData) -> do + let 
targetTIdM = d.sampleTraceIds V.!? 0 + fetchTraceData targetTIdM (Just d.detectedAt) + _ -> pure (Nothing, V.empty) + Issues.EndpointErrorRateSpike -> do + endpointM <- Endpoints.getEndpointByHash pid issue.targetHash + case endpointM of + Just endpoint -> do + let targetTIdM = maybe endpoint.recentTraceId (const endpoint.firstTraceId) firstM + fetchTraceData targetTIdM (Just endpoint.createdAt) + Nothing -> pure (Nothing, V.empty) + Issues.EndpointVolumeRateChange -> do + endpointM <- Endpoints.getEndpointByHash pid issue.targetHash + case endpointM of + Just endpoint -> do + let targetTIdM = maybe endpoint.recentTraceId (const endpoint.firstTraceId) firstM + fetchTraceData targetTIdM (Just endpoint.createdAt) + Nothing -> pure (Nothing, V.empty) + _ -> pure (Nothing, V.empty) addRespHeaders $ PageCtx bwconf $ anomalyDetailPage pid issue trItem spanRecs errorM now (isJust firstM) @@ -278,18 +326,19 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do _ -> pass h3_ [class_ "text-textStrong text-2xl font-semibold"] $ toHtml issue.title p_ [class_ "text-sm text-textWeak max-w-3xl"] $ toHtml issue.recommendedAction - let widget = + let widget title q = div_ [class_ "col-span-4"] $ Widget.widget_ $ (def :: Widget.Widget) { Widget.standalone = Just True , Widget.id = Just $ issueId <> "-timeline" + , Widget.naked = Just True , Widget.wType = Widget.WTTimeseries - , Widget.title = Just "Error trends" + , Widget.title = Just title , Widget.showTooltip = Just True , Widget.xAxis = Just (def{Widget.showAxisLabel = Just True}) , Widget.yAxis = Just (def{Widget.showOnlyMaxLabel = Just True}) - , Widget.query = Just "status_code == \"ERROR\" | summarize count(*) by bin_auto(timestamp), status_code" + , Widget.query = Just q , Widget._projectId = Just issue.projectId , Widget.hideLegend = Just True } @@ -305,7 +354,7 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do statBox_ (Just pid) Nothing "Affected Clients" "" (show err.affectedUsers) Nothing Nothing timeStatBox_ "First Seen" $ prettyTimeAuto now $ zonedTimeToUTC err.createdAt timeStatBox_ "Last Seen" $ prettyTimeAuto now $ zonedTimeToUTC (fromMaybe err.updatedAt err.lastOccurredAt) - widget + widget "Error trend" "status_code == \"ERROR\" | summarize count(*) by bin_auto(timestamp), status_code" div_ [class_ "flex flex-col gap-4"] do div_ [class_ "grid grid-cols-2 gap-4 w-full"] do div_ [class_ "surface-raised rounded-2xl overflow-hidden"] do @@ -363,17 +412,27 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do Issues.NewEndpoint -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.NewEndpointData) -> do - div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do - statBox_ (Just pid) Nothing "Method" "" d.endpointMethod Nothing Nothing - statBox_ (Just pid) Nothing "Path" "" d.endpointPath Nothing Nothing - statBox_ (Just pid) Nothing "Host" "" d.endpointHost Nothing Nothing + div_ [class_ "flex items-center gap-3 mb-4 p-3 rounded-lg"] do + span_ [class_ $ "badge " <> methodFillColor d.endpointMethod] $ toHtml d.endpointMethod + span_ [class_ "monospace bg-fillWeaker px-2 py-1 rounded text-sm text-textStrong"] $ toHtml d.endpointPath + div_ [class_ "w-px h-4 bg-strokeWeak"] "" + span_ [class_ "flex items-center gap-1.5 text-sm text-textWeak"] do + faSprite_ "server" "regular" "h-3 w-3" + toHtml d.endpointHost + -- Stats and chart + div_ [class_ "grid grid-cols-4 lg:grid-cols-8 gap-4 mb-4"] do timeStatBox_ "First Seen" $ prettyTimeAuto now d.firstSeenAt + widget "Request trend" $ 
"attributes.http.request.method==\"" <> d.endpointMethod <> "\" AND attributes.http.route==\"" <> d.endpointPath <> "\" | summarize count(*) by bin_auto(timestamp)" _ -> pass Issues.NewShape -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.NewShapeData) -> do + div_ [class_ "flex items-center gap-3 mb-4 p-3 rounded-lg"] do + span_ [class_ $ "badge " <> methodFillColor d.endpointMethod] $ toHtml d.endpointMethod + span_ [class_ "monospace bg-fillWeaker px-2 py-1 rounded text-sm text-textStrong"] $ toHtml d.endpointPath + div_ [class_ "w-px h-4 bg-strokeWeak"] "" + div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do - statBox_ (Just pid) Nothing "Endpoint" "" (d.endpointMethod <> " " <> d.endpointPath) Nothing Nothing statBox_ (Just pid) Nothing "Status Code" "" (show d.statusCode) Nothing Nothing statBox_ (Just pid) Nothing "New Fields" "" (show $ V.length d.newFields) Nothing Nothing statBox_ (Just pid) Nothing "Deleted Fields" "" (show $ V.length d.deletedFields) Nothing Nothing @@ -487,8 +546,11 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do Issues.EndpointLatencyDegradation -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.EndpointLatencyDegradationData) -> do + div_ [class_ "flex items-center gap-3 mb-4 p-3 rounded-lg"] do + span_ [class_ $ "badge " <> methodFillColor d.endpointMethod] $ toHtml d.endpointMethod + span_ [class_ "monospace bg-fillWeaker px-2 py-1 rounded text-sm text-textStrong"] $ toHtml d.endpointPath + div_ [class_ "w-px h-4 bg-strokeWeak"] "" div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do - statBox_ (Just pid) Nothing "Endpoint" "" (d.endpointMethod <> " " <> d.endpointPath) Nothing Nothing statBox_ (Just pid) Nothing "Percentile" "" d.percentile Nothing Nothing statBox_ (Just pid) Nothing "Current Latency" "" (show (round d.currentLatencyMs :: Int) <> "ms") Nothing Nothing statBox_ (Just pid) Nothing "Baseline" "" (show (round d.baselineLatencyMs :: Int) <> "ms") Nothing Nothing @@ -498,8 +560,12 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do Issues.EndpointErrorRateSpike -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.EndpointErrorRateSpikeData) -> do + div_ [class_ "flex items-center gap-3 mb-4 p-3 rounded-lg"] do + span_ [class_ $ "badge " <> methodFillColor d.endpointMethod] $ toHtml d.endpointMethod + span_ [class_ "monospace bg-fillWeaker px-2 py-1 rounded text-sm text-textStrong"] $ toHtml d.endpointPath + div_ [class_ "w-px h-4 bg-strokeWeak"] "" + div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do - statBox_ (Just pid) Nothing "Endpoint" "" (d.endpointMethod <> " " <> d.endpointPath) Nothing Nothing statBox_ (Just pid) Nothing "Error Rate" "" (show (round (d.currentErrorRate * 100) :: Int) <> "%") Nothing Nothing statBox_ (Just pid) Nothing "Errors" "" (show d.errorCount <> "/" <> show d.totalRequests) Nothing Nothing statBox_ (Just pid) Nothing "Baseline" "" (show (round (d.baselineErrorRate * 100) :: Int) <> "%") Nothing Nothing @@ -514,8 +580,11 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do Issues.EndpointVolumeRateChange -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.EndpointVolumeRateChangeData) -> do + div_ [class_ "flex items-center gap-3 mb-4 p-3 rounded-lg"] do + span_ [class_ $ "badge " <> methodFillColor d.endpointMethod] $ toHtml d.endpointMethod + span_ [class_ "monospace bg-fillWeaker px-2 py-1 rounded text-sm text-textStrong"] $ toHtml d.endpointPath + 
div_ [class_ "w-px h-4 bg-strokeWeak"] "" div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do - statBox_ (Just pid) Nothing "Endpoint" "" (d.endpointMethod <> " " <> d.endpointPath) Nothing Nothing statBox_ (Just pid) Nothing "Direction" "" d.changeDirection Nothing Nothing statBox_ (Just pid) Nothing "Current Rate" "" (show (round d.currentRatePerHour :: Int) <> "/hr") Nothing Nothing statBox_ (Just pid) Nothing "Baseline" "" (show (round d.baselineRatePerHour :: Int) <> "/hr") Nothing Nothing @@ -1155,11 +1224,11 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue -- Metadata row (method, endpoint, service, time) div_ [class_ "flex items-center gap-4 text-sm text-textWeak mb-3 flex-wrap"] do -- Method and endpoint (for API changes) - when (issue.issueType `elem` [Issues.NewShape, Issues.NewEndpoint, Issues.EndpointLatencyDegradation]) do - case AE.fromJSON (getAeson issue.issueData) of - AE.Success (d :: Issues.EndpointRelatedData) -> do - span_ [class_ "font-monospace"] $ toHtml d.endpointMethod - span_ [class_ "font-monospace"] $ toHtml d.endpointPath + case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: IssueEndpointInfo) -> do + div_ [class_ "flex items-center gap-2"] do + span_ [class_ $ "badge " <> methodFillColor d.endpointMethod] $ toHtml d.endpointMethod + span_ [class_ "monospace bg-fillWeak px-2 py-1 rounded text-xs text-textStrong"] $ toHtml d.endpointPath _ -> pass -- Service badge span_ [class_ "flex items-center gap-1"] do @@ -1213,11 +1282,16 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue _ -> pass Issues.NewEndpoint -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.NewEndpointData) -> - div_ [class_ "mb-4 p-3 bg-fillInformation-weak border border-strokeInformation-weak rounded-lg"] do - div_ [class_ "flex items-center gap-2 text-sm"] do - span_ [class_ "font-medium text-fillInformation-strong"] $ toHtml d.endpointMethod - span_ [class_ "text-fillInformation-strong"] $ toHtml d.endpointPath - div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml $ "Host: " <> d.endpointHost + let shapeCount = V.length d.initialShapes + in div_ [class_ "mb-4 p-3 bg-fillInformation-weak border border-strokeInformation-weak rounded-lg"] do + div_ [class_ "flex items-center justify-between"] do + div_ [class_ "flex items-center gap-3"] do + div_ [class_ "flex items-center gap-1.5 text-sm"] do + faSprite_ "square-dashed" "regular" "h-3.5 w-3.5 text-fillInformation-strong" + span_ [class_ "font-medium text-fillInformation-strong"] $ toHtml $ show shapeCount <> " shape" <> (if shapeCount == 1 then "" else "s") + div_ [class_ "flex items-center gap-1.5 text-sm text-textWeak"] do + faSprite_ "server" "regular" "h-3 w-3" + span_ [] $ toHtml d.endpointHost _ -> pass Issues.NewShape -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.NewShapeData) -> do @@ -1241,12 +1315,10 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue _ -> pass Issues.LogPattern -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.LogPatternData) -> - div_ [class_ "border border-strokeWeak rounded-lg group/lp mb-4"] do + div_ [class_ "border border-strokeWeak rounded-lg mb-4"] do label_ [class_ "text-sm text-textWeak font-semibold rounded-lg p-2 flex gap-2 items-center cursor-pointer"] do - faSprite_ "chevron-right" "regular" "h-3 w-3 group-has-[.lp-input:checked]/lp:rotate-90" toHtml $ fromMaybe "LOG" d.logLevel <> " pattern (" <> show 
d.occurrenceCount <> " occurrences)" - input_ [class_ "lp-input w-0 h-0 opacity-0", type_ "checkbox"] - div_ [class_ "bg-fillWeak p-4 overflow-x-scroll hidden group-has-[.lp-input:checked]/lp:block text-sm monospace text-textStrong"] $ pre_ [class_ "whitespace-pre-wrap"] $ toHtml d.logPattern + div_ [class_ "bg-fillWeak p-4 overflow-x-scroll group-has-[.lp-input:checked]/lp:block text-sm monospace text-textStrong"] $ pre_ [class_ "whitespace-pre-wrap"] $ toHtml d.logPattern _ -> pass Issues.ErrorEscalating -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.ErrorEscalatingData) -> @@ -1298,10 +1370,27 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue div_ [class_ "border-l-4 border-strokeBrand pl-4 mb-4"] $ p_ [class_ "text-sm text-textStrong leading-relaxed"] $ toHtml issue.recommendedAction -- Action buttons + let logsQuery = case issue.issueType of + Issues.NewEndpoint -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.NewEndpointData) -> Just $ "attributes.http.request.method==\"" <> d.endpointMethod <> "\" AND attributes.http.route==\"" <> d.endpointPath <> "\"" + _ -> Nothing + Issues.NewShape -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.NewShapeData) -> Just $ "attributes.http.request.method==\"" <> d.endpointMethod <> "\" AND attributes.http.route==\"" <> d.endpointPath <> "\"" + _ -> Nothing + Issues.LogPattern -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.LogPatternData) -> Just $ "log_pattern=\"" <> d.logPattern <> "\"" + _ -> Nothing + Issues.LogPatternRateChange -> case AE.fromJSON (getAeson issue.issueData) of + AE.Success (d :: Issues.LogPatternRateChangeData) -> Just $ "log_pattern=\"" <> d.logPattern <> "\"" + _ -> Nothing + _ -> Nothing + logsUrl = (\q -> "/p/" <> pid.toText <> "/log_explorer?query=" <> toUriStr q) <$> logsQuery + div_ [class_ "flex items-center gap-3 mt-4 pt-4 border-t border-strokeWeak"] do - button_ [class_ "inline-flex items-center justify-center whitespace-nowrap text-sm font-medium transition-all h-8 rounded-md gap-1.5 px-3 text-textBrand hover:text-textBrand/80 hover:bg-fillBrand-weak"] do - faSprite_ "eye" "regular" "w-4 h-4" - span_ [class_ "leading-none"] "view related logs" + whenJust logsUrl \url -> + a_ [href_ url, class_ "inline-flex items-center justify-center whitespace-nowrap text-sm font-medium transition-all h-8 rounded-md gap-1.5 px-3 text-textBrand hover:text-textBrand/80 hover:bg-fillBrand-weak"] do + faSprite_ "eye" "regular" "w-4 h-4" + span_ [class_ "leading-none"] "View related logs" button_ [class_ "inline-flex items-center justify-center whitespace-nowrap text-sm font-medium transition-all h-8 rounded-md gap-1.5 px-3 border bg-background hover:text-accent-foreground text-textBrand border-strokeBrand-strong hover:bg-fillBrand-weak"] do faSprite_ "code" "regular" "w-4 h-4" span_ [class_ "leading-none"] "View Full Schema" @@ -1534,20 +1623,20 @@ issueTypeBadge :: Issues.IssueType -> Bool -> Html () issueTypeBadge issueType critical = badge cls icon txt where (cls, icon, txt) = case issueType of - Issues.RuntimeException -> ("bg-fillError-strong", "triangle-alert", "ERROR") + Issues.RuntimeException -> ("bg-fillError-strong", "triangle-alert", "Error") Issues.QueryAlert -> ("bg-fillWarning-strong", "zap", "ALERT") - Issues.NewEndpoint -> ("bg-fillInformation-strong", "plus-circle", "NEW ENDPOINT") + Issues.NewEndpoint -> ("bg-fillInformation-strong", "plus-circle", "Info") Issues.NewShape - | 
critical -> ("bg-fillError-strong", "exclamation-triangle", "BREAKING CHANGE") - | otherwise -> ("bg-fillInformation-strong", "shapes", "NEW SHAPE") + | critical -> ("bg-fillError-strong", "exclamation-triangle", "Breaking") + | otherwise -> ("bg-fillInformation-strong", "shapes", "Info") Issues.FieldChange - | critical -> ("bg-fillError-strong", "exclamation-triangle", "BREAKING CHANGE") - | otherwise -> ("bg-fillWarning-strong", "pen-to-square", "FIELD CHANGE") - Issues.LogPattern -> ("bg-fillInformation-strong", "file-lines", "LOG PATTERN") - Issues.ErrorEscalating -> ("bg-fillError-strong", "arrow-trend-up", "ESCALATING") - Issues.ErrorRegressed -> ("bg-fillError-strong", "rotate-left", "REGRESSED") + | critical -> ("bg-fillError-strong", "exclamation-triangle", "Breaking") + | otherwise -> ("bg-fillWarning-strong", "pen-to-square", "Incremental") + Issues.LogPattern -> ("bg-fillInformation-strong", "file-lines", "Info") + Issues.ErrorEscalating -> ("bg-fillError-strong", "arrow-trend-up", "E") + Issues.ErrorRegressed -> ("bg-fillError-strong", "rotate-left", "Alert") Issues.LogPatternRateChange -> ("bg-fillWarning-strong", "chart-line", "RATE CHANGE") - Issues.EndpointLatencyDegradation -> ("bg-fillWarning-strong", "clock", "LATENCY") - Issues.EndpointErrorRateSpike -> ("bg-fillError-strong", "chart-line-up", "ERROR SPIKE") - Issues.EndpointVolumeRateChange -> ("bg-fillWarning-strong", "arrows-up-down", "TRAFFIC") + Issues.EndpointLatencyDegradation -> ("bg-fillWarning-strong", "clock", "Alert") + Issues.EndpointErrorRateSpike -> ("bg-fillError-strong", "chart-line-up", "Alert") + Issues.EndpointVolumeRateChange -> ("bg-fillWarning-strong", "arrows-up-down", "Alert") badge c i t = span_ [class_ $ "badge " <> c] do faSprite_ i "regular" "w-3 h-3"; t From 0fbca39e9c3dfda63fe6a4713d8f07ae0f87923d Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 16 Jan 2026 19:58:22 +0000 Subject: [PATCH 58/71] Auto-format code with fourmolu --- src/Pages/Anomalies.hs | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index a39ee2464..084d190d6 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -31,7 +31,6 @@ where import Data.Aeson qualified as AE import Data.Aeson.Types (parseMaybe) -import Deriving.Aeson qualified as DAE import Data.Default (def) import Data.Map qualified as Map import Data.Text qualified as T @@ -42,6 +41,7 @@ import Data.Vector qualified as V import Database.PostgreSQL.Simple (Only (Only)) import Database.PostgreSQL.Simple.Newtypes (Aeson (..), getAeson) import Database.PostgreSQL.Simple.SqlQQ (sql) +import Deriving.Aeson qualified as DAE import Effectful.PostgreSQL qualified as PG import Effectful.Reader.Static (ask) import Effectful.Time qualified as Time @@ -53,11 +53,11 @@ import Lucid.Hyperscript (__) import Models.Apis.Anomalies (FieldChange (..), PayloadChange (..)) import Models.Apis.Anomalies qualified as Anomalies import Models.Apis.Endpoints qualified as Endpoints -import Models.Apis.Shapes qualified as Shapes import Models.Apis.Errors qualified as Errors import Models.Apis.Fields.Facets qualified as Facets import Models.Apis.Issues qualified as Issues import Models.Apis.RequestDumps qualified as RequestDump +import Models.Apis.Shapes qualified as Shapes import Models.Projects.Projects qualified as Projects import Models.Telemetry.Schema qualified as Schema import Models.Telemetry.Telemetry qualified as Telemetry @@ -1225,11 +1225,11 @@ 
renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue div_ [class_ "flex items-center gap-4 text-sm text-textWeak mb-3 flex-wrap"] do -- Method and endpoint (for API changes) case AE.fromJSON (getAeson issue.issueData) of - AE.Success (d :: IssueEndpointInfo) -> do - div_ [class_ "flex items-center gap-2"] do - span_ [class_ $ "badge " <> methodFillColor d.endpointMethod] $ toHtml d.endpointMethod - span_ [class_ "monospace bg-fillWeak px-2 py-1 rounded text-xs text-textStrong"] $ toHtml d.endpointPath - _ -> pass + AE.Success (d :: IssueEndpointInfo) -> do + div_ [class_ "flex items-center gap-2"] do + span_ [class_ $ "badge " <> methodFillColor d.endpointMethod] $ toHtml d.endpointMethod + span_ [class_ "monospace bg-fillWeak px-2 py-1 rounded text-xs text-textStrong"] $ toHtml d.endpointPath + _ -> pass -- Service badge span_ [class_ "flex items-center gap-1"] do div_ [class_ "w-3 h-3 bg-fillYellow rounded-sm"] "" @@ -1283,15 +1283,15 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue Issues.NewEndpoint -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.NewEndpointData) -> let shapeCount = V.length d.initialShapes - in div_ [class_ "mb-4 p-3 bg-fillInformation-weak border border-strokeInformation-weak rounded-lg"] do - div_ [class_ "flex items-center justify-between"] do - div_ [class_ "flex items-center gap-3"] do - div_ [class_ "flex items-center gap-1.5 text-sm"] do - faSprite_ "square-dashed" "regular" "h-3.5 w-3.5 text-fillInformation-strong" - span_ [class_ "font-medium text-fillInformation-strong"] $ toHtml $ show shapeCount <> " shape" <> (if shapeCount == 1 then "" else "s") - div_ [class_ "flex items-center gap-1.5 text-sm text-textWeak"] do - faSprite_ "server" "regular" "h-3 w-3" - span_ [] $ toHtml d.endpointHost + in div_ [class_ "mb-4 p-3 bg-fillInformation-weak border border-strokeInformation-weak rounded-lg"] do + div_ [class_ "flex items-center justify-between"] do + div_ [class_ "flex items-center gap-3"] do + div_ [class_ "flex items-center gap-1.5 text-sm"] do + faSprite_ "square-dashed" "regular" "h-3.5 w-3.5 text-fillInformation-strong" + span_ [class_ "font-medium text-fillInformation-strong"] $ toHtml $ show shapeCount <> " shape" <> (if shapeCount == 1 then "" else "s") + div_ [class_ "flex items-center gap-1.5 text-sm text-textWeak"] do + faSprite_ "server" "regular" "h-3 w-3" + span_ [] $ toHtml d.endpointHost _ -> pass Issues.NewShape -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.NewShapeData) -> do @@ -1384,7 +1384,7 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue AE.Success (d :: Issues.LogPatternRateChangeData) -> Just $ "log_pattern=\"" <> d.logPattern <> "\"" _ -> Nothing _ -> Nothing - logsUrl = (\q -> "/p/" <> pid.toText <> "/log_explorer?query=" <> toUriStr q) <$> logsQuery + logsUrl = (\q -> "/p/" <> pid.toText <> "/log_explorer?query=" <> toUriStr q) <$> logsQuery div_ [class_ "flex items-center gap-3 mt-4 pt-4 border-t border-strokeWeak"] do whenJust logsUrl \url -> From 08ff061f29ab1f54dc57abc4ce40356564cc78a5 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Fri, 16 Jan 2026 20:07:11 +0000 Subject: [PATCH 59/71] Merge origin/master (squashed) --- .github/workflows/haskell.yml | 2 +- .github/workflows/pullrequest.yml | 6 +- .gitignore | 3 +- .hlint.yaml | 1 + Dockerfile | 2 +- src/Models/Projects/Dashboards.hs | 6 +- src/Pages/Dashboards.hs | 290 +++++-- src/Pkg/Components/Widget.hs | 117 ++- 
src/Pkg/Parser/Expr.hs | 4 +-
 static/public/assets/css/tailwind.css | 38 +-
 .../deps/highlightjs/sql-formatter.min.js | 2 +
 .../public/assets/deps/highlightjs/sql.min.js | 21 +
 static/public/dashboards/_overview.yaml | 709 ++++++++++++++----
 test/doctests/Main.hs | 3 +-
 test/integration/Pages/BusinessFlowsSpec.hs | 2 +-
 test/integration/Pages/GitSyncSpec.hs | 5 +-
 .../Pages/Projects/ManageMembersSpec.hs | 12 +-
 test/unit/Pkg/ParserSpec.hs | 2 +-
 web-components/src/main.ts | 4 +
 19 files changed, 979 insertions(+), 250 deletions(-)
 create mode 100644 static/public/assets/deps/highlightjs/sql-formatter.min.js
 create mode 100644 static/public/assets/deps/highlightjs/sql.min.js

diff --git a/.github/workflows/haskell.yml b/.github/workflows/haskell.yml
index d9e1fa916..6c2404a53 100644
--- a/.github/workflows/haskell.yml
+++ b/.github/workflows/haskell.yml
@@ -81,7 +81,7 @@ jobs:
       - name: Setup Node.js
         uses: actions/setup-node@v6
         with:
-          node-version: '18'
+          node-version: '22'
 
       - name: Generate required static files
         run: |
diff --git a/.github/workflows/pullrequest.yml b/.github/workflows/pullrequest.yml
index 73ae70cc8..4268e8554 100644
--- a/.github/workflows/pullrequest.yml
+++ b/.github/workflows/pullrequest.yml
@@ -43,7 +43,7 @@ jobs:
       - name: Setup node env
         uses: actions/setup-node@v6
         with:
-          node-version: 18
+          node-version: 22
 
       - name: Generate required static files
         run: |
@@ -125,7 +125,7 @@ jobs:
       - name: Setup node env
         uses: actions/setup-node@v6
         with:
-          node-version: 18
+          node-version: 22
 
       - name: Generate required static files
         run: |
@@ -198,7 +198,7 @@ jobs:
       - name: Setup node env
         uses: actions/setup-node@v6
         with:
-          node-version: 20
+          node-version: 22
       - name: Run UI tests
         run: |
           cd web-components
diff --git a/.gitignore b/.gitignore
index e6ae13d42..6420298fa 100644
--- a/.gitignore
+++ b/.gitignore
@@ -46,4 +46,5 @@ cabal.project.local
 stack-work
 tests.log
 tests_optimized.log
-.claude
\ No newline at end of file
+.claude
+.playwright-mcp/
diff --git a/.hlint.yaml b/.hlint.yaml
index 8eae9ab48..58e8d222f 100644
--- a/.hlint.yaml
+++ b/.hlint.yaml
@@ -17,6 +17,7 @@
 #   - {name: System.Directory, as: Dir, importStyle: qualified, qualifiedStyle: post, asRequired: true}
 
 - arguments:
+    - "-XMultilineStrings"
     - "-XConstraintKinds"
     - "-XDeriveGeneric"
     - "-XGeneralizedNewtypeDeriving"
diff --git a/Dockerfile b/Dockerfile
index c88b0359f..126549e6f 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
 # Stage 1: Build frontend assets
-FROM node:18-alpine AS frontend-builder
+FROM node:22-alpine AS frontend-builder
 
 WORKDIR /build
 
diff --git a/src/Models/Projects/Dashboards.hs b/src/Models/Projects/Dashboards.hs
index 9c5397ab2..323ddeaca 100644
--- a/src/Models/Projects/Dashboards.hs
+++ b/src/Models/Projects/Dashboards.hs
@@ -65,6 +65,7 @@
 import Pkg.DeriveUtils (UUIDId (..))
 import Relude
 import Servant (ServerError (..), err404)
 import System.Directory (listDirectory)
+import System.FilePath ((</>))
 import System.Types (DB)
 
 
@@ -191,17 +192,20 @@ insert dashboardVM = PG.execute (Query $ encodeUtf8 q) params
     )
 
 
+-- | Read dashboard YAML files from directory at compile time via TH
 readDashboardsFromDirectory :: FilePath -> Q Exp
 readDashboardsFromDirectory dir = do
   files <- runIO $ listDirectory dir
   let files' = sort $ filter (".yaml" `L.isSuffixOf`) files
+  mapM_ (THS.addDependentFile . 
(dir </>)) files'
   dashboards <- runIO $ catMaybes <$> mapM (readDashboardFile dir) files'
   THS.lift dashboards
 
 
+-- | Read single dashboard YAML file
 readDashboardFile :: FilePath -> FilePath -> IO (Maybe Dashboard)
 readDashboardFile dir file = do
-  let filePath = dir ++ "/" ++ file
+  let filePath = dir </> file
   result <- try $ readFileBS filePath :: IO (Either SomeException BS.ByteString)
   case result of
     Left err -> do
diff --git a/src/Pages/Dashboards.hs b/src/Pages/Dashboards.hs
index d2d416d21..a018b8a33 100644
--- a/src/Pages/Dashboards.hs
+++ b/src/Pages/Dashboards.hs
@@ -93,6 +93,7 @@
 import Pkg.Components.Widget qualified as Widget
 import Pkg.DashboardUtils qualified as DashboardUtils
 import Pkg.DeriveUtils (UUIDId (..))
 import Pkg.Parser (QueryComponents (..), SqlQueryCfg (..), defSqlQueryCfg, finalAlertQuery, fixedUTCTime, parseQueryToComponents, presetRollup)
+import Pkg.THUtils (hashAssetFile)
 import Relude hiding (ask)
 import Relude.Unsafe qualified as Unsafe
 import Servant (NoContent (..), ServerError, err302, err404, errBody, errHeaders)
@@ -108,6 +109,15 @@
 import Utils
 import Web.FormUrlEncoded (FromForm)
 
 
+-- | Head content for dashboard pages - loads highlight.js and sql-formatter for SQL preview
+dashboardHeadContent_ :: Html ()
+dashboardHeadContent_ = do
+  link_ [rel_ "stylesheet", href_ $(hashAssetFile "/public/assets/deps/highlightjs/atom-one-dark.min.css")]
+  script_ [src_ $(hashAssetFile "/public/assets/deps/highlightjs/highlight.min.js")] ("" :: Text)
+  script_ [src_ $(hashAssetFile "/public/assets/deps/highlightjs/sql.min.js")] ("" :: Text)
+  script_ [src_ $(hashAssetFile "/public/assets/deps/highlightjs/sql-formatter.min.js")] ("" :: Text)
+
+
 folderFromPath :: Maybe Text -> Text
 folderFromPath Nothing = ""
 folderFromPath (Just path) = let dir = takeDirectory (toString path) in if dir == "." then "" else toText dir <> "/"
@@ -282,6 +292,29 @@ dashboardPage_ pid dashId dash dashVM allParams = do
             <> memptyIfFalse (var.multi == Just True) [data_ "mode" "select"]
 
   script_
     [text|
+      // Interpolate {{var-*}} placeholders in elements with data-var-template attribute
+      (function() {
+        let cachedSearch = '', cachedParams = null, pending = false;
+        window.interpolateVarTemplates = function() {
+          if (pending) return;
+          pending = true;
+          requestAnimationFrame(() => {
+            pending = false;
+            if (window.location.search !== cachedSearch) {
+              cachedSearch = window.location.search;
+              cachedParams = new URLSearchParams(cachedSearch);
+            }
+            document.querySelectorAll('[data-var-template]').forEach(el => {
+              let text = el.dataset.varTemplate;
+              cachedParams.forEach((value, key) => {
+                if (key.startsWith('var-')) text = text.replaceAll('{{' + key + '}}', value || '');
+              });
+              el.textContent = text;
+            });
+          });
+        };
+      })();
+
       window.addEventListener('DOMContentLoaded', () => {
         const tagifyInstances = new Map();
         document.querySelectorAll('.tagify-select-input').forEach(input => {
@@ -326,38 +359,44 @@ dashboardPage_ pid dashId dash dashVM allParams = do
               console.error(`Error fetching data for ${input.name}:`, e);
             }
           });
+
+          window.interpolateVarTemplates();
         });
+
+        window.interpolateVarTemplates();
      });
    |]
 
  let activeTabSlug = dash.tabs >>= \tabs -> join (L.lookup activeTabSlugKey allParams) <|> (slugify . 
(.name) <$> listToMaybe tabs) widgetOrderUrl = "/p/" <> pid.toText <> "/dashboards/" <> dashId.toText <> "/widgets_order" <> maybe "" ("?tab=" <>) activeTabSlug constantsJson = decodeUtf8 $ AE.encode $ M.fromList [(k, fromMaybe "" v) | (k, v) <- allParams, "const-" `T.isPrefixOf` k] - section_ [class_ "h-full"] $ div_ [class_ "mx-auto mb-20 pt-5 pb-6 px-4 gap-3.5 w-full flex flex-col h-full overflow-y-scroll pb-20 group/pg", id_ "dashboardPage", data_ "constants" constantsJson] do + section_ [class_ "h-full"] $ div_ [class_ "mx-auto mb-20 pt-2 pb-6 px-4 gap-3.5 w-full flex flex-col h-full overflow-y-scroll pb-20 group/pg", id_ "dashboardPage", data_ "constants" constantsJson] do let emptyConstants = [c.key | c <- fromMaybe [] dash.constants, c.result `elem` [Nothing, Just []]] unless (null emptyConstants) $ div_ [class_ "alert alert-warning text-sm"] do faSprite_ "circle-exclamation" "regular" "w-4 h-4" span_ $ toHtml $ "Constants with no data: " <> T.intercalate ", " emptyConstants - case dash.tabs of - Just tabs -> do - let activeTabIdx = case activeTabSlug of - Just slug -> maybe 0 fst (findTabBySlug tabs slug) - Nothing -> 0 - -- Tab system with htmx lazy loading - only render active tab content - div_ [class_ "dashboard-tabs-container", id_ "dashboard-tabs-content"] do - -- Only render the active tab's content (other tabs load via htmx) - case tabs !!? activeTabIdx of - Just activeTab -> tabContentPanel_ pid dashId.toText activeTabIdx activeTab.name activeTab.widgets True False - Nothing -> pass - Nothing -> do - -- Fall back to old behavior for dashboards without tabs - div_ - [class_ "grid-stack -m-2"] - do - forM_ (dash :: Dashboards.Dashboard).widgets (\w -> toHtml (w{Widget._projectId = Just pid})) - when (null (dash :: Dashboards.Dashboard).widgets) $ label_ [id_ "add_a_widget_label", class_ "grid-stack-item pb-8 cursor-pointer bg-fillBrand-weak border-2 border-strokeBrand-strong border-dashed text-strokeSelected rounded-sm rounded-lg flex flex-col gap-3 items-center justify-center *:right-0! *:bottom-0! ", term "gs-w" "3", term "gs-h" "2", Lucid.for_ "page-data-drawer"] do - faSprite_ "plus" "regular" "h-8 w-8" - span_ "Add a widget" + div_ [class_ "dashboard-grid-wrapper relative min-h-[400px]"] do + dashboardSkeleton_ + case dash.tabs of + Just tabs -> do + let activeTabIdx = case activeTabSlug of + Just slug -> maybe 0 fst (findTabBySlug tabs slug) + Nothing -> 0 + -- Tab system with htmx lazy loading - only render active tab content + div_ [class_ "dashboard-tabs-container", id_ "dashboard-tabs-content"] do + -- Only render the active tab's content (other tabs load via htmx) + case tabs !!? activeTabIdx of + Just activeTab -> tabContentPanel_ pid dashId.toText activeTabIdx activeTab.name activeTab.widgets True False + Nothing -> pass + Nothing -> do + -- Fall back to old behavior for dashboards without tabs + div_ + [class_ "grid-stack -m-2"] + do + forM_ (dash :: Dashboards.Dashboard).widgets (\w -> toHtml (w{Widget._projectId = Just pid})) + when (null (dash :: Dashboards.Dashboard).widgets) $ label_ [id_ "add_a_widget_label", class_ "grid-stack-item pb-8 cursor-pointer bg-fillBrand-weak border-2 border-strokeBrand-strong border-dashed text-strokeSelected rounded-sm rounded-lg flex flex-col gap-3 items-center justify-center *:right-0! *:bottom-0! 
", term "gs-w" "3", term "gs-h" "2", Lucid.for_ "page-data-drawer"] do + faSprite_ "plus" "regular" "h-8 w-8" + span_ "Add a widget" -- Add hidden element for the auto-refresh handler div_ [id_ "dashboard-refresh-handler", class_ "hidden"] "" @@ -388,48 +427,100 @@ dashboardPage_ pid dashId dash dashVM allParams = do }); }; - // Function to initialize grids (called on page load and after htmx swaps) function initializeGrids() { + document.querySelectorAll('.dashboard-grid-wrapper').forEach(wrapper => { + if (!wrapper._skeletonTimeout && !wrapper.classList.contains('dashboard-loaded')) { + wrapper._skeletonTimeout = setTimeout(() => wrapper.classList.add('dashboard-loaded'), 5000); + } + }); const gridInstances = []; document.querySelectorAll('.grid-stack').forEach(gridEl => { if (!gridEl.classList.contains('grid-stack-initialized')) { - const grid = GridStack.init({ - column: 12, - acceptWidgets: true, - cellHeight: '5rem', - marginTop: '0.05rem', - marginLeft: '0.5rem', - marginRight: '0.5rem', - marginBottom: '2rem', - handleClass: 'grid-stack-handle', - styleInHead: true, - staticGrid: false, - }, gridEl); - - grid.on('removed change', debounce(() => htmx.trigger(document.body, 'widget-order-changed'), 200)); - gridEl.classList.add('grid-stack-initialized'); - gridInstances.push(grid); - // Set global gridStackInstance to the current grid - window.gridStackInstance = grid; + const wrapper = gridEl.closest('.dashboard-grid-wrapper'); + try { + const grid = GridStack.init({ + column: 12, + acceptWidgets: true, + cellHeight: '5rem', + margin: '1rem 0.5rem 1rem 0.5rem', + handleClass: 'grid-stack-handle', + styleInHead: true, + staticGrid: false, + float: false, + animate: true, + }, gridEl); + + grid.on('removed change', debounce(() => { + const collapsingWidget = gridEl.querySelector('[data-collapse-action]'); + if (collapsingWidget) { delete collapsingWidget.dataset.collapseAction; return; } + htmx.trigger(document.body, 'widget-order-changed'); + }, 500)); + gridEl.classList.add('grid-stack-initialized'); + gridInstances.push(grid); + window.gridStackInstance = grid; + } finally { + if (wrapper) { + wrapper.classList.add('dashboard-loaded'); + if (wrapper._skeletonTimeout) clearTimeout(wrapper._skeletonTimeout); + } + window.interpolateVarTemplates(); + } } }); // Initialize nested grids document.querySelectorAll('.nested-grid').forEach(nestedEl => { if (!nestedEl.classList.contains('grid-stack-initialized')) { + const parentWidget = nestedEl.closest('.grid-stack-item'); + // Store original YAML height for partial-width groups + if (parentWidget) { + parentWidget.dataset.originalH = parentWidget.getAttribute('gs-h') || '0'; + } + const nestedInstance = GridStack.init({ column: 12, acceptWidgets: true, - cellHeight: '4.9rem', - marginTop: '0.01rem', - marginLeft: '0.5rem', - marginRight: '0.5rem', - marginBottom: '1rem', + cellHeight: '5rem', + margin: '1rem 0.5rem 1rem 0.5rem', handleClass: 'nested-grid-stack-handle', styleInHead: true, staticGrid: false, + animate: true, }, nestedEl); - nestedInstance.on('removed change', debounce(() => htmx.trigger(document.body, 'widget-order-changed'), 200)); + + // Auto-fit group to children + function autoFitGroupToChildren() { + const items = nestedInstance.getGridItems(); + const node = parentWidget?.gridstackNode; + if (!node) return; + + // Don't resize if group is collapsed + if (parentWidget.classList.contains('collapsed')) return; + + const isFullWidth = node.w === 12; + const maxRow = items.length + ? 
Math.max(1, ...items.map(item => (item.gridstackNode?.y || 0) + (item.gridstackNode?.h || 1))) + : 1; + + const requiredHeight = 1 + maxRow; // 1 for header + content + const yamlHeight = parseInt(parentWidget.dataset.originalH) || requiredHeight; + + // Full-width: always auto-fit. Partial-width: max of YAML and required + const targetHeight = isFullWidth ? requiredHeight : Math.max(yamlHeight, requiredHeight); + + if (node.h !== targetHeight && window.gridStackInstance) { + window.gridStackInstance.update(parentWidget, { h: targetHeight }); + } + } + + nestedInstance.on('change added removed', autoFitGroupToChildren); + requestAnimationFrame(autoFitGroupToChildren); + nestedInstance.on('removed change', debounce(() => { + const collapsingWidget = nestedEl.closest('[data-collapse-action]'); + if (collapsingWidget) { delete collapsingWidget.dataset.collapseAction; return; } + htmx.trigger(document.body, 'widget-order-changed'); + }, 500)); + nestedEl.classList.add('grid-stack-initialized'); } }); @@ -442,6 +533,7 @@ dashboardPage_ pid dashId dash dashVM allParams = do document.body.addEventListener('htmx:afterSettle', function(e) { if (e.detail.target && e.detail.target.id === 'dashboard-tabs-content') { initializeGrids(); + window.interpolateVarTemplates(); } }); }); @@ -456,6 +548,53 @@ dashboardPage_ pid dashId dash dashVM allParams = do } } }); + + function compactGrid(grid, el) { + if (!el) return; + const items = Array.from(el.querySelectorAll(':scope > .grid-stack-item')).sort((a, b) => (a.gridstackNode?.y || 0) - (b.gridstackNode?.y || 0)); + const rows = {}; + items.forEach(item => { const y = item.gridstackNode?.y || 0; (rows[y] = rows[y] || []).push(item); }); + let nextY = 0, needsUpdate = false; + const updates = []; + Object.keys(rows).map(Number).sort((a, b) => a - b).forEach(y => { + rows[y].forEach(item => { + if (item.gridstackNode?.y !== nextY) { updates.push({ item, y: nextY }); needsUpdate = true; } + }); + nextY += Math.max(...rows[y].map(item => item.gridstackNode?.h || 1)); + }); + if (needsUpdate) { + grid.batchUpdate(); + updates.forEach(({ item, y }) => grid.update(item, { y })); + grid.batchUpdate(false); + } + } + + // Delegated handler for collapse toggle + document.addEventListener('click', function(e) { + const collapseBtn = e.target.closest('.collapse-toggle'); + if (!collapseBtn) return; + const parentWidget = collapseBtn.closest('.grid-stack-item'); + const grid = window.gridStackInstance; + if (!parentWidget || !grid) return; + + // Use requestAnimationFrame for smoother animation after class toggle + requestAnimationFrame(() => { + const isCollapsed = parentWidget.classList.contains('collapsed'); + const mainGridEl = document.querySelector('.grid-stack:not(.nested-grid)'); + + parentWidget.dataset.collapseAction = 'true'; + + if (isCollapsed) { + grid.update(parentWidget, { h: 1 }); + } else { + const nestedInstance = parentWidget.querySelector('.nested-grid')?.gridstack; + const items = nestedInstance?.getGridItems() || []; + const maxRow = items.length ? Math.max(1, ...items.map(item => (item.gridstackNode?.y || 0) + (item.gridstackNode?.h || 1))) : 1; + grid.update(parentWidget, { h: 1 + maxRow }); + } + compactGrid(grid, mainGridEl); + }); + }); |] @@ -569,11 +708,11 @@ processConstant pid now (sinceStr, fromDStr, toDStr) allParams constantBase = do valueToText v = decodeUtf8 $ AE.encode v --- Process a single widget recursively with pre-computed replacement maps for efficiency. --- Preserves original query in rawQuery for editor display. 
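-- [Editor's note] The hunk below strips the server-side placeholder pass
-- from processWidget; the browser's interpolateVarTemplates (added earlier
-- in this patch) now substitutes {{var-*}} values from the URL at render
-- time. A sketch of the kind of textual substitution being replaced,
-- assuming only the text package (substituteVars is illustrative, not the
-- project's DashboardUtils.replacePlaceholders):
{-# LANGUAGE OverloadedStrings #-}
import Data.List (foldl')
import Data.Text (Text)
import Data.Text qualified as T

substituteVars :: [(Text, Text)] -> Text -> Text
substituteVars params template =
  foldl' (\acc (k, v) -> T.replace ("{{" <> k <> "}}") v acc) template params

-- e.g. substituteVars [("var-service", "checkout")] "svc={{var-service}}"
--      == "svc=checkout"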
-processWidget :: Projects.ProjectId -> UTCTime -> (Maybe Text, Maybe Text, Maybe Text) -> [(Text, Maybe Text)] -> (Text -> Text, Text -> Text) -> Widget.Widget -> ATAuthCtx Widget.Widget -processWidget pid now timeRange allParams (replacePlaceholdersSQL, replacePlaceholdersKQL) widgetBase = do - let widget = widgetBase & #_projectId %~ (<|> Just pid) & #rawQuery .~ widgetBase.query & #sql . _Just %~ replacePlaceholdersSQL & #query %~ fmap replacePlaceholdersKQL +-- Process a single widget recursively. Keeps sql/query with {{var-*}} templates intact +-- so they can be interpolated at data fetch time with current URL params. +processWidget :: Projects.ProjectId -> UTCTime -> (Maybe Text, Maybe Text, Maybe Text) -> [(Text, Maybe Text)] -> Widget.Widget -> ATAuthCtx Widget.Widget +processWidget pid now timeRange allParams widgetBase = do + let widget = widgetBase & #_projectId %~ (<|> Just pid) & #rawQuery .~ widgetBase.query widget' <- if widget.eager == Just True || widget.wType == Widget.WTAnomalies @@ -585,7 +724,7 @@ processWidget pid now timeRange allParams (replacePlaceholdersSQL, replacePlaceh Nothing -> pure widget' Just childWidgets -> do let addDashboardId child = child & #_dashboardId %~ (<|> widget'._dashboardId) - processedChildren <- pooledForConcurrently childWidgets (processWidget pid now timeRange allParams (replacePlaceholdersSQL, replacePlaceholdersKQL) . addDashboardId) + processedChildren <- pooledForConcurrently childWidgets (processWidget pid now timeRange allParams . addDashboardId) pure $ widget' & #children ?~ processedChildren @@ -866,6 +1005,7 @@ dashboardGetH pid dashId fileM fromDStr toDStr sinceStr allParams = do , pageTitleModalId = Just "pageTitleModalId" , config = appCtx.config , freeTierExceeded = freeTierExceeded + , headContent = Just dashboardHeadContent_ , pageActions = Just $ div_ [class_ "flex gap-3 items-center"] do TimePicker.timepicker_ Nothing currentRange Nothing TimePicker.refreshButton_ @@ -1534,6 +1674,7 @@ dashboardsGetH pid sortM embeddedM teamIdM filters = do , pageTitle = "Dashboards" , freeTierExceeded = freeTierExceeded , config = appCtx.config + , headContent = Just dashboardHeadContent_ , pageActions = Just $ label_ [Lucid.for_ "newDashboardMdl", class_ "btn btn-sm btn-primary gap-2"] do faSprite_ "plus" "regular" "h-4 w-4" "New Dashboard" @@ -1610,7 +1751,7 @@ dashboardsPostH pid form = do addRespHeaders DashboardNoContent --- -- Template Haskell splice to generate the list of dashboards by reading the dashboards folder in filesystem +-- TH splice: reads all dashboard YAML files from static/public/dashboards at compile time dashboardTemplates :: [Dashboards.Dashboard] dashboardTemplates = $(Dashboards.readDashboardsFromDirectory "static/public/dashboards") @@ -1866,16 +2007,13 @@ dashboardWidgetExpandGetH :: Projects.ProjectId -> Dashboards.DashboardId -> Tex dashboardWidgetExpandGetH pid dashId widgetId = do (_, dash) <- getDashAndVM dashId Nothing now <- Time.currentTime - let timeParams@(sinceStr, fromDStr, toDStr) = (Nothing, Nothing, Nothing) + let timeParams = (Nothing, Nothing, Nothing) paramsWithVarDefaults = addVariableDefaults [] dash.variables (_, allParamsWithConstants) <- processConstantsAndExtendParams pid now timeParams paramsWithVarDefaults (fromMaybe [] dash.constants) case snd <$> findWidgetInDashboard widgetId dash of Nothing -> throwError $ err404{errBody = "Widget not found in dashboard"} Just widgetToExpand -> do - let (fromD, toD, _) = TimePicker.parseTimeRange now (TimePicker.TimePicker sinceStr fromDStr 
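-- [Editor's note] Child widgets above are processed with
-- pooledForConcurrently, which runs the action on a bounded pool (one
-- worker per capability) rather than one thread per element. A sketch in
-- IO, assuming the unliftio package (the real code runs in ATAuthCtx, and
-- Widget here is a stand-in type):
import UnliftIO.Async (pooledForConcurrently)

newtype Widget = Widget {title :: String}

processChildren :: (Widget -> IO Widget) -> [Widget] -> IO [Widget]
processChildren process children = pooledForConcurrently children process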
toDStr) - replacePlaceholdersSQL = DashboardUtils.replacePlaceholders (DashboardUtils.variablePresets pid.toText fromD toD allParamsWithConstants now) - replacePlaceholdersKQL = DashboardUtils.replacePlaceholders (DashboardUtils.variablePresetsKQL pid.toText fromD toD allParamsWithConstants now) - processedWidget <- processWidget pid now timeParams allParamsWithConstants (replacePlaceholdersSQL, replacePlaceholdersKQL) widgetToExpand + processedWidget <- processWidget pid now timeParams allParamsWithConstants widgetToExpand addRespHeaders $ widgetViewerEditor_ pid (Just dashId) Nothing Nothing (Just processedWidget) "edit" @@ -1891,10 +2029,17 @@ widgetSqlPreviewGetH pid queryM sinceStr fromDStr toDStr = do Left err -> div_ [class_ "p-3 space-y-2"] do div_ [class_ "text-textError text-xs font-medium"] "Parse Error" pre_ [class_ "whitespace-pre-wrap break-all bg-fillError/10 p-2 rounded text-xs overflow-x-auto"] $ toHtml err - Right (_, qc) -> div_ [class_ "space-y-3 p-3 bg-fillWeaker rounded-lg font-mono text-xs"] do + Right (_, qc) -> div_ [class_ "space-y-3 p-3 bg-fillWeaker rounded-lg text-xs sql-preview-container"] do sqlBlock_ "Main Query" qc.finalSqlQuery whenJust qc.finalSummarizeQuery $ sqlBlock_ "Summarize Query" whenJust qc.finalAlertQuery $ sqlBlock_ "Alert Query" + script_ + """ + document.querySelectorAll('.sql-preview-container pre code').forEach(el => { + el.textContent = sqlFormatter.format(el.textContent, { language: 'postgresql' }); + hljs.highlightElement(el); + }); + """ where sqlBlock_ :: Text -> Text -> Html () sqlBlock_ label sql = div_ [class_ "space-y-1"] do @@ -1905,7 +2050,7 @@ widgetSqlPreviewGetH pid queryM sinceStr fromDStr toDStr = do , term "_" [text| on click writeText(`${T.replace "`" "\\`" sql}`) to the navigator's clipboard then set my.innerText to 'Copied!' then wait 1.5s then set my.innerText to 'Copy' |] ] "Copy" - pre_ [class_ "whitespace-pre-wrap break-all bg-fillWeak p-2 rounded overflow-x-auto max-h-48"] $ toHtml sql + pre_ [class_ "bg-fillWeak p-2 rounded overflow-x-auto max-h-48"] $ code_ [class_ "language-sql text-xs !bg-transparent"] $ toHtml sql -- | Find a tab by its slug, returns (index, tab) if found @@ -1920,9 +2065,7 @@ findWidgetInDashboard wid dash = tabResult <|> rootResult match w = w.id == Just wid || maybeToMonoid (slugify <$> w.title) == wid -- Recursively search widget and its children findInWidget :: Widget.Widget -> Maybe Widget.Widget - findInWidget w - | match w = Just w - | otherwise = asum $ map findInWidget (fromMaybe [] w.children) + findInWidget w = mfilter match (pure w) <|> asum (maybe [] (map findInWidget) w.children) tabResult = listToMaybe [(Just $ slugify t.name, w') | t <- fromMaybe [] dash.tabs, w <- t.widgets, w' <- maybeToList (findInWidget w)] rootResult = (Nothing,) <$> asum (map findInWidget dash.widgets) @@ -1986,11 +2129,8 @@ mkWidgetProcessor -> [(Text, Maybe Text)] -> Widget.Widget -> ATAuthCtx Widget.Widget -mkWidgetProcessor pid dashId now timeParams@(sinceStr, fromDStr, toDStr) paramsWithConstants = - let (fromD, toD, _) = TimePicker.parseTimeRange now (TimePicker.TimePicker sinceStr fromDStr toDStr) - replacePlaceholdersSQL = DashboardUtils.replacePlaceholders (DashboardUtils.variablePresets pid.toText fromD toD paramsWithConstants now) - replacePlaceholdersKQL = DashboardUtils.replacePlaceholders (DashboardUtils.variablePresetsKQL pid.toText fromD toD paramsWithConstants now) - in fmap (#_dashboardId ?~ dashId.toText) . 
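-- [Editor's note] The findInWidget one-liner above is a depth-first search
-- over the widget tree: mfilter keeps the current node only if it matches,
-- and (<|>) falls through to the first match among the children. The same
-- shape on Data.Tree, assuming only base and containers:
import Control.Applicative ((<|>))
import Control.Monad (mfilter)
import Data.Foldable (asum)
import Data.Tree (Tree (..))

findNode :: (a -> Bool) -> Tree a -> Maybe a
findNode p (Node x kids) = mfilter p (Just x) <|> asum (map (findNode p) kids)

-- e.g. findNode even (Node 1 [Node 2 []]) == Just 2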
processWidget pid now timeParams paramsWithConstants (replacePlaceholdersSQL, replacePlaceholdersKQL) +mkWidgetProcessor pid dashId now timeParams paramsWithConstants = + fmap (#_dashboardId ?~ dashId.toText) . processWidget pid now timeParams paramsWithConstants -- | Handler for dashboard with tab in path: /p/{pid}/dashboards/{dash_id}/tab/{tab_slug} @@ -2046,6 +2186,7 @@ dashboardTabGetH pid dashId tabSlug fileM fromDStr toDStr sinceStr allParams = d , pageTitleSuffixModalId = Just "tabRenameModalId" -- Modal for renaming tab , config = appCtx.config , freeTierExceeded = freeTierExceeded + , headContent = Just dashboardHeadContent_ , pageActions = Just $ div_ [class_ "flex gap-3 items-center"] do TimePicker.timepicker_ Nothing currentRange Nothing TimePicker.refreshButton_ @@ -2109,6 +2250,19 @@ dashboardTabContentGetH pid dashId tabSlug fileM fromDStr toDStr sinceStr allPar "" +-- | Skeleton loader shown while GridStack initializes +dashboardSkeleton_ :: Html () +dashboardSkeleton_ = div_ [class_ "dashboard-skeleton absolute inset-0 z-10 bg-bgBase flex flex-col items-center justify-center"] do + span_ [class_ "loading loading-spinner loading-lg text-fillBrand-strong"] "" + p_ [class_ "text-sm text-textWeak mt-3"] "Loading dashboard..." + div_ [class_ "grid grid-cols-12 gap-4 mt-8 w-full max-w-4xl px-8"] do + div_ [class_ "col-span-8 h-32 rounded-lg skeleton-shimmer"] "" + div_ [class_ "col-span-4 h-32 rounded-lg skeleton-shimmer"] "" + div_ [class_ "col-span-4 h-24 rounded-lg skeleton-shimmer"] "" + div_ [class_ "col-span-4 h-24 rounded-lg skeleton-shimmer"] "" + div_ [class_ "col-span-4 h-24 rounded-lg skeleton-shimmer"] "" + + -- | Render a single tab content panel -- isPartial: True for HTMX partial loads (include OOB swap), False for full page loads tabContentPanel_ :: Projects.ProjectId -> Text -> Int -> Text -> [Widget.Widget] -> Bool -> Bool -> Html () diff --git a/src/Pkg/Components/Widget.hs b/src/Pkg/Components/Widget.hs index 22007f479..c987f492f 100644 --- a/src/Pkg/Components/Widget.hs +++ b/src/Pkg/Components/Widget.hs @@ -80,6 +80,8 @@ data WidgetType | WTTable | WTTraces | WTFlamegraph + | WTServiceMap -- Service dependency graph visualization + | WTHeatmap -- Latency distribution heatmap deriving stock (Enum, Eq, Generic, Show, THS.Lift) deriving anyclass (Default, NFData) deriving (AE.FromJSON, AE.ToJSON) via DAE.CustomJSON '[DAE.ConstructorTagModifier '[DAE.StripPrefix "WT", DAE.CamelToSnake]] WidgetType @@ -149,6 +151,7 @@ data Widget = Widget , warningThreshold :: Maybe Double , showThresholdLines :: Maybe Text -- 'always' | 'on_breach' | 'never' , alertStatus :: Maybe Text -- 'normal' | 'warning' | 'alerting' (runtime) + , description :: Maybe Text -- Help text shown in info icon tooltip } deriving stock (Generic, Show, THS.Lift) deriving anyclass (Default, FromForm, NFData) @@ -269,36 +272,52 @@ widget_ = widgetHelper_ widgetHelper_ :: Widget -> Html () widgetHelper_ w' = case w.wType of - WTAnomalies -> gridItem_ $ div_ [class_ $ "h-full " <> paddingBtm] $ div_ [class_ "gap-0.5 flex flex-col h-full"] do + WTAnomalies -> gridItem_ $ div_ [class_ $ "h-full group/wgt " <> paddingBtm] $ div_ [class_ "gap-0.5 flex flex-col h-full"] do unless (w.naked == Just True) $ renderWidgetHeader w (maybeToMonoid w.id) w.title Nothing Nothing Nothing (Just ("View all", "/p/" <> maybeToMonoid (w._projectId <&> (.toText)) <> "/anomalies")) (w.hideSubtitle == Just True) div_ [class_ "flex-1 flex min-h-0"] $ div_ [class_ $ "h-full w-full " <> if w.naked == Just True then "" else 
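-- [Editor's note] Given the CustomJSON options on WidgetType above
-- (StripPrefix "WT", then CamelToSnake), the new constructors serialize as
-- "service_map" and "heatmap". A reduced sketch, assuming the
-- deriving-aeson package:
{-# LANGUAGE DataKinds, DeriveGeneric, DerivingStrategies, DerivingVia #-}
import Data.Aeson (ToJSON, encode)
import Deriving.Aeson (CamelToSnake, ConstructorTagModifier, CustomJSON (..), StripPrefix)
import GHC.Generics (Generic)

data WType = WTServiceMap | WTHeatmap
  deriving stock (Generic)
  deriving (ToJSON) via CustomJSON '[ConstructorTagModifier '[StripPrefix "WT", CamelToSnake]] WType

-- encode WTServiceMap == "\"service_map\""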
"surface-raised rounded-2xl", id_ $ maybeToMonoid w.id <> "_bordered"] $ div_ [class_ "h-full overflow-auto p-3"] $ whenJust w.html toHtmlRaw - WTGroup -> gridItem_ $ div_ [class_ $ "h-full " <> paddingBtm] $ div_ [class_ "h-full flex flex-col gap-4"] do - div_ [class_ $ "group/h gap-1 leading-none flex justify-between items-center " <> gridStackHandleClass] do - div_ [class_ "inline-flex gap-1 items-center"] do - span_ [class_ "hidden group-hover/h:inline-flex"] $ Utils.faSprite_ "grip-dots-vertical" "regular" "w-4 h-4" - whenJust w.icon \icon -> span_ [] $ Utils.faSprite_ icon "regular" "w-4 h-4" - span_ [class_ "text-sm"] $ toHtml $ maybeToMonoid w.title - div_ [class_ "grid-stack nested-grid h-full -mx-2"] $ forM_ (fromMaybe [] w.children) (\wChild -> widgetHelper_ (wChild{_isNested = Just True})) - WTTable -> gridItem_ $ div_ [class_ $ "h-full " <> paddingBtm] $ renderTable w + WTGroup -> gridItem_ $ div_ [class_ "h-full flex flex-col border border-strokeWeak rounded-lg surface-raised overflow-hidden group/wgt"] do + -- Header: auto height (no flex), group-header class for CSS targeting when collapsed + div_ [class_ $ "group-header py-2 px-4 flex items-center justify-between " <> gridStackHandleClass] do + div_ [class_ "inline-flex gap-2 items-center group/h"] do + span_ [class_ "hidden group-hover/h:inline-flex cursor-move"] $ Utils.faSprite_ "grip-dots-vertical" "regular" "w-4 h-4" + whenJust w.icon \icon -> span_ [] $ Utils.faSprite_ icon "regular" "w-5 h-5" + span_ ([class_ "text-lg font-medium"] <> foldMap (\t -> [data_ "var-template" t | "{{var-" `T.isInfixOf` t]) w.title) $ toHtml $ maybeToMonoid w.title + whenJust w.description \desc -> span_ [class_ "hidden group-hover/wgt:inline-flex items-center", data_ "tippy-content" desc] $ Utils.faSprite_ "circle-info" "regular" "w-4 h-4" + -- Collapse chevron: only for full-width groups + when isFullWidth $ button_ [class_ "collapse-toggle p-2 rounded hover:bg-fillWeak transition-colors cursor-pointer", [__|on click toggle .hidden on .nested-grid in closest .grid-stack-item then toggle .collapsed on closest .grid-stack-item|]] $ Utils.faSprite_ "chevron-up" "regular" "w-5 h-5 transition-transform" + -- Nested grid: flex-1 fills remaining space + div_ [class_ "grid-stack nested-grid flex-1"] $ forM_ (fromMaybe [] w.children) (\wChild -> widgetHelper_ (wChild{_isNested = Just True})) + WTTable -> gridItem_ $ div_ [class_ $ "h-full group/wgt " <> paddingBtm] $ renderTable w WTLogs -> gridItem_ $ div_ [class_ $ "h-full " <> paddingBtm] $ div_ [class_ "p-3"] "Logs widget coming soon" - WTTraces -> gridItem_ $ div_ [class_ $ "h-full " <> paddingBtm] $ renderTraceTable w + WTTraces -> gridItem_ $ div_ [class_ $ "h-full group/wgt " <> paddingBtm] $ renderTraceTable w WTFlamegraph -> gridItem_ $ div_ [class_ $ "h-full " <> paddingBtm] $ div_ [class_ "p-3"] "Flamegraph widget coming soon" _ -> gridItem_ $ div_ [class_ $ " w-full h-full group/wgt " <> paddingBtm] $ renderChart w where w = w' & #id %~ maybe (slugify <$> w'.title) Just gridStackHandleClass = if w._isNested == Just True then "nested-grid-stack-handle" else "grid-stack-handle" - layoutFields = [("x", (.x)), ("y", (.y)), ("w", (.w)), ("h", (.h))] - attrs = concat [maybe [] (\v -> [term ("gs-" <> name) (show v)]) (w.layout >>= layoutField) | (name, layoutField) <- layoutFields] + isFullWidth = (== Just 12) $ w.layout >>= (.w) + groupRequiredHeight = case w.wType of + WTGroup -> + let childWidgets = fromMaybe [] w.children + maxRow = foldl' (\acc c -> max acc $ fromMaybe 0 (c.layout >>= 
(.y)) + fromMaybe 1 (c.layout >>= (.h))) 1 childWidgets + in Just (1 + maxRow) + _ -> Nothing + -- For groups: full-width uses requiredHeight, partial-width uses max(yamlH, requiredHeight) + effectiveHeight = case groupRequiredHeight of + Just reqH -> Just $ if isFullWidth then reqH else maybe reqH (max reqH) (w.layout >>= (.h)) + Nothing -> w.layout >>= (.h) + layoutFields = [("x", (.x)), ("y", (.y)), ("w", (.w))] :: [(Text, Layout -> Maybe Int)] + attrs = + foldMap (\(name, field) -> foldMap (\v -> [term ("gs-" <> name) (show v)]) (w.layout >>= field)) layoutFields + <> foldMap (\h -> [term "gs-h" (show h)]) effectiveHeight paddingBtm | w.standalone == Just True = "" - | w._isNested == Just True && w.wType `elem` [WTTimeseriesStat, WTStat] = "" - | otherwise = bool " pb-8 " " standalone pb-4 " (w._isNested == Just True) - -- Serialize the widget to JSON for easy copying + | otherwise = "" widgetJson = decodeUtf8 $ fromLazy $ AE.encode w gridItem_ = if w.naked == Just True then Relude.id - else div_ ([class_ "grid-stack-item h-full flex-1 [.nested-grid_&]:overflow-hidden ", id_ $ maybeToMonoid w.id <> "_widgetEl", data_ "widget" widgetJson] <> attrs) . div_ [class_ "grid-stack-item-content h-full"] + else div_ ([class_ "grid-stack-item h-full flex-1 [.nested-grid_&]:overflow-hidden ", id_ $ maybeToMonoid w.id <> "_widgetEl", data_ "widget" widgetJson] <> attrs) . div_ [class_ "grid-stack-item-content h-full [.grid-stack_&]:h-auto"] renderWidgetHeader :: Widget -> Text -> Maybe Text -> Maybe Text -> Maybe Text -> Maybe Text -> Maybe (Text, Text) -> Bool -> Html () @@ -308,10 +327,11 @@ renderWidgetHeader widget wId title valueM subValueM expandBtnFn ctaM hideSub = span_ [class_ "text-sm text-textWeak flex items-center gap-1"] do unless (widget.standalone == Just True) $ span_ [class_ "hidden group-hover/h:inline-flex"] $ Utils.faSprite_ "grip-dots-vertical" "regular" "w-4 h-4" whenJust widget.icon \icon -> span_ [] $ Utils.faSprite_ icon "regular" "w-4 h-4" - toHtml $ maybeToMonoid title + span_ (foldMap (\t -> [data_ "var-template" t | "{{var-" `T.isInfixOf` t]) title) $ toHtml $ maybeToMonoid title + whenJust widget.description \desc -> span_ [class_ "hidden group-hover/wgt:inline-flex items-center", data_ "tippy-content" desc] $ Utils.faSprite_ "circle-info" "regular" "w-4 h-4" span_ [class_ $ "bg-fillWeak border border-strokeWeak text-sm font-semibold px-2 py-1 rounded-3xl leading-none text-textWeak " <> if isJust valueM then "" else "hidden", id_ $ wId <> "Value"] $ whenJust valueM toHtml - span_ [class_ $ "text-textDisabled widget-subtitle text-sm " <> bool "" "hidden" hideSub, id_ $ wId <> "Subtitle"] $ toHtml $ maybeToMonoid subValueM + span_ ([class_ $ "text-textDisabled widget-subtitle text-sm " <> bool "" "hidden" hideSub, id_ $ wId <> "Subtitle"] <> foldMap (\t -> [data_ "var-template" t | "{{var-" `T.isInfixOf` t]) subValueM) $ toHtml $ maybeToMonoid subValueM -- Add hidden loader with specific ID that can be toggled from JS span_ [class_ "hidden", id_ $ wId <> "_loader"] $ Utils.faSprite_ "spinner" "regular" "w-4 h-4 animate-spin" div_ [class_ "text-iconNeutral flex items-center gap-0.5"] do @@ -488,7 +508,7 @@ renderTraceTable widget = do div_ [ class_ "h-full overflow-auto p-3" , hxGet_ $ "/p/" <> maybe "" (.toText) widget._projectId <> "/widget?widgetJSON=" <> decodeUtf8 (urlEncode True widgetJson) - , hxTrigger_ "load" + , hxTrigger_ "load, update-query from:window" , hxTarget_ $ "#" <> tableId , hxSelect_ $ "#" <> tableId , hxSwap_ "outerHTML" @@ -588,6 +608,54 @@ 
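-- [Editor's note] The effectiveHeight computation above boils down to a
-- small sizing rule: reserve one row for the group header, shrink-wrap
-- full-width groups to their children, and never let a partial-width group
-- drop below its YAML-declared height. As plain Ints (illustrative only;
-- the real code reads the Layout record):
groupHeight :: Bool -> Maybe Int -> [(Int, Int)] -> Int
groupHeight isFullWidth yamlH children = -- children given as (gs-y, gs-h) pairs
  let maxRow = maximum (1 : [y + h | (y, h) <- children])
      required = 1 + maxRow -- one row for the header
   in if isFullWidth then required else maybe required (max required) yamlH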
renderTable widget = do whenJust widget.onRowClick \action -> renderRowClickScript tableId action widget.columns +-- | Render stat widget content with HTMX lazy loading support +-- Always includes HTMX attributes so widget can refresh on update-query events +renderStatContent :: Widget -> Text -> Maybe Text -> Html () +renderStatContent widget chartId valueM = do + let statContentId = chartId <> "_stat" + hasData = widget.eager == Just True || isJust (widget.dataset >>= (.value)) + paddingClass = "px-3 flex flex-col " <> bool "py-3 " "py-2 " (widget._isNested == Just True) + -- Always use eager widget JSON for HTMX requests + eagerWidget = widget & #eager ?~ True + widgetJson = fromLazy $ AE.encode eagerWidget + -- Always include HTMX attributes for refresh capability + div_ + [ id_ statContentId + , class_ paddingClass + , hxGet_ $ "/p/" <> maybe "" (.toText) widget._projectId <> "/widget?widgetJSON=" <> decodeUtf8 (urlEncode True widgetJson) + , hxTrigger_ $ if hasData then "update-query from:window" else "load, update-query from:window" + , hxTarget_ $ "#" <> statContentId + , hxSelect_ $ "#" <> statContentId + , hxSwap_ "outerHTML" + , hxExt_ "forward-page-params" + ] + $ if hasData + then renderStatValue widget chartId valueM + else renderStatPlaceholder widget chartId + + +-- | Render placeholder with loading spinner for lazy-loaded stats +renderStatPlaceholder :: Widget -> Text -> Html () +renderStatPlaceholder widget chartId = div_ [class_ "flex flex-col gap-1"] do + strong_ [class_ "text-textSuccess-strong text-4xl font-normal", id_ $ chartId <> "Value"] + $ span_ [class_ "loading loading-spinner loading-sm"] "" + div_ [class_ "inline-flex gap-1 items-center text-sm"] do + whenJust widget.icon \icon -> Utils.faSprite_ icon "regular" "w-4 h-4 text-iconBrand" + toHtml $ maybeToMonoid widget.title + whenJust widget.description \desc -> span_ [class_ "hidden group-hover/wgt:inline-flex items-center", data_ "tippy-content" desc] $ Utils.faSprite_ "circle-info" "regular" "w-4 h-4 text-iconNeutral" + + +-- | Render actual stat value content +renderStatValue :: Widget -> Text -> Maybe Text -> Html () +renderStatValue widget chartId valueM = div_ [class_ "flex flex-col gap-1"] do + strong_ [class_ "text-textSuccess-strong text-4xl font-normal", id_ $ chartId <> "Value"] + $ whenJust valueM toHtml + div_ [class_ "inline-flex gap-1 items-center text-sm"] do + whenJust widget.icon \icon -> Utils.faSprite_ icon "regular" "w-4 h-4 text-iconBrand" + toHtml $ maybeToMonoid widget.title + whenJust widget.description \desc -> span_ [class_ "hidden group-hover/wgt:inline-flex items-center", data_ "tippy-content" desc] $ Utils.faSprite_ "circle-info" "regular" "w-4 h-4 text-iconNeutral" + + renderChart :: Widget -> Html () renderChart widget = do let rateM = widget.dataset >>= (.rowsPerMin) >>= \r -> Just $ Utils.prettyPrintCount (round r) <> " rows/min" @@ -607,14 +675,7 @@ renderChart widget = do , id_ $ chartId <> "_bordered" ] do - when isStat $ div_ [class_ $ "px-3 flex flex-col " <> bool "py-3 " "py-2 " (widget._isNested == Just True)] do - div_ [class_ "flex flex-col gap-1"] do - strong_ [class_ "text-textSuccess-strong text-4xl font-normal", id_ $ chartId <> "Value"] - $ whenJust valueM toHtml - div_ [class_ "inline-flex gap-1 items-center text-sm"] do - whenJust widget.icon \icon -> Utils.faSprite_ icon "regular" "w-4 h-4 text-iconBrand" - toHtml $ maybeToMonoid widget.title - Utils.faSprite_ "circle-info" "regular" "w-4 h-4 text-iconNeutral" + when isStat $ renderStatContent widget chartId 
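-- [Editor's note] renderStatContent above always embeds an eager copy of
-- the widget in its hx-get URL so HTMX can re-fetch the stat on "load" and
-- "update-query" events. A sketch of that URL construction, assuming
-- aeson, text and http-types:
{-# LANGUAGE OverloadedStrings #-}
import Data.Aeson (ToJSON, encode)
import Data.ByteString.Lazy (toStrict)
import Data.Text (Text)
import Data.Text.Encoding (decodeUtf8)
import Network.HTTP.Types.URI (urlEncode)

widgetUrl :: ToJSON w => Text -> w -> Text
widgetUrl pid widget =
  "/p/" <> pid <> "/widget?widgetJSON="
    <> decodeUtf8 (urlEncode True (toStrict (encode widget)))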
valueM unless (widget.wType == WTStat) $ div_ [class_ $ "h-0 max-h-full overflow-hidden w-full flex-1 min-h-0" <> bool " p-2" "" isStat] do div_ [class_ "h-full w-full", id_ $ maybeToMonoid widget.id] "" let theme = fromMaybe "default" widget.theme @@ -943,6 +1004,8 @@ mapWidgetTypeToChartType WTTimeseries = "bar" mapWidgetTypeToChartType WTTimeseriesLine = "line" mapWidgetTypeToChartType WTTimeseriesStat = "line" mapWidgetTypeToChartType WTDistribution = "bar" +mapWidgetTypeToChartType WTServiceMap = "graph" -- ECharts force-directed graph +mapWidgetTypeToChartType WTHeatmap = "heatmap" -- ECharts heatmap mapWidgetTypeToChartType _ = "bar" diff --git a/src/Pkg/Parser/Expr.hs b/src/Pkg/Parser/Expr.hs index 52b9dab0f..48eacc3b5 100644 --- a/src/Pkg/Parser/Expr.hs +++ b/src/Pkg/Parser/Expr.hs @@ -769,8 +769,8 @@ typeCastMap = M.fromList [("toint", "integer"), ("tolong", "bigint"), ("tostring unaryFuncSQL :: Map Text (Text -> Text) unaryFuncSQL = M.fromList - [ ("isnull", \v -> v <> " IS NULL") - , ("isnotnull", \v -> v <> " IS NOT NULL") + [ ("isnull", (<> " IS NULL")) + , ("isnotnull", (<> " IS NOT NULL")) , ("isempty", \v -> "(" <> v <> " IS NULL OR " <> v <> " = '')") , ("isnotempty", \v -> "(" <> v <> " IS NOT NULL AND " <> v <> " != '')") ] diff --git a/static/public/assets/css/tailwind.css b/static/public/assets/css/tailwind.css index 5ab0a69ea..a783cc6a7 100644 --- a/static/public/assets/css/tailwind.css +++ b/static/public/assets/css/tailwind.css @@ -345,6 +345,11 @@ body, :host { .skeleton-glow { box-shadow: 0 0 10px rgba(26, 116, 168, 0.1); } + + /* Dashboard grid loading states */ + .grid-stack:not(.grid-stack-initialized) { visibility: hidden; } + .dashboard-skeleton { transition: opacity 0.2s; } + .dashboard-loaded .dashboard-skeleton { opacity: 0; pointer-events: none; } } /* @@ -1198,11 +1203,15 @@ code, pre, .code, .monospace { /* Dashboard variable selectors */ .dash-variable .tagify { - margin-top: -0.5rem; --tags-border-color: transparent; --tag-text-color: var(--color-textStrong); } +.dash-variable .tagify--select { + cursor: pointer; +} + + .dash-variable .tagify__tag { min-width: 100%; margin-inline-end: 0.5em; @@ -1217,10 +1226,37 @@ code, pre, .code, .monospace { text-wrap-mode: nowrap !important; } +/* Group widget collapsed state - header fills space and centers content */ +.grid-stack-item.collapsed .group-header { + flex: 1; +} + +/* Group widget collapsed state - rotate chevron */ +.grid-stack-item.collapsed .collapse-toggle svg { + transform: rotate(180deg); +} + .grid-stack-handle, .nested-grid-stack-handle { @apply cursor-move; } +/* Nested grid collapse/expand transitions */ +.nested-grid { + transition: opacity 0.2s ease-out, max-height 0.3s ease-out; + overflow: hidden; +} + +.collapsed .nested-grid { + opacity: 0; + max-height: 0 !important; + padding: 0 !important; + margin: 0 !important; +} + +.collapse-toggle svg { + transition: transform 0.2s ease-out; +} + /* Gridstack custom resize icon - restore APItoolkit custom styling (overrides the default gridstack arrow icon with a dotted grid pattern) */ .grid-stack-placeholder > .placeholder-content { diff --git a/static/public/assets/deps/highlightjs/sql-formatter.min.js b/static/public/assets/deps/highlightjs/sql-formatter.min.js new file mode 100644 index 000000000..8375b4267 --- /dev/null +++ b/static/public/assets/deps/highlightjs/sql-formatter.min.js @@ -0,0 +1,2 @@ +!function(E,T){"object"==typeof exports&&"object"==typeof module?module.exports=T():"function"==typeof 
define&&define.amd?define([],T):"object"==typeof exports?exports.sqlFormatter=T():E.sqlFormatter=T()}(this,(()=>(()=>{var E={833:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.snowflake=T.singlestoredb=T.transactsql=T.trino=T.sql=T.sqlite=T.spark=T.redshift=T.postgresql=T.plsql=T.n1ql=T.tidb=T.mysql=T.mariadb=T.hive=T.duckdb=T.db2i=T.db2=T.clickhouse=T.bigquery=void 0;var A=R(5028);Object.defineProperty(T,"bigquery",{enumerable:!0,get:function(){return A.bigquery}});var e=R(5084);Object.defineProperty(T,"clickhouse",{enumerable:!0,get:function(){return e.clickhouse}});var S=R(1718);Object.defineProperty(T,"db2",{enumerable:!0,get:function(){return S.db2}});var I=R(9274);Object.defineProperty(T,"db2i",{enumerable:!0,get:function(){return I.db2i}});var O=R(3018);Object.defineProperty(T,"duckdb",{enumerable:!0,get:function(){return O.duckdb}});var N=R(7340);Object.defineProperty(T,"hive",{enumerable:!0,get:function(){return N.hive}});var t=R(1378);Object.defineProperty(T,"mariadb",{enumerable:!0,get:function(){return t.mariadb}});var L=R(3358);Object.defineProperty(T,"mysql",{enumerable:!0,get:function(){return L.mysql}});var C=R(2066);Object.defineProperty(T,"tidb",{enumerable:!0,get:function(){return C.tidb}});var _=R(7328);Object.defineProperty(T,"n1ql",{enumerable:!0,get:function(){return _.n1ql}});var s=R(6910);Object.defineProperty(T,"plsql",{enumerable:!0,get:function(){return s.plsql}});var r=R(2912);Object.defineProperty(T,"postgresql",{enumerable:!0,get:function(){return r.postgresql}});var a=R(1642);Object.defineProperty(T,"redshift",{enumerable:!0,get:function(){return a.redshift}});var D=R(9774);Object.defineProperty(T,"spark",{enumerable:!0,get:function(){return D.spark}});var P=R(5784);Object.defineProperty(T,"sqlite",{enumerable:!0,get:function(){return P.sqlite}});var n=R(3446);Object.defineProperty(T,"sql",{enumerable:!0,get:function(){return n.sql}});var i=R(198);Object.defineProperty(T,"trino",{enumerable:!0,get:function(){return i.trino}});var o=R(918);Object.defineProperty(T,"transactsql",{enumerable:!0,get:function(){return o.transactsql}});var M=R(7146);Object.defineProperty(T,"singlestoredb",{enumerable:!0,get:function(){return M.singlestoredb}});var U=R(3686);Object.defineProperty(T,"snowflake",{enumerable:!0,get:function(){return U.snowflake}})},8235:function(E,T,R){"use strict";var A=this&&this.__importDefault||function(E){return E&&E.__esModule?E:{default:E}};Object.defineProperty(T,"__esModule",{value:!0}),T.createDialect=void 0;const e=A(R(7165)),S=new Map;T.createDialect=E=>{let T=S.get(E);return T||(T=I(E),S.set(E,T)),T};const I=E=>({tokenizer:new e.default(E.tokenizerOptions,E.name),formatOptions:O(E.formatOptions)}),O=E=>{var T;return{alwaysDenseOperators:E.alwaysDenseOperators||[],onelineClauses:Object.fromEntries(E.onelineClauses.map((E=>[E,!0]))),tabularOnelineClauses:Object.fromEntries((null!==(T=E.tabularOnelineClauses)&&void 0!==T?T:E.onelineClauses).map((E=>[E,!0])))}}},7163:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.expandSinglePhrase=T.expandPhrases=void 0,T.expandPhrases=E=>E.flatMap(T.expandSinglePhrase),T.expandSinglePhrase=E=>t(A(E)).map(R);const R=E=>E.replace(/ +/g," ").trim(),A=E=>({type:"mandatory_block",items:e(E,0)[0]}),e=(E,T,R)=>{const A=[];for(;E[T];){const[e,I]=S(E,T);if(A.push(e),"|"!==E[T=I]){if("}"===E[T]||"]"===E[T]){if(R!==E[T])throw new Error(`Unbalanced parenthesis in: ${E}`);return[A,++T]}if(T===E.length){if(R)throw new Error(`Unbalanced parenthesis in: 
${E}`);return[A,T]}throw new Error(`Unexpected "${E[T]}"`)}T++}return[A,T]},S=(E,T)=>{const R=[];for(;;){const[A,e]=I(E,T);if(!A)break;R.push(A),T=e}return 1===R.length?[R[0],T]:[{type:"concatenation",items:R},T]},I=(E,T)=>{if("{"===E[T])return O(E,T+1);if("["===E[T])return N(E,T+1);{let R="";for(;E[T]&&/[A-Za-z0-9_ ]/.test(E[T]);)R+=E[T],T++;return[R,T]}},O=(E,T)=>{const[R,A]=e(E,T,"}");return[{type:"mandatory_block",items:R},A]},N=(E,T)=>{const[R,A]=e(E,T,"]");return[{type:"optional_block",items:R},A]},t=E=>{if("string"==typeof E)return[E];if("concatenation"===E.type)return E.items.map(t).reduce(L,[""]);if("mandatory_block"===E.type)return E.items.flatMap(t);if("optional_block"===E.type)return["",...E.items.flatMap(t)];throw new Error(`Unknown node type: ${E}`)},L=(E,T)=>{const R=[];for(const A of E)for(const E of T)R.push(A+E);return R}},260:function(E,T,R){"use strict";var A=this&&this.__createBinding||(Object.create?function(E,T,R,A){void 0===A&&(A=R);var e=Object.getOwnPropertyDescriptor(T,R);e&&!("get"in e?!T.__esModule:e.writable||e.configurable)||(e={enumerable:!0,get:function(){return T[R]}}),Object.defineProperty(E,A,e)}:function(E,T,R,A){void 0===A&&(A=R),E[A]=T[R]}),e=this&&this.__setModuleDefault||(Object.create?function(E,T){Object.defineProperty(E,"default",{enumerable:!0,value:T})}:function(E,T){E.default=T}),S=this&&this.__importStar||function(E){if(E&&E.__esModule)return E;var T={};if(null!=E)for(var R in E)"default"!==R&&Object.prototype.hasOwnProperty.call(E,R)&&A(T,E,R);return e(T,E),T};Object.defineProperty(T,"__esModule",{value:!0});const I=R(1044),O=R(3216),N=R(2437),t=R(1301),L=R(5112),C=S(R(240)),_=S(R(989));class s{constructor({cfg:E,dialectCfg:T,params:R,layout:A,inline:e=!1}){this.inline=!1,this.nodes=[],this.index=-1,this.cfg=E,this.dialectCfg=T,this.inline=e,this.params=R,this.layout=A}format(E){for(this.nodes=E,this.index=0;this.index{this.layout.add(this.showFunctionKw(E.nameKw))})),this.formatNode(E.parenthesis)}formatParameterizedDataType(E){this.withComments(E.dataType,(()=>{this.layout.add(this.showDataType(E.dataType))})),this.formatNode(E.parenthesis)}formatArraySubscript(E){let T;switch(E.array.type){case t.NodeType.data_type:T=this.showDataType(E.array);break;case t.NodeType.keyword:T=this.showKw(E.array);break;default:T=this.showIdentifier(E.array)}this.withComments(E.array,(()=>{this.layout.add(T)})),this.formatNode(E.parenthesis)}formatPropertyAccess(E){this.formatNode(E.object),this.layout.add(L.WS.NO_SPACE,E.operator),this.formatNode(E.property)}formatParenthesis(E){const 
T=this.formatInlineExpression(E.children);T?(this.layout.add(E.openParen),this.layout.add(...T.getLayoutItems()),this.layout.add(L.WS.NO_SPACE,E.closeParen,L.WS.SPACE)):(this.layout.add(E.openParen,L.WS.NEWLINE),(0,O.isTabularStyle)(this.cfg)?(this.layout.add(L.WS.INDENT),this.layout=this.formatSubExpression(E.children)):(this.layout.indentation.increaseBlockLevel(),this.layout.add(L.WS.INDENT),this.layout=this.formatSubExpression(E.children),this.layout.indentation.decreaseBlockLevel()),this.layout.add(L.WS.NEWLINE,L.WS.INDENT,E.closeParen,L.WS.SPACE))}formatBetweenPredicate(E){this.layout.add(this.showKw(E.betweenKw),L.WS.SPACE),this.layout=this.formatSubExpression(E.expr1),this.layout.add(L.WS.NO_SPACE,L.WS.SPACE,this.showNonTabularKw(E.andKw),L.WS.SPACE),this.layout=this.formatSubExpression(E.expr2),this.layout.add(L.WS.SPACE)}formatCaseExpression(E){this.formatNode(E.caseKw),this.layout.indentation.increaseBlockLevel(),this.layout=this.formatSubExpression(E.expr),this.layout=this.formatSubExpression(E.clauses),this.layout.indentation.decreaseBlockLevel(),this.layout.add(L.WS.NEWLINE,L.WS.INDENT),this.formatNode(E.endKw)}formatCaseWhen(E){this.layout.add(L.WS.NEWLINE,L.WS.INDENT),this.formatNode(E.whenKw),this.layout=this.formatSubExpression(E.condition),this.formatNode(E.thenKw),this.layout=this.formatSubExpression(E.result)}formatCaseElse(E){this.layout.add(L.WS.NEWLINE,L.WS.INDENT),this.formatNode(E.elseKw),this.layout=this.formatSubExpression(E.result)}formatClause(E){this.isOnelineClause(E)?this.formatClauseInOnelineStyle(E):(0,O.isTabularStyle)(this.cfg)?this.formatClauseInTabularStyle(E):this.formatClauseInIndentedStyle(E)}isOnelineClause(E){return(0,O.isTabularStyle)(this.cfg)?this.dialectCfg.tabularOnelineClauses[E.nameKw.text]:this.dialectCfg.onelineClauses[E.nameKw.text]}formatClauseInIndentedStyle(E){this.layout.add(L.WS.NEWLINE,L.WS.INDENT,this.showKw(E.nameKw),L.WS.NEWLINE),this.layout.indentation.increaseTopLevel(),this.layout.add(L.WS.INDENT),this.layout=this.formatSubExpression(E.children),this.layout.indentation.decreaseTopLevel()}formatClauseInOnelineStyle(E){this.layout.add(L.WS.NEWLINE,L.WS.INDENT,this.showKw(E.nameKw),L.WS.SPACE),this.layout=this.formatSubExpression(E.children)}formatClauseInTabularStyle(E){this.layout.add(L.WS.NEWLINE,L.WS.INDENT,this.showKw(E.nameKw),L.WS.SPACE),this.layout.indentation.increaseTopLevel(),this.layout=this.formatSubExpression(E.children),this.layout.indentation.decreaseTopLevel()}formatSetOperation(E){this.layout.add(L.WS.NEWLINE,L.WS.INDENT,this.showKw(E.nameKw),L.WS.NEWLINE),this.layout.add(L.WS.INDENT),this.layout=this.formatSubExpression(E.children)}formatLimitClause(E){this.withComments(E.limitKw,(()=>{this.layout.add(L.WS.NEWLINE,L.WS.INDENT,this.showKw(E.limitKw))})),this.layout.indentation.increaseTopLevel(),(0,O.isTabularStyle)(this.cfg)?this.layout.add(L.WS.SPACE):this.layout.add(L.WS.NEWLINE,L.WS.INDENT),E.offset?(this.layout=this.formatSubExpression(E.offset),this.layout.add(L.WS.NO_SPACE,",",L.WS.SPACE),this.layout=this.formatSubExpression(E.count)):this.layout=this.formatSubExpression(E.count),this.layout.indentation.decreaseTopLevel()}formatAllColumnsAsterisk(E){this.layout.add("*",L.WS.SPACE)}formatLiteral(E){this.layout.add(E.text,L.WS.SPACE)}formatIdentifier(E){this.layout.add(this.showIdentifier(E),L.WS.SPACE)}formatParameter(E){this.layout.add(this.params.get(E),L.WS.SPACE)}formatOperator({text:E}){this.cfg.denseOperators||this.dialectCfg.alwaysDenseOperators.includes(E)?this.layout.add(L.WS.NO_SPACE,E):":"===E?
this.layout.add(L.WS.NO_SPACE,E,L.WS.SPACE):this.layout.add(E,L.WS.SPACE)}formatComma(E){this.inline?this.layout.add(L.WS.NO_SPACE,",",L.WS.SPACE):this.layout.add(L.WS.NO_SPACE,",",L.WS.NEWLINE,L.WS.INDENT)}withComments(E,T){this.formatComments(E.leadingComments),T(),this.formatComments(E.trailingComments)}formatComments(E){E&&E.forEach((E=>{E.type===t.NodeType.line_comment?this.formatLineComment(E):this.formatBlockComment(E)}))}formatLineComment(E){(0,I.isMultiline)(E.precedingWhitespace||"")?this.layout.add(L.WS.NEWLINE,L.WS.INDENT,E.text,L.WS.MANDATORY_NEWLINE,L.WS.INDENT):this.layout.getLayoutItems().length>0?this.layout.add(L.WS.NO_NEWLINE,L.WS.SPACE,E.text,L.WS.MANDATORY_NEWLINE,L.WS.INDENT):this.layout.add(E.text,L.WS.MANDATORY_NEWLINE,L.WS.INDENT)}formatBlockComment(E){E.type===t.NodeType.block_comment&&this.isMultilineBlockComment(E)?(this.splitBlockComment(E.text).forEach((E=>{this.layout.add(L.WS.NEWLINE,L.WS.INDENT,E)})),this.layout.add(L.WS.NEWLINE,L.WS.INDENT)):this.layout.add(E.text,L.WS.SPACE)}isMultilineBlockComment(E){return(0,I.isMultiline)(E.text)||(0,I.isMultiline)(E.precedingWhitespace||"")}isDocComment(E){const T=E.split(/\n/);return/^\/\*\*?$/.test(T[0])&&T.slice(1,T.length-1).every((E=>/^\s*\*/.test(E)))&&/^\s*\*\/$/.test((0,I.last)(T))}splitBlockComment(E){return this.isDocComment(E)?E.split(/\n/).map((E=>/^\s*\*/.test(E)?" "+E.replace(/^\s*/,""):E)):E.split(/\n/).map((E=>E.replace(/^\s*/,"")))}formatSubExpression(E){return new s({cfg:this.cfg,dialectCfg:this.dialectCfg,params:this.params,layout:this.layout,inline:this.inline}).format(E)}formatInlineExpression(E){const T=this.params.getPositionalParameterIndex();try{return new s({cfg:this.cfg,dialectCfg:this.dialectCfg,params:this.params,layout:new _.default(this.cfg.expressionWidth),inline:!0}).format(E)}catch(E){if(E instanceof _.InlineLayoutError)return void this.params.setPositionalParameterIndex(T);throw E}}formatKeywordNode(E){switch(E.tokenType){case N.TokenType.RESERVED_JOIN:return this.formatJoin(E);case N.TokenType.AND:case N.TokenType.OR:case N.TokenType.XOR:return this.formatLogicalOperator(E);default:return this.formatKeyword(E)}}formatJoin(E){(0,O.isTabularStyle)(this.cfg)?(this.layout.indentation.decreaseTopLevel(),this.layout.add(L.WS.NEWLINE,L.WS.INDENT,this.showKw(E),L.WS.SPACE),this.layout.indentation.increaseTopLevel()):this.layout.add(L.WS.NEWLINE,L.WS.INDENT,this.showKw(E),L.WS.SPACE)}formatKeyword(E){this.layout.add(this.showKw(E),L.WS.SPACE)}formatLogicalOperator(E){"before"===this.cfg.logicalOperatorNewline?(0,O.isTabularStyle)(this.cfg)?(this.layout.indentation.decreaseTopLevel(),this.layout.add(L.WS.NEWLINE,L.WS.INDENT,this.showKw(E),L.WS.SPACE),this.layout.indentation.increaseTopLevel()):this.layout.add(L.WS.NEWLINE,L.WS.INDENT,this.showKw(E),L.WS.SPACE):this.layout.add(this.showKw(E),L.WS.NEWLINE,L.WS.INDENT)}formatDataType(E){this.layout.add(this.showDataType(E),L.WS.SPACE)}showKw(E){return(0,C.isTabularToken)(E.tokenType)?(0,C.default)(this.showNonTabularKw(E),this.cfg.indentStyle):this.showNonTabularKw(E)}showNonTabularKw(E){switch(this.cfg.keywordCase){case"preserve":return(0,I.equalizeWhitespace)(E.raw);case"upper":return E.text;case"lower":return E.text.toLowerCase()}}showFunctionKw(E){return(0,C.isTabularToken)(E.tokenType)?(0,C.default)(this.showNonTabularFunctionKw(E),this.cfg.indentStyle):this.showNonTabularFunctionKw(E)}showNonTabularFunctionKw(E){switch(this.cfg.functionCase){case"preserve":return(0,I.equalizeWhitespace)(E.raw);case"upper":return E.text;case"lower":return 
E.text.toLowerCase()}}showIdentifier(E){if(E.quoted)return E.text;switch(this.cfg.identifierCase){case"preserve":return E.text;case"upper":return E.text.toUpperCase();case"lower":return E.text.toLowerCase()}}showDataType(E){switch(this.cfg.dataTypeCase){case"preserve":return(0,I.equalizeWhitespace)(E.raw);case"upper":return E.text;case"lower":return E.text.toLowerCase()}}}T.default=s,E.exports=T.default},5758:function(E,T,R){"use strict";var A=this&&this.__createBinding||(Object.create?function(E,T,R,A){void 0===A&&(A=R);var e=Object.getOwnPropertyDescriptor(T,R);e&&!("get"in e?!T.__esModule:e.writable||e.configurable)||(e={enumerable:!0,get:function(){return T[R]}}),Object.defineProperty(E,A,e)}:function(E,T,R,A){void 0===A&&(A=R),E[A]=T[R]}),e=this&&this.__setModuleDefault||(Object.create?function(E,T){Object.defineProperty(E,"default",{enumerable:!0,value:T})}:function(E,T){E.default=T}),S=this&&this.__importStar||function(E){if(E&&E.__esModule)return E;var T={};if(null!=E)for(var R in E)"default"!==R&&Object.prototype.hasOwnProperty.call(E,R)&&A(T,E,R);return e(T,E),T},I=this&&this.__importDefault||function(E){return E&&E.__esModule?E:{default:E}};Object.defineProperty(T,"__esModule",{value:!0});const O=R(3216),N=I(R(8264)),t=R(1858),L=I(R(260)),C=S(R(5112)),_=I(R(7963));T.default=class{constructor(E,T){this.dialect=E,this.cfg=T,this.params=new N.default(this.cfg.params)}format(E){const T=this.parse(E);return this.formatAst(T).trimEnd()}parse(E){return(0,t.createParser)(this.dialect.tokenizer).parse(E,this.cfg.paramTypes||{})}formatAst(E){return E.map((E=>this.formatStatement(E))).join("\n".repeat(this.cfg.linesBetweenQueries+1))}formatStatement(E){const T=new L.default({cfg:this.cfg,dialectCfg:this.dialect.formatOptions,params:this.params,layout:new C.default(new _.default((0,O.indentString)(this.cfg)))}).format(E.children);return E.hasSemicolon&&(this.cfg.newlineBeforeSemicolon?T.add(C.WS.NEWLINE,";"):T.add(C.WS.NO_NEWLINE,";")),T.toString()}},E.exports=T.default},7963:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0});const A=R(1044),e="top-level";T.default=class{constructor(E){this.indent=E,this.indentTypes=[]}getSingleIndent(){return this.indent}getLevel(){return this.indentTypes.length}increaseTopLevel(){this.indentTypes.push(e)}increaseBlockLevel(){this.indentTypes.push("block-level")}decreaseTopLevel(){this.indentTypes.length>0&&(0,A.last)(this.indentTypes)===e&&this.indentTypes.pop()}decreaseBlockLevel(){for(;this.indentTypes.length>0&&this.indentTypes.pop()===e;);}},E.exports=T.default},989:function(E,T,R){"use strict";var A=this&&this.__createBinding||(Object.create?function(E,T,R,A){void 0===A&&(A=R);var e=Object.getOwnPropertyDescriptor(T,R);e&&!("get"in e?!T.__esModule:e.writable||e.configurable)||(e={enumerable:!0,get:function(){return T[R]}}),Object.defineProperty(E,A,e)}:function(E,T,R,A){void 0===A&&(A=R),E[A]=T[R]}),e=this&&this.__setModuleDefault||(Object.create?function(E,T){Object.defineProperty(E,"default",{enumerable:!0,value:T})}:function(E,T){E.default=T}),S=this&&this.__importStar||function(E){if(E&&E.__esModule)return E;var T={};if(null!=E)for(var R in E)"default"!==R&&Object.prototype.hasOwnProperty.call(E,R)&&A(T,E,R);return e(T,E),T},I=this&&this.__importDefault||function(E){return E&&E.__esModule?E:{default:E}};Object.defineProperty(T,"__esModule",{value:!0}),T.InlineLayoutError=void 0;const O=I(R(7963)),N=S(R(5112));class t extends N.default{constructor(E){super(new 
O.default("")),this.expressionWidth=E,this.length=0,this.trailingSpace=!1}add(...E){if(E.forEach((E=>this.addToLength(E))),this.length>this.expressionWidth)throw new L;super.add(...E)}addToLength(E){if("string"==typeof E)this.length+=E.length,this.trailingSpace=!1;else{if(E===N.WS.MANDATORY_NEWLINE||E===N.WS.NEWLINE)throw new L;E===N.WS.INDENT||E===N.WS.SINGLE_INDENT||E===N.WS.SPACE?this.trailingSpace||(this.length++,this.trailingSpace=!0):E!==N.WS.NO_NEWLINE&&E!==N.WS.NO_SPACE||this.trailingSpace&&(this.trailingSpace=!1,this.length--)}}}T.default=t;class L extends Error{}T.InlineLayoutError=L},5112:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.WS=void 0;const A=R(1044);var e;!function(E){E[E.SPACE=0]="SPACE",E[E.NO_SPACE=1]="NO_SPACE",E[E.NO_NEWLINE=2]="NO_NEWLINE",E[E.NEWLINE=3]="NEWLINE",E[E.MANDATORY_NEWLINE=4]="MANDATORY_NEWLINE",E[E.INDENT=5]="INDENT",E[E.SINGLE_INDENT=6]="SINGLE_INDENT"}(e=T.WS||(T.WS={})),T.default=class{constructor(E){this.indentation=E,this.items=[]}add(...E){for(const T of E)switch(T){case e.SPACE:this.items.push(e.SPACE);break;case e.NO_SPACE:this.trimHorizontalWhitespace();break;case e.NO_NEWLINE:this.trimWhitespace();break;case e.NEWLINE:this.trimHorizontalWhitespace(),this.addNewline(e.NEWLINE);break;case e.MANDATORY_NEWLINE:this.trimHorizontalWhitespace(),this.addNewline(e.MANDATORY_NEWLINE);break;case e.INDENT:this.addIndentation();break;case e.SINGLE_INDENT:this.items.push(e.SINGLE_INDENT);break;default:this.items.push(T)}}trimHorizontalWhitespace(){for(;S((0,A.last)(this.items));)this.items.pop()}trimWhitespace(){for(;I((0,A.last)(this.items));)this.items.pop()}addNewline(E){if(this.items.length>0)switch((0,A.last)(this.items)){case e.NEWLINE:this.items.pop(),this.items.push(E);break;case e.MANDATORY_NEWLINE:break;default:this.items.push(E)}}addIndentation(){for(let E=0;Ethis.itemToString(E))).join("")}getLayoutItems(){return this.items}itemToString(E){switch(E){case e.SPACE:return" ";case e.NEWLINE:case e.MANDATORY_NEWLINE:return"\n";case e.SINGLE_INDENT:return this.indentation.getSingleIndent();default:return E}}};const S=E=>E===e.SPACE||E===e.SINGLE_INDENT,I=E=>E===e.SPACE||E===e.SINGLE_INDENT||E===e.NEWLINE},8264:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.default=class{constructor(E){this.params=E,this.index=0}get({key:E,text:T}){return this.params?E?this.params[E]:this.params[this.index++]:T}getPositionalParameterIndex(){return this.index}setPositionalParameterIndex(E){this.index=E}},E.exports=T.default},3216:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.isTabularStyle=T.indentString=void 0,T.indentString=function(E){return"tabularLeft"===E.indentStyle||"tabularRight"===E.indentStyle?" 
".repeat(10):E.useTabs?"\t":" ".repeat(E.tabWidth)},T.isTabularStyle=function(E){return"tabularLeft"===E.indentStyle||"tabularRight"===E.indentStyle}},240:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.isTabularToken=void 0;const A=R(2437);T.default=function(E,T){if("standard"===T)return E;let R=[];return E.length>=10&&E.includes(" ")&&([E,...R]=E.split(" ")),(E="tabularLeft"===T?E.padEnd(9," "):E.padStart(9," "))+["",...R].join(" ")},T.isTabularToken=function(E){return(0,A.isLogicalOperator)(E)||E===A.TokenType.RESERVED_CLAUSE||E===A.TokenType.RESERVED_SELECT||E===A.TokenType.RESERVED_SET_OPERATION||E===A.TokenType.RESERVED_JOIN||E===A.TokenType.LIMIT}},5028:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.bigquery=void 0;const A=R(7163),e=R(2437),S=R(6543),I=R(1636),O=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT] [AS STRUCT | AS VALUE]"]),N=(0,A.expandPhrases)(["WITH [RECURSIVE]","FROM","WHERE","GROUP BY","HAVING","QUALIFY","WINDOW","PARTITION BY","ORDER BY","LIMIT","OFFSET","OMIT RECORD IF","INSERT [INTO]","VALUES","SET","MERGE [INTO]","WHEN [NOT] MATCHED [BY SOURCE | BY TARGET] [THEN]","UPDATE SET","CLUSTER BY","FOR SYSTEM_TIME AS OF","WITH CONNECTION","WITH PARTITION COLUMNS","REMOTE WITH CONNECTION"]),t=(0,A.expandPhrases)(["CREATE [OR REPLACE] [TEMP|TEMPORARY|SNAPSHOT|EXTERNAL] TABLE [IF NOT EXISTS]"]),L=(0,A.expandPhrases)(["CREATE [OR REPLACE] [MATERIALIZED] VIEW [IF NOT EXISTS]","UPDATE","DELETE [FROM]","DROP [SNAPSHOT | EXTERNAL] TABLE [IF EXISTS]","ALTER TABLE [IF EXISTS]","ADD COLUMN [IF NOT EXISTS]","DROP COLUMN [IF EXISTS]","RENAME TO","ALTER COLUMN [IF EXISTS]","SET DEFAULT COLLATE","SET OPTIONS","DROP NOT NULL","SET DATA TYPE","ALTER SCHEMA [IF EXISTS]","ALTER [MATERIALIZED] VIEW [IF EXISTS]","ALTER BI_CAPACITY","TRUNCATE TABLE","CREATE SCHEMA [IF NOT EXISTS]","DEFAULT COLLATE","CREATE [OR REPLACE] [TEMP|TEMPORARY|TABLE] FUNCTION [IF NOT EXISTS]","CREATE [OR REPLACE] PROCEDURE [IF NOT EXISTS]","CREATE [OR REPLACE] ROW ACCESS POLICY [IF NOT EXISTS]","GRANT TO","FILTER USING","CREATE CAPACITY","AS JSON","CREATE RESERVATION","CREATE ASSIGNMENT","CREATE SEARCH INDEX [IF NOT EXISTS]","DROP SCHEMA [IF EXISTS]","DROP [MATERIALIZED] VIEW [IF EXISTS]","DROP [TABLE] FUNCTION [IF EXISTS]","DROP PROCEDURE [IF EXISTS]","DROP ROW ACCESS POLICY","DROP ALL ROW ACCESS POLICIES","DROP CAPACITY [IF EXISTS]","DROP RESERVATION [IF EXISTS]","DROP ASSIGNMENT [IF EXISTS]","DROP SEARCH INDEX [IF EXISTS]","DROP [IF EXISTS]","GRANT","REVOKE","DECLARE","EXECUTE IMMEDIATE","LOOP","END LOOP","REPEAT","END REPEAT","WHILE","END WHILE","BREAK","LEAVE","CONTINUE","ITERATE","FOR","END FOR","BEGIN","BEGIN TRANSACTION","COMMIT TRANSACTION","ROLLBACK TRANSACTION","RAISE","RETURN","CALL","ASSERT","EXPORT DATA"]),C=(0,A.expandPhrases)(["UNION {ALL | DISTINCT}","EXCEPT DISTINCT","INTERSECT DISTINCT"]),_=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT | FULL} [OUTER] JOIN","{INNER | CROSS} JOIN"]),s=(0,A.expandPhrases)(["TABLESAMPLE SYSTEM","ANY TYPE","ALL COLUMNS","NOT DETERMINISTIC","{ROWS | RANGE} BETWEEN","IS [NOT] DISTINCT 
FROM"]),r=(0,A.expandPhrases)([]);T.bigquery={name:"bigquery",tokenizerOptions:{reservedSelect:O,reservedClauses:[...N,...L,...t],reservedSetOperations:C,reservedJoins:_,reservedKeywordPhrases:s,reservedDataTypePhrases:r,reservedKeywords:I.keywords,reservedDataTypes:I.dataTypes,reservedFunctionNames:S.functions,extraParens:["[]"],stringTypes:[{quote:'""".."""',prefixes:["R","B","RB","BR"]},{quote:"'''..'''",prefixes:["R","B","RB","BR"]},'""-bs',"''-bs",{quote:'""-raw',prefixes:["R","B","RB","BR"],requirePrefix:!0},{quote:"''-raw",prefixes:["R","B","RB","BR"],requirePrefix:!0}],identTypes:["``"],identChars:{dashes:!0},paramTypes:{positional:!0,named:["@"],quoted:["@"]},variableTypes:[{regex:String.raw`@@\w+`}],lineCommentTypes:["--","#"],operators:["&","|","^","~",">>","<<","||","=>"],postProcess:function(E){return function(E){let T=e.EOF_TOKEN;return E.map((E=>"OFFSET"===E.text&&"["===T.text?(T=E,Object.assign(Object.assign({},E),{type:e.TokenType.RESERVED_FUNCTION_NAME})):(T=E,E)))}(function(E){var T;const R=[];for(let A=0;AT=>T.type===e.TokenType.IDENTIFIER||T.type===e.TokenType.COMMA?T[E]+" ":T[E];function D(E,T){let R=0;for(let A=T;A"===T.text?R--:">>"===T.text&&(R-=2),0===R)return A}return E.length-1}},6543:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 0,T.functions=["KEYS.NEW_KEYSET","KEYS.ADD_KEY_FROM_RAW_BYTES","AEAD.DECRYPT_BYTES","AEAD.DECRYPT_STRING","AEAD.ENCRYPT","KEYS.KEYSET_CHAIN","KEYS.KEYSET_FROM_JSON","KEYS.KEYSET_TO_JSON","KEYS.ROTATE_KEYSET","KEYS.KEYSET_LENGTH","ANY_VALUE","ARRAY_AGG","AVG","CORR","COUNT","COUNTIF","COVAR_POP","COVAR_SAMP","MAX","MIN","ST_CLUSTERDBSCAN","STDDEV_POP","STDDEV_SAMP","STRING_AGG","SUM","VAR_POP","VAR_SAMP","ANY_VALUE","ARRAY_AGG","ARRAY_CONCAT_AGG","AVG","BIT_AND","BIT_OR","BIT_XOR","COUNT","COUNTIF","LOGICAL_AND","LOGICAL_OR","MAX","MIN","STRING_AGG","SUM","APPROX_COUNT_DISTINCT","APPROX_QUANTILES","APPROX_TOP_COUNT","APPROX_TOP_SUM","ARRAY_CONCAT","ARRAY_LENGTH","ARRAY_TO_STRING","GENERATE_ARRAY","GENERATE_DATE_ARRAY","GENERATE_TIMESTAMP_ARRAY","ARRAY_REVERSE","OFFSET","SAFE_OFFSET","ORDINAL","SAFE_ORDINAL","BIT_COUNT","PARSE_BIGNUMERIC","PARSE_NUMERIC","SAFE_CAST","CURRENT_DATE","EXTRACT","DATE","DATE_ADD","DATE_SUB","DATE_DIFF","DATE_TRUNC","DATE_FROM_UNIX_DATE","FORMAT_DATE","LAST_DAY","PARSE_DATE","UNIX_DATE","CURRENT_DATETIME","DATETIME","EXTRACT","DATETIME_ADD","DATETIME_SUB","DATETIME_DIFF","DATETIME_TRUNC","FORMAT_DATETIME","LAST_DAY","PARSE_DATETIME","ERROR","EXTERNAL_QUERY","S2_CELLIDFROMPOINT","S2_COVERINGCELLIDS","ST_ANGLE","ST_AREA","ST_ASBINARY","ST_ASGEOJSON","ST_ASTEXT","ST_AZIMUTH","ST_BOUNDARY","ST_BOUNDINGBOX","ST_BUFFER","ST_BUFFERWITHTOLERANCE","ST_CENTROID","ST_CENTROID_AGG","ST_CLOSESTPOINT","ST_CLUSTERDBSCAN","ST_CONTAINS","ST_CONVEXHULL","ST_COVEREDBY","ST_COVERS","ST_DIFFERENCE","ST_DIMENSION","ST_DISJOINT","ST_DISTANCE","ST_DUMP","ST_DWITHIN","ST_ENDPOINT","ST_EQUALS","ST_EXTENT","ST_EXTERIORRING","ST_GEOGFROM","ST_GEOGFROMGEOJSON","ST_GEOGFROMTEXT","ST_GEOGFROMWKB","ST_GEOGPOINT","ST_GEOGPOINTFROMGEOHASH","ST_GEOHASH","ST_GEOMETRYTYPE","ST_INTERIORRINGS","ST_INTERSECTION","ST_INTERSECTS","ST_INTERSECTSBOX","ST_ISCOLLECTION","ST_ISEMPTY","ST_LENGTH","ST_MAKELINE","ST_MAKEPOLYGON","ST_MAKEPOLYGONORIENTED","ST_MAXDISTANCE","ST_NPOINTS","ST_NUMGEOMETRIES","ST_NUMPOINTS","ST_PERIMETER","ST_POINTN","ST_SIMPLIFY","ST_SNAPTOGRID","ST_STARTPOINT","ST_TOUCHES","ST_UNION","ST_UNION_AGG","ST_WITHIN","ST_X","ST_Y","FARM_FINGERPRINT","MD5","SHA1","SHA256","SHA512","HLL_COUNT.INIT","HL
L_COUNT.MERGE","HLL_COUNT.MERGE_PARTIAL","HLL_COUNT.EXTRACT","MAKE_INTERVAL","EXTRACT","JUSTIFY_DAYS","JUSTIFY_HOURS","JUSTIFY_INTERVAL","JSON_EXTRACT","JSON_QUERY","JSON_EXTRACT_SCALAR","JSON_VALUE","JSON_EXTRACT_ARRAY","JSON_QUERY_ARRAY","JSON_EXTRACT_STRING_ARRAY","JSON_VALUE_ARRAY","TO_JSON_STRING","ABS","SIGN","IS_INF","IS_NAN","IEEE_DIVIDE","RAND","SQRT","POW","POWER","EXP","LN","LOG","LOG10","GREATEST","LEAST","DIV","SAFE_DIVIDE","SAFE_MULTIPLY","SAFE_NEGATE","SAFE_ADD","SAFE_SUBTRACT","MOD","ROUND","TRUNC","CEIL","CEILING","FLOOR","COS","COSH","ACOS","ACOSH","SIN","SINH","ASIN","ASINH","TAN","TANH","ATAN","ATANH","ATAN2","RANGE_BUCKET","FIRST_VALUE","LAST_VALUE","NTH_VALUE","LEAD","LAG","PERCENTILE_CONT","PERCENTILE_DISC","NET.IP_FROM_STRING","NET.SAFE_IP_FROM_STRING","NET.IP_TO_STRING","NET.IP_NET_MASK","NET.IP_TRUNC","NET.IPV4_FROM_INT64","NET.IPV4_TO_INT64","NET.HOST","NET.PUBLIC_SUFFIX","NET.REG_DOMAIN","RANK","DENSE_RANK","PERCENT_RANK","CUME_DIST","NTILE","ROW_NUMBER","SESSION_USER","CORR","COVAR_POP","COVAR_SAMP","STDDEV_POP","STDDEV_SAMP","STDDEV","VAR_POP","VAR_SAMP","VARIANCE","ASCII","BYTE_LENGTH","CHAR_LENGTH","CHARACTER_LENGTH","CHR","CODE_POINTS_TO_BYTES","CODE_POINTS_TO_STRING","CONCAT","CONTAINS_SUBSTR","ENDS_WITH","FORMAT","FROM_BASE32","FROM_BASE64","FROM_HEX","INITCAP","INSTR","LEFT","LENGTH","LPAD","LOWER","LTRIM","NORMALIZE","NORMALIZE_AND_CASEFOLD","OCTET_LENGTH","REGEXP_CONTAINS","REGEXP_EXTRACT","REGEXP_EXTRACT_ALL","REGEXP_INSTR","REGEXP_REPLACE","REGEXP_SUBSTR","REPLACE","REPEAT","REVERSE","RIGHT","RPAD","RTRIM","SAFE_CONVERT_BYTES_TO_STRING","SOUNDEX","SPLIT","STARTS_WITH","STRPOS","SUBSTR","SUBSTRING","TO_BASE32","TO_BASE64","TO_CODE_POINTS","TO_HEX","TRANSLATE","TRIM","UNICODE","UPPER","CURRENT_TIME","TIME","EXTRACT","TIME_ADD","TIME_SUB","TIME_DIFF","TIME_TRUNC","FORMAT_TIME","PARSE_TIME","CURRENT_TIMESTAMP","EXTRACT","STRING","TIMESTAMP","TIMESTAMP_ADD","TIMESTAMP_SUB","TIMESTAMP_DIFF","TIMESTAMP_TRUNC","FORMAT_TIMESTAMP","PARSE_TIMESTAMP","TIMESTAMP_SECONDS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UNIX_SECONDS","UNIX_MILLIS","UNIX_MICROS","GENERATE_UUID","COALESCE","IF","IFNULL","NULLIF","AVG","BIT_AND","BIT_OR","BIT_XOR","CORR","COUNT","COVAR_POP","COVAR_SAMP","EXACT_COUNT_DISTINCT","FIRST","GROUP_CONCAT","GROUP_CONCAT_UNQUOTED","LAST","MAX","MIN","NEST","NTH","QUANTILES","STDDEV","STDDEV_POP","STDDEV_SAMP","SUM","TOP","UNIQUE","VARIANCE","VAR_POP","VAR_SAMP","BIT_COUNT","BOOLEAN","BYTES","CAST","FLOAT","HEX_STRING","INTEGER","STRING","COALESCE","GREATEST","IFNULL","IS_INF","IS_NAN","IS_EXPLICITLY_DEFINED","LEAST","NVL","CURRENT_DATE","CURRENT_TIME","CURRENT_TIMESTAMP","DATE","DATE_ADD","DATEDIFF","DAY","DAYOFWEEK","DAYOFYEAR","FORMAT_UTC_USEC","HOUR","MINUTE","MONTH","MSEC_TO_TIMESTAMP","NOW","PARSE_UTC_USEC","QUARTER","SEC_TO_TIMESTAMP","SECOND","STRFTIME_UTC_USEC","TIME","TIMESTAMP","TIMESTAMP_TO_MSEC","TIMESTAMP_TO_SEC","TIMESTAMP_TO_USEC","USEC_TO_TIMESTAMP","UTC_USEC_TO_DAY","UTC_USEC_TO_HOUR","UTC_USEC_TO_MONTH","UTC_USEC_TO_WEEK","UTC_USEC_TO_YEAR","WEEK","YEAR","FORMAT_IP","PARSE_IP","FORMAT_PACKED_IP","PARSE_PACKED_IP","JSON_EXTRACT","JSON_EXTRACT_SCALAR","ABS","ACOS","ACOSH","ASIN","ASINH","ATAN","ATANH","ATAN2","CEIL","COS","COSH","DEGREES","EXP","FLOOR","LN","LOG","LOG2","LOG10","PI","POW","RADIANS","RAND","ROUND","SIN","SINH","SQRT","TAN","TANH","REGEXP_MATCH","REGEXP_EXTRACT","REGEXP_REPLACE","CONCAT","INSTR","LEFT","LENGTH","LOWER","LPAD","LTRIM","REPLACE","RIGHT","RPAD","RTRIM","SPLIT","SUBSTR","UPPER","TABLE_DATE_RANGE","TABLE_DATE_R
ANGE_STRICT","TABLE_QUERY","HOST","DOMAIN","TLD","AVG","COUNT","MAX","MIN","STDDEV","SUM","CUME_DIST","DENSE_RANK","FIRST_VALUE","LAG","LAST_VALUE","LEAD","NTH_VALUE","NTILE","PERCENT_RANK","PERCENTILE_CONT","PERCENTILE_DISC","RANK","RATIO_TO_REPORT","ROW_NUMBER","CURRENT_USER","EVERY","FROM_BASE64","HASH","FARM_FINGERPRINT","IF","POSITION","SHA1","SOME","TO_BASE64","BQ.JOBS.CANCEL","BQ.REFRESH_MATERIALIZED_VIEW","OPTIONS","PIVOT","UNPIVOT"]},1636:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 0,T.keywords=["ALL","AND","ANY","AS","ASC","ASSERT_ROWS_MODIFIED","AT","BETWEEN","BY","CASE","CAST","COLLATE","CONTAINS","CREATE","CROSS","CUBE","CURRENT","DEFAULT","DEFINE","DESC","DISTINCT","ELSE","END","ENUM","ESCAPE","EXCEPT","EXCLUDE","EXISTS","EXTRACT","FALSE","FETCH","FOLLOWING","FOR","FROM","FULL","GROUP","GROUPING","GROUPS","HASH","HAVING","IF","IGNORE","IN","INNER","INTERSECT","INTO","IS","JOIN","LATERAL","LEFT","LIMIT","LOOKUP","MERGE","NATURAL","NEW","NO","NOT","NULL","NULLS","OF","ON","OR","ORDER","OUTER","OVER","PARTITION","PRECEDING","PROTO","RANGE","RECURSIVE","RESPECT","RIGHT","ROLLUP","ROWS","SELECT","SET","SOME","TABLE","TABLESAMPLE","THEN","TO","TREAT","TRUE","UNBOUNDED","UNION","UNNEST","USING","WHEN","WHERE","WINDOW","WITH","WITHIN","SAFE","LIKE","COPY","CLONE","IN","OUT","INOUT","RETURNS","LANGUAGE","CASCADE","RESTRICT","DETERMINISTIC"],T.dataTypes=["ARRAY","BOOL","BYTES","DATE","DATETIME","GEOGRAPHY","INTERVAL","INT64","INT","SMALLINT","INTEGER","BIGINT","TINYINT","BYTEINT","NUMERIC","DECIMAL","BIGNUMERIC","BIGDECIMAL","FLOAT64","STRING","STRUCT","TIME","TIMEZONE"]},5084:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.clickhouse=void 0;const A=R(7163),e=R(2437),S=R(4407),I=R(460),O=(0,A.expandPhrases)(["SELECT [DISTINCT]","MODIFY QUERY SELECT [DISTINCT]"]),N=(0,A.expandPhrases)(["SET","WITH","FROM","SAMPLE","PREWHERE","WHERE","GROUP BY","HAVING","QUALIFY","ORDER BY","LIMIT","SETTINGS","INTO OUTFILE","FORMAT","WINDOW","PARTITION BY","INSERT INTO","VALUES","DEPENDS ON","MOVE {USER | ROLE | QUOTA | SETTINGS PROFILE | ROW POLICY}","GRANT","REVOKE","CHECK GRANT","SET [DEFAULT] ROLE [NONE | ALL | ALL EXCEPT]","DEDUPLICATE BY","MODIFY STATISTICS","TYPE","ALTER USER [IF EXISTS]","ALTER [ROW] POLICY [IF EXISTS]","DROP {USER | ROLE | QUOTA | PROFILE | SETTINGS PROFILE | ROW POLICY | POLICY} [IF EXISTS]"]),t=(0,A.expandPhrases)(["CREATE [OR REPLACE] [TEMPORARY] TABLE [IF NOT EXISTS]"]),L=(0,A.expandPhrases)(["ALL EXCEPT","ON CLUSTER","UPDATE","SYSTEM RELOAD {DICTIONARIES | DICTIONARY | FUNCTIONS | FUNCTION | ASYNCHRONOUS METRICS}","SYSTEM DROP {DNS CACHE | MARK CACHE | ICEBERG METADATA CACHE | TEXT INDEX DICTIONARY CACHE | TEXT INDEX HEADER CACHE | TEXT INDEX POSTINGS CACHE | REPLICA | DATABASE REPLICA | UNCOMPRESSED CACHE | COMPILED EXPRESSION CACHE | QUERY CONDITION CACHE | QUERY CACHE | FORMAT SCHEMA CACHE | FILESYSTEM CACHE}","SYSTEM FLUSH LOGS","SYSTEM RELOAD {CONFIG | USERS}","SYSTEM SHUTDOWN","SYSTEM KILL","SYSTEM FLUSH DISTRIBUTED","SYSTEM START DISTRIBUTED SENDS","SYSTEM {STOP | START} {LISTEN | MERGES | TTL MERGES | MOVES | FETCHES | REPLICATED SENDS | REPLICATION QUEUES | PULLING REPLICATION LOG}","SYSTEM {SYNC | RESTART | RESTORE} REPLICA","SYSTEM {SYNC | RESTORE} DATABASE REPLICA","SYSTEM RESTART REPLICAS","SYSTEM UNFREEZE","SYSTEM WAIT LOADING PARTS","SYSTEM {LOAD | UNLOAD} PRIMARY KEY","SYSTEM {STOP | START} [REPLICATED] VIEW","SYSTEM {STOP | START} VIEWS","SYSTEM {REFRESH | CANCEL | WAIT} 
VIEW","WITH NAME","SHOW [CREATE] {TABLE | TEMPORARY TABLE | DICTIONARY | VIEW | DATABASE}","SHOW DATABASES [[NOT] {LIKE | ILIKE}]","SHOW [FULL] [TEMPORARY] TABLES [FROM | IN]","SHOW [EXTENDED] [FULL] COLUMNS {FROM | IN}","ATTACH {TABLE | DICTIONARY | DATABASE} [IF NOT EXISTS]","DETACH {TABLE | DICTIONARY | DATABASE} [IF EXISTS]","PERMANENTLY","SYNC","DROP {DICTIONARY | DATABASE | PROFILE | VIEW | FUNCTION | NAMED COLLECTION} [IF EXISTS]","DROP [TEMPORARY] TABLE [IF EXISTS] [IF EMPTY]","RENAME TO","EXISTS [TEMPORARY] {TABLE | DICTIONARY | DATABASE}","KILL QUERY","OPTIMIZE TABLE","RENAME {TABLE | DICTIONARY | DATABASE}","EXCHANGE {TABLES | DICTIONARIES}","TRUNCATE TABLE [IF EXISTS]","EXECUTE AS","USE","TO","UNDROP TABLE","CREATE {DATABASE | NAMED COLLECTION} [IF NOT EXISTS]","CREATE [OR REPLACE] {VIEW | DICTIONARY} [IF NOT EXISTS]","CREATE MATERIALIZED VIEW [IF NOT EXISTS]","CREATE FUNCTION","CREATE {USER | ROLE | QUOTA | SETTINGS PROFILE} [IF NOT EXISTS | OR REPLACE]","CREATE [ROW] POLICY [IF NOT EXISTS | OR REPLACE]","REPLACE [TEMPORARY] TABLE [IF NOT EXISTS]","ALTER {ROLE | QUOTA | SETTINGS PROFILE} [IF EXISTS]","ALTER [TEMPORARY] TABLE","ALTER NAMED COLLECTION [IF EXISTS]","GRANTEES","NOT IDENTIFIED","RESET AUTHENTICATION METHODS TO NEW","{IDENTIFIED | ADD IDENTIFIED} [WITH | BY]","[ADD | DROP] HOST {LOCAL | NAME | REGEXP | IP | LIKE}","VALID UNTIL","DROP [ALL] {PROFILES | SETTINGS}","{ADD | MODIFY} SETTINGS","ADD PROFILES","APPLY DELETED MASK","IN PARTITION","{ADD | DROP | RENAME | CLEAR | COMMENT | MODIFY | ALTER | MATERIALIZE} COLUMN","{DETACH | DROP | ATTACH | FETCH | MOVE} {PART | PARTITION}","DROP DETACHED {PART | PARTITION}","{FORGET | REPLACE} PARTITION","CLEAR COLUMN","{FREEZE | UNFREEZE} [PARTITION]","CLEAR INDEX","TO {DISK | VOLUME}","[DELETE | REWRITE PARTS] IN PARTITION","{MODIFY | RESET} SETTING","DELETE WHERE","MODIFY ORDER BY","{MODIFY | REMOVE} SAMPLE BY","{ADD | MATERIALIZE | CLEAR} INDEX [IF NOT EXISTS]","DROP INDEX [IF EXISTS]","GRANULARITY","AFTER","FIRST","ADD CONSTRAINT [IF NOT EXISTS]","DROP CONSTRAINT [IF EXISTS]","MODIFY TTL","REMOVE TTL","ADD STATISTICS [IF NOT EXISTS]","{DROP | CLEAR} STATISTICS [IF EXISTS]","MATERIALIZE STATISTICS [ALL | IF EXISTS]","KEYED BY","NOT KEYED","FOR [RANDOMIZED] INTERVAL","AS {PERMISSIVE | RESTRICTIVE}","FOR SELECT","ADD PROJECTION [IF NOT EXISTS]","{DROP | MATERIALIZE | CLEAR} PROJECTION [IF EXISTS]","REFRESH {EVERY | AFTER}","RANDOMIZE FOR","APPEND","APPEND TO","DELETE FROM","EXPLAIN [AST | SYNTAX | QUERY TREE | PLAN | PIPELINE | ESTIMATE | TABLE OVERRIDE]","GRANT ON CLUSTER","GRANT CURRENT GRANTS","WITH GRANT OPTION","REVOKE ON CLUSTER","ADMIN OPTION FOR","CHECK TABLE","PARTITION ID","{DESC | DESCRIBE} TABLE"]),C=(0,A.expandPhrases)(["UNION [ALL | DISTINCT]","PARALLEL WITH"]),_=(0,A.expandPhrases)(["[GLOBAL] [INNER|LEFT|RIGHT|FULL|CROSS] [OUTER|SEMI|ANTI|ANY|ALL|ASOF] JOIN","[LEFT] ARRAY JOIN"]),s=(0,A.expandPhrases)(["{ROWS | RANGE} BETWEEN","ALTER MATERIALIZE STATISTICS"]);T.clickhouse={name:"clickhouse",tokenizerOptions:{reservedSelect:O,reservedClauses:[...N,...t,...L],reservedSetOperations:C,reservedJoins:_,reservedKeywordPhrases:s,reservedKeywords:I.keywords,reservedDataTypes:I.dataTypes,reservedFunctionNames:S.functions,extraParens:["[]","{}"],lineCommentTypes:["#","--"],nestedBlockComments:!1,underscoresInNumbers:!0,stringTypes:["$$","''-qq-bs"],identTypes:['""-qq-bs',"``"],paramTypes:{custom:[{regex:String.raw`\{[^:']+:[^}]+\}`,key:E=>{const T=/\{([^:]+):/.exec(E);return 
T?T[1].trim():E}}]},operators:["%","||","?",":","==","<=>","->"],postProcess:function(E){return E.map(((T,R)=>{const A=E[R+1]||e.EOF_TOKEN,S=E[R-1]||e.EOF_TOKEN;return T.type!==e.TokenType.RESERVED_SELECT||A.type!==e.TokenType.COMMA&&S.type!==e.TokenType.RESERVED_CLAUSE&&S.type!==e.TokenType.COMMA?e.isToken.SET(T)&&A.type===e.TokenType.OPEN_PAREN?Object.assign(Object.assign({},T),{type:e.TokenType.RESERVED_FUNCTION_NAME}):T:Object.assign(Object.assign({},T),{type:e.TokenType.RESERVED_KEYWORD})}))}},formatOptions:{onelineClauses:[...t,...L],tabularOnelineClauses:L}}},4407:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 0,T.functions=["BIT_AND","BIT_OR","BIT_XOR","BLAKE3","CAST","CHARACTER_LENGTH","CHAR_LENGTH","COVAR_POP","COVAR_SAMP","CRC32","CRC32IEEE","CRC64","DATE","DATE_DIFF","DATE_FORMAT","DATE_TRUNC","DAY","DAYOFMONTH","DAYOFWEEK","DAYOFYEAR","FORMAT_BYTES","FQDN","FROM_BASE64","FROM_DAYS","FROM_UNIXTIME","HOUR","INET6_ATON","INET6_NTOA","INET_ATON","INET_NTOA","IPv4CIDRToRange","IPv4NumToString","IPv4NumToStringClassC","IPv4StringToNum","IPv4StringToNumOrDefault","IPv4StringToNumOrNull","IPv4ToIPv6","IPv6CIDRToRange","IPv6NumToString","IPv6StringToNum","IPv6StringToNumOrDefault","IPv6StringToNumOrNull","JSONAllPaths","JSONAllPathsWithTypes","JSONArrayLength","JSONDynamicPaths","JSONDynamicPathsWithTypes","JSONExtract","JSONExtractArrayRaw","JSONExtractArrayRawCaseInsensitive","JSONExtractBool","JSONExtractBoolCaseInsensitive","JSONExtractCaseInsensitive","JSONExtractFloat","JSONExtractFloatCaseInsensitive","JSONExtractInt","JSONExtractIntCaseInsensitive","JSONExtractKeys","JSONExtractKeysAndValues","JSONExtractKeysAndValuesCaseInsensitive","JSONExtractKeysAndValuesRaw","JSONExtractKeysAndValuesRawCaseInsensitive","JSONExtractKeysCaseInsensitive","JSONExtractRaw","JSONExtractRawCaseInsensitive","JSONExtractString","JSONExtractStringCaseInsensitive","JSONExtractUInt","JSONExtractUIntCaseInsensitive","JSONHas","JSONKey","JSONLength","JSONMergePatch","JSONSharedDataPaths","JSONSharedDataPathsWithTypes","JSONType","JSON_ARRAY_LENGTH","JSON_EXISTS","JSON_QUERY","JSON_VALUE","L1Distance","L1Norm","L1Normalize","L2Distance","L2Norm","L2Normalize","L2SquaredDistance","L2SquaredNorm","LAST_DAY","LinfDistance","LinfNorm","LinfNormalize","LpDistance","LpNorm","LpNormalize","MACNumToString","MACStringToNum","MACStringToOUI","MAP_FROM_ARRAYS","MD4","MD5","MILLISECOND","MINUTE","MONTH","OCTET_LENGTH","QUARTER","REGEXP_EXTRACT","REGEXP_MATCHES","REGEXP_REPLACE","RIPEMD160","SCHEMA","SECOND","SHA1","SHA224","SHA256","SHA384","SHA512","SHA512_256","STD","STDDEV_POP","STDDEV_SAMP","ST_LineFromWKB","ST_MLineFromWKB","ST_MPolyFromWKB","ST_PointFromWKB","ST_PolyFromWKB","SUBSTRING_INDEX","SVG","TIMESTAMP_DIFF","TO_BASE64","TO_DAYS","TO_UNIXTIME","ULIDStringToDateTime","URLHash","URLHierarchy","URLPathHierarchy","UTCTimestamp","UTC_timestamp","UUIDNumToString","UUIDStringToNum","UUIDToNum","UUIDv7ToDateTime","VAR_POP","VAR_SAMP","YEAR","YYYYMMDDToDate","YYYYMMDDToDate32","YYYYMMDDhhmmssToDateTime","YYYYMMDDhhmmssToDateTime64","_CAST","__actionName","__bitBoolMaskAnd","__bitBoolMaskOr","__bitSwapLastTwo","__bitWrapperFunc","__getScalar","__patchPartitionID","__scalarSubqueryResult","abs","accurateCast","accurateCastOrDefault","accurateCastOrNull","acos","acosh","addDate","addDays","addHours","addInterval","addMicroseconds","addMilliseconds","addMinutes","addMonths","addNanoseconds","addQuarters","addSeconds","addTupleOfIntervals","addWeeks","addYears","addressToLine","
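The custom parameter type is the one non-obvious piece of that ClickHouse tokenizer config: query parameters are written {name:Type}, and the key function strips the type annotation. A minimal sketch, with both regexes copied from the bundle:

// ClickHouse placeholders look like {name:Type}. The tokenizer matches the
// whole placeholder, then derives the parameter key from the part before
// the colon (both regexes appear verbatim in the bundle).
const placeholder = /\{[^:']+:[^}]+\}/;

function paramKey(token: string): string {
  const m = /\{([^:]+):/.exec(token);
  return m ? m[1].trim() : token;
}

// paramKey('{user_id:UInt64}') === 'user_id'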
[Bundle continues with the remainder of the ClickHouse reserved function-name list: the array* family (arrayMap, arrayFilter, arrayReduce, ...), bitmap and bit operations, dictGet* lookups, geo/h3/s2 functions, JSON and URL helpers, map* functions, parseDateTime* variants, quantile*/median* estimators, the reinterpretAs*/to* conversion families, string search and hashing functions, time-series helpers, window functions, and finally table-engine names (MergeTree, ReplacingMergeTree, SummingMergeTree, AggregatingMergeTree, CollapsingMergeTree, Replicated, ...). The ClickHouse keyword module then begins.]
[ClickHouse reserved keywords (ACCESS through ZKPATH) and data types (AggregateFunction, Array, DateTime64, the Decimal*, Enum*, Int*/UInt*, and Interval* families, LowCardinality, Map, Nullable, Tuple, UUID, ...). The DB2 dialect definition follows the same shape: SELECT [ALL | DISTINCT], query clauses (WITH, GROUP BY, ORDER BY [INPUT SEQUENCE], FETCH NEXT, FOR UPDATE [OF], isolation-level clauses, MERGE INTO, ...), CREATE TABLE variants, a long expandPhrases list of one-line ALTER/CREATE/DROP/SET statements covering DB2's many object types (buffer pools, event monitors, masks, nicknames, security labels, trusted contexts, workloads, ...), set operations (UNION/EXCEPT/INTERSECT [ALL]), join types, and keyword phrases (ON DELETE, ON UPDATE, SET NULL, {ROWS | RANGE} BETWEEN).]
BETWEEN"]),s=(0,A.expandPhrases)([]);T.db2={name:"db2",tokenizerOptions:{reservedSelect:I,reservedClauses:[...O,...N,...t],reservedSetOperations:L,reservedJoins:C,reservedKeywordPhrases:_,reservedDataTypePhrases:s,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:e.functions,extraParens:["[]"],stringTypes:[{quote:"''-qq",prefixes:["G","N","U&"]},{quote:"''-raw",prefixes:["X","BX","GX","UX"],requirePrefix:!0}],identTypes:['""-qq'],identChars:{first:"@#$",rest:"@#$"},paramTypes:{positional:!0,named:[":"]},paramChars:{first:"@#$",rest:"@#$"},operators:["**","%","|","&","^","~","¬=","¬>","¬<","!>","!<","^=","^>","^<","||","->","=>"]},formatOptions:{onelineClauses:[...N,...t],tabularOnelineClauses:t}}},6609:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 0,T.functions=["ARRAY_AGG","AVG","CORRELATION","COUNT","COUNT_BIG","COVARIANCE","COVARIANCE_SAMP","CUME_DIST","GROUPING","LISTAGG","MAX","MEDIAN","MIN","PERCENTILE_CONT","PERCENTILE_DISC","PERCENT_RANK","REGR_AVGX","REGR_AVGY","REGR_COUNT","REGR_INTERCEPT","REGR_ICPT","REGR_R2","REGR_SLOPE","REGR_SXX","REGR_SXY","REGR_SYY","STDDEV","STDDEV_SAMP","SUM","VARIANCE","VARIANCE_SAMP","XMLAGG","XMLGROUP","ABS","ABSVAL","ACOS","ADD_DAYS","ADD_HOURS","ADD_MINUTES","ADD_MONTHS","ADD_SECONDS","ADD_YEARS","AGE","ARRAY_DELETE","ARRAY_FIRST","ARRAY_LAST","ARRAY_NEXT","ARRAY_PRIOR","ASCII","ASCII_STR","ASIN","ATAN","ATAN2","ATANH","BITAND","BITANDNOT","BITOR","BITXOR","BITNOT","BPCHAR","BSON_TO_JSON","BTRIM","CARDINALITY","CEILING","CEIL","CHARACTER_LENGTH","CHR","COALESCE","COLLATION_KEY","COLLATION_KEY_BIT","COMPARE_DECFLOAT","CONCAT","COS","COSH","COT","CURSOR_ROWCOUNT","DATAPARTITIONNUM","DATE_PART","DATE_TRUNC","DAY","DAYNAME","DAYOFMONTH","DAYOFWEEK","DAYOFWEEK_ISO","DAYOFYEAR","DAYS","DAYS_BETWEEN","DAYS_TO_END_OF_MONTH","DBPARTITIONNUM","DECFLOAT","DECFLOAT_FORMAT","DECODE","DECRYPT_BIN","DECRYPT_CHAR","DEGREES","DEREF","DIFFERENCE","DIGITS","DOUBLE_PRECISION","EMPTY_BLOB","EMPTY_CLOB","EMPTY_DBCLOB","EMPTY_NCLOB","ENCRYPT","EVENT_MON_STATE","EXP","EXTRACT","FIRST_DAY","FLOOR","FROM_UTC_TIMESTAMP","GENERATE_UNIQUE","GETHINT","GREATEST","HASH","HASH4","HASH8","HASHEDVALUE","HEX","HEXTORAW","HOUR","HOURS_BETWEEN","IDENTITY_VAL_LOCAL","IFNULL","INITCAP","INSERT","INSTR","INSTR2","INSTR4","INSTRB","INTNAND","INTNOR","INTNXOR","INTNNOT","ISNULL","JSON_ARRAY","JSON_OBJECT","JSON_QUERY","JSON_TO_BSON","JSON_VALUE","JULIAN_DAY","LAST_DAY","LCASE","LEAST","LEFT","LENGTH","LENGTH2","LENGTH4","LENGTHB","LN","LOCATE","LOCATE_IN_STRING","LOG10","LONG_VARCHAR","LONG_VARGRAPHIC","LOWER","LPAD","LTRIM","MAX","MAX_CARDINALITY","MICROSECOND","MIDNIGHT_SECONDS","MIN","MINUTE","MINUTES_BETWEEN","MOD","MONTH","MONTHNAME","MONTHS_BETWEEN","MULTIPLY_ALT","NEXT_DAY","NEXT_MONTH","NEXT_QUARTER","NEXT_WEEK","NEXT_YEAR","NORMALIZE_DECFLOAT","NOW","NULLIF","NVL","NVL2","OCTET_LENGTH","OVERLAY","PARAMETER","POSITION","POSSTR","POW","POWER","QUANTIZE","QUARTER","QUOTE_IDENT","QUOTE_LITERAL","RADIANS","RAISE_ERROR","RAND","RANDOM","RAWTOHEX","REC2XML","REGEXP_COUNT","REGEXP_EXTRACT","REGEXP_INSTR","REGEXP_LIKE","REGEXP_MATCH_COUNT","REGEXP_REPLACE","REGEXP_SUBSTR","REPEAT","REPLACE","RID","RID_BIT","RIGHT","ROUND","ROUND_TIMESTAMP","RPAD","RTRIM","SECLABEL","SECLABEL_BY_NAME","SECLABEL_TO_CHAR","SECOND","SECONDS_BETWEEN","SIGN","SIN","SINH","SOUNDEX","SPACE","SQRT","STRIP","STRLEFT","STRPOS","STRRIGHT","SUBSTR","SUBSTR2","SUBSTR4","SUBSTRB","SUBSTRING","TABLE_NAME","TABLE_SCHEMA","TAN","TANH","THIS_MONTH","THIS_QUA
RTER","THIS_WEEK","THIS_YEAR","TIMESTAMP_FORMAT","TIMESTAMP_ISO","TIMESTAMPDIFF","TIMEZONE","TO_CHAR","TO_CLOB","TO_DATE","TO_HEX","TO_MULTI_BYTE","TO_NCHAR","TO_NCLOB","TO_NUMBER","TO_SINGLE_BYTE","TO_TIMESTAMP","TO_UTC_TIMESTAMP","TOTALORDER","TRANSLATE","TRIM","TRIM_ARRAY","TRUNC_TIMESTAMP","TRUNCATE","TRUNC","TYPE_ID","TYPE_NAME","TYPE_SCHEMA","UCASE","UNICODE_STR","UPPER","VALUE","VARCHAR_BIT_FORMAT","VARCHAR_FORMAT","VARCHAR_FORMAT_BIT","VERIFY_GROUP_FOR_USER","VERIFY_ROLE_FOR_USER","VERIFY_TRUSTED_CONTEXT_ROLE_FOR_USER","WEEK","WEEK_ISO","WEEKS_BETWEEN","WIDTH_BUCKET","XMLATTRIBUTES","XMLCOMMENT","XMLCONCAT","XMLDOCUMENT","XMLELEMENT","XMLFOREST","XMLNAMESPACES","XMLPARSE","XMLPI","XMLQUERY","XMLROW","XMLSERIALIZE","XMLTEXT","XMLVALIDATE","XMLXSROBJECTID","XSLTRANSFORM","YEAR","YEARS_BETWEEN","YMD_BETWEEN","BASE_TABLE","JSON_TABLE","UNNEST","XMLTABLE","RANK","DENSE_RANK","NTILE","LAG","LEAD","ROW_NUMBER","FIRST_VALUE","LAST_VALUE","NTH_VALUE","RATIO_TO_REPORT","CAST"]},5246:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 0,T.keywords=["ACTIVATE","ADD","AFTER","ALIAS","ALL","ALLOCATE","ALLOW","ALTER","AND","ANY","AS","ASENSITIVE","ASSOCIATE","ASUTIME","AT","ATTRIBUTES","AUDIT","AUTHORIZATION","AUX","AUXILIARY","BEFORE","BEGIN","BETWEEN","BINARY","BUFFERPOOL","BY","CACHE","CALL","CALLED","CAPTURE","CARDINALITY","CASCADED","CASE","CAST","CHECK","CLONE","CLOSE","CLUSTER","COLLECTION","COLLID","COLUMN","COMMENT","COMMIT","CONCAT","CONDITION","CONNECT","CONNECTION","CONSTRAINT","CONTAINS","CONTINUE","COUNT","COUNT_BIG","CREATE","CROSS","CURRENT","CURRENT_DATE","CURRENT_LC_CTYPE","CURRENT_PATH","CURRENT_SCHEMA","CURRENT_SERVER","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_TIMEZONE","CURRENT_USER","CURSOR","CYCLE","DATA","DATABASE","DATAPARTITIONNAME","DATAPARTITIONNUM","DAY","DAYS","DB2GENERAL","DB2GENRL","DB2SQL","DBINFO","DBPARTITIONNAME","DBPARTITIONNUM","DEALLOCATE","DECLARE","DEFAULT","DEFAULTS","DEFINITION","DELETE","DENSERANK","DENSE_RANK","DESCRIBE","DESCRIPTOR","DETERMINISTIC","DIAGNOSTICS","DISABLE","DISALLOW","DISCONNECT","DISTINCT","DO","DOCUMENT","DROP","DSSIZE","DYNAMIC","EACH","EDITPROC","ELSE","ELSEIF","ENABLE","ENCODING","ENCRYPTION","END","END-EXEC","ENDING","ERASE","ESCAPE","EVERY","EXCEPT","EXCEPTION","EXCLUDING","EXCLUSIVE","EXECUTE","EXISTS","EXIT","EXPLAIN","EXTENDED","EXTERNAL","EXTRACT","FENCED","FETCH","FIELDPROC","FILE","FINAL","FIRST1","FOR","FOREIGN","FREE","FROM","FULL","FUNCTION","GENERAL","GENERATED","GET","GLOBAL","GO","GOTO","GRANT","GRAPHIC","GROUP","HANDLER","HASH","HASHED_VALUE","HAVING","HINT","HOLD","HOUR","HOURS","IDENTITY","IF","IMMEDIATE","IMPORT","IN","INCLUDING","INCLUSIVE","INCREMENT","INDEX","INDICATOR","INDICATORS","INF","INFINITY","INHERIT","INNER","INOUT","INSENSITIVE","INSERT","INTEGRITY","INTERSECT","INTO","IS","ISNULL","ISOBID","ISOLATION","ITERATE","JAR","JAVA","JOIN","KEEP","KEY","LABEL","LANGUAGE","LAST3","LATERAL","LC_CTYPE","LEAVE","LEFT","LIKE","LIMIT","LINKTYPE","LOCAL","LOCALDATE","LOCALE","LOCALTIME","LOCALTIMESTAMP","LOCATOR","LOCATORS","LOCK","LOCKMAX","LOCKSIZE","LOOP","MAINTAINED","MATERIALIZED","MAXVALUE","MICROSECOND","MICROSECONDS","MINUTE","MINUTES","MINVALUE","MODE","MODIFIES","MONTH","MONTHS","NAN","NEW","NEW_TABLE","NEXTVAL","NO","NOCACHE","NOCYCLE","NODENAME","NODENUMBER","NOMAXVALUE","NOMINVALUE","NONE","NOORDER","NORMALIZED","NOT2","NOTNULL","NULL","NULLS","NUMPARTS","OBID","OF","OFF","OFFSET","OLD","OLD_TABLE","ON","OPEN","OPTIMIZATION","OPTIMIZE","OPTION","OR","ORDER
","OUT","OUTER","OVER","OVERRIDING","PACKAGE","PADDED","PAGESIZE","PARAMETER","PART","PARTITION","PARTITIONED","PARTITIONING","PARTITIONS","PASSWORD","PATH","PERCENT","PIECESIZE","PLAN","POSITION","PRECISION","PREPARE","PREVVAL","PRIMARY","PRIQTY","PRIVILEGES","PROCEDURE","PROGRAM","PSID","PUBLIC","QUERY","QUERYNO","RANGE","RANK","READ","READS","RECOVERY","REFERENCES","REFERENCING","REFRESH","RELEASE","RENAME","REPEAT","RESET","RESIGNAL","RESTART","RESTRICT","RESULT","RESULT_SET_LOCATOR","RETURN","RETURNS","REVOKE","RIGHT","ROLE","ROLLBACK","ROUND_CEILING","ROUND_DOWN","ROUND_FLOOR","ROUND_HALF_DOWN","ROUND_HALF_EVEN","ROUND_HALF_UP","ROUND_UP","ROUTINE","ROW","ROWNUMBER","ROWS","ROWSET","ROW_NUMBER","RRN","RUN","SAVEPOINT","SCHEMA","SCRATCHPAD","SCROLL","SEARCH","SECOND","SECONDS","SECQTY","SECURITY","SELECT","SENSITIVE","SEQUENCE","SESSION","SESSION_USER","SET","SIGNAL","SIMPLE","SNAN","SOME","SOURCE","SPECIFIC","SQL","SQLID","STACKED","STANDARD","START","STARTING","STATEMENT","STATIC","STATMENT","STAY","STOGROUP","STORES","STYLE","SUBSTRING","SUMMARY","SYNONYM","SYSFUN","SYSIBM","SYSPROC","SYSTEM","SYSTEM_USER","TABLE","TABLESPACE","THEN","TO","TRANSACTION","TRIGGER","TRIM","TRUNCATE","TYPE","UNDO","UNION","UNIQUE","UNTIL","UPDATE","USAGE","USER","USING","VALIDPROC","VALUE","VALUES","VARIABLE","VARIANT","VCAT","VERSION","VIEW","VOLATILE","VOLUMES","WHEN","WHENEVER","WHERE","WHILE","WITH","WITHOUT","WLM","WRITE","XMLELEMENT","XMLEXISTS","XMLNAMESPACES","YEAR","YEARS"],T.dataTypes=["ARRAY","BIGINT","BINARY","BLOB","BOOLEAN","CCSID","CHAR","CHARACTER","CLOB","DATE","DATETIME","DBCLOB","DEC","DECIMAL","DOUBLE","DOUBLE PRECISION","FLOAT","FLOAT4","FLOAT8","GRAPHIC","INT","INT2","INT4","INT8","INTEGER","INTERVAL","LONG VARCHAR","LONG VARGRAPHIC","NCHAR","NCHR","NCLOB","NVARCHAR","NUMERIC","SMALLINT","REAL","TIME","TIMESTAMP","VARBINARY","VARCHAR","VARGRAPHIC"]},9274:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.db2i=void 0;const A=R(7163),e=R(7237),S=R(7002),I=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT]"]),O=(0,A.expandPhrases)(["WITH [RECURSIVE]","INTO","FROM","WHERE","GROUP BY","HAVING","PARTITION BY","ORDER [SIBLINGS] BY [INPUT SEQUENCE]","LIMIT","OFFSET","FETCH {FIRST | NEXT}","FOR UPDATE [OF]","FOR READ ONLY","OPTIMIZE FOR","INSERT INTO","VALUES","SET","MERGE INTO","WHEN [NOT] MATCHED [THEN]","UPDATE SET","DELETE","INSERT","FOR SYSTEM NAME"]),N=(0,A.expandPhrases)(["CREATE [OR REPLACE] TABLE"]),t=(0,A.expandPhrases)(["CREATE [OR REPLACE] [RECURSIVE] VIEW","UPDATE","WHERE CURRENT OF","WITH {NC | RR | RS | CS | UR}","DELETE FROM","DROP TABLE","ALTER TABLE","ADD [COLUMN]","ALTER [COLUMN]","DROP [COLUMN]","SET DATA TYPE","SET {GENERATED ALWAYS | GENERATED BY DEFAULT}","SET NOT NULL","SET {NOT HIDDEN | IMPLICITLY HIDDEN}","SET FIELDPROC","DROP {DEFAULT | NOT NULL | GENERATED | IDENTITY | ROW CHANGE TIMESTAMP | FIELDPROC}","TRUNCATE [TABLE]","SET [CURRENT] SCHEMA","SET CURRENT_SCHEMA","ALLOCATE CURSOR","ALLOCATE [SQL] DESCRIPTOR [LOCAL | GLOBAL] SQL","ALTER [SPECIFIC] {FUNCTION | PROCEDURE}","ALTER {MASK | PERMISSION | SEQUENCE | TRIGGER}","ASSOCIATE [RESULT SET] {LOCATOR | LOCATORS}","BEGIN DECLARE SECTION","CALL","CLOSE","COMMENT ON {ALIAS | COLUMN | CONSTRAINT | INDEX | MASK | PACKAGE | PARAMETER | PERMISSION | SEQUENCE | TABLE | TRIGGER | VARIABLE | XSROBJECT}","COMMENT ON [SPECIFIC] {FUNCTION | PROCEDURE | ROUTINE}","COMMENT ON PARAMETER SPECIFIC {FUNCTION | PROCEDURE | ROUTINE}","COMMENT ON [TABLE FUNCTION] RETURN COLUMN","COMMENT ON [TABLE FUNCTION] RETURN 
COLUMN SPECIFIC [PROCEDURE | ROUTINE]","COMMIT [WORK] [HOLD]","CONNECT [TO | RESET] USER","CREATE [OR REPLACE] {ALIAS | FUNCTION | MASK | PERMISSION | PROCEDURE | SEQUENCE | TRIGGER | VARIABLE}","CREATE [ENCODED VECTOR] INDEX","CREATE UNIQUE [WHERE NOT NULL] INDEX","CREATE SCHEMA","CREATE TYPE","DEALLOCATE [SQL] DESCRIPTOR [LOCAL | GLOBAL]","DECLARE CURSOR","DECLARE GLOBAL TEMPORARY TABLE","DECLARE","DESCRIBE CURSOR","DESCRIBE INPUT","DESCRIBE [OUTPUT]","DESCRIBE {PROCEDURE | ROUTINE}","DESCRIBE TABLE","DISCONNECT ALL [SQL]","DISCONNECT [CURRENT]","DROP {ALIAS | INDEX | MASK | PACKAGE | PERMISSION | SCHEMA | SEQUENCE | TABLE | TYPE | VARIABLE | XSROBJECT} [IF EXISTS]","DROP [SPECIFIC] {FUNCTION | PROCEDURE | ROUTINE} [IF EXISTS]","END DECLARE SECTION","EXECUTE [IMMEDIATE]","FREE LOCATOR","GET [SQL] DESCRIPTOR [LOCAL | GLOBAL]","GET [CURRENT | STACKED] DIAGNOSTICS","GRANT {ALL [PRIVILEGES] | ALTER | EXECUTE} ON {FUNCTION | PROCEDURE | ROUTINE | PACKAGE | SCHEMA | SEQUENCE | TABLE | TYPE | VARIABLE | XSROBJECT}","HOLD LOCATOR","INCLUDE","LABEL ON {ALIAS | COLUMN | CONSTRAINT | INDEX | MASK | PACKAGE | PERMISSION | SEQUENCE | TABLE | TRIGGER | VARIABLE | XSROBJECT}","LABEL ON [SPECIFIC] {FUNCTION | PROCEDURE | ROUTINE}","LOCK TABLE","OPEN","PREPARE","REFRESH TABLE","RELEASE","RELEASE [TO] SAVEPOINT","RENAME [TABLE | INDEX] TO","REVOKE {ALL [PRIVILEGES] | ALTER | EXECUTE} ON {FUNCTION | PROCEDURE | ROUTINE | PACKAGE | SCHEMA | SEQUENCE | TABLE | TYPE | VARIABLE | XSROBJECT}","ROLLBACK [WORK] [HOLD | TO SAVEPOINT]","SAVEPOINT","SET CONNECTION","SET CURRENT {DEBUG MODE | DECFLOAT ROUNDING MODE | DEGREE | IMPLICIT XMLPARSE OPTION | TEMPORAL SYSTEM_TIME}","SET [SQL] DESCRIPTOR [LOCAL | GLOBAL]","SET ENCRYPTION PASSWORD","SET OPTION","SET {[CURRENT [FUNCTION]] PATH | CURRENT_PATH}","SET RESULT SETS [WITH RETURN [TO CALLER | TO CLIENT]]","SET SESSION AUTHORIZATION","SET SESSION_USER","SET TRANSACTION","SIGNAL SQLSTATE [VALUE]","TAG","TRANSFER OWNERSHIP OF","WHENEVER {NOT FOUND | SQLERROR | SQLWARNING}"]),L=(0,A.expandPhrases)(["UNION [ALL]","EXCEPT [ALL]","INTERSECT [ALL]"]),C=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT | FULL} [OUTER] JOIN","[LEFT | RIGHT] EXCEPTION JOIN","{INNER | CROSS} JOIN"]),_=(0,A.expandPhrases)(["ON DELETE","ON UPDATE","SET NULL","{ROWS | RANGE} BETWEEN"]),s=(0,A.expandPhrases)([]);T.db2i={name:"db2i",tokenizerOptions:{reservedSelect:I,reservedClauses:[...O,...N,...t],reservedSetOperations:L,reservedJoins:C,reservedKeywordPhrases:_,reservedDataTypePhrases:s,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:e.functions,nestedBlockComments:!0,extraParens:["[]"],stringTypes:[{quote:"''-qq",prefixes:["G","N"]},{quote:"''-raw",prefixes:["X","BX","GX","UX"],requirePrefix:!0}],identTypes:['""-qq'],identChars:{first:"@#$",rest:"@#$"},paramTypes:{positional:!0,named:[":"]},paramChars:{first:"@#$",rest:"@#$"},operators:["**","¬=","¬>","¬<","!>","!<","||","=>"]},formatOptions:{onelineClauses:[...N,...t],tabularOnelineClauses:t}}},7237:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 
0,T.functions=["ARRAY_AGG","AVG","CORR","CORRELATION","COUNT","COUNT_BIG","COVAR_POP","COVARIANCE","COVAR","COVAR_SAMP","COVARIANCE_SAMP","EVERY","GROUPING","JSON_ARRAYAGG","JSON_OBJECTAGG","LISTAGG","MAX","MEDIAN","MIN","PERCENTILE_CONT","PERCENTILE_DISC","REGR_AVGX","REGR_AVGY","REGR_COUNT","REGR_INTERCEPT","REGR_R2","REGR_SLOPE","REGR_SXX","REGR_SXY","REGR_SYY","SOME","STDDEV_POP","STDDEV","STDDEV_SAMP","SUM","VAR_POP","VARIANCE","VAR","VAR_SAMP","VARIANCE_SAMP","XMLAGG","XMLGROUP","ABS","ABSVAL","ACOS","ADD_DAYS","ADD_HOURS","ADD_MINUTES","ADD_MONTHS","ADD_SECONDS","ADD_YEARS","ANTILOG","ARRAY_MAX_CARDINALITY","ARRAY_TRIM","ASCII","ASIN","ATAN","ATAN2","ATANH","BASE64_DECODE","BASE64_ENCODE","BIT_LENGTH","BITAND","BITANDNOT","BITNOT","BITOR","BITXOR","BSON_TO_JSON","CARDINALITY","CEIL","CEILING","CHAR_LENGTH","CHARACTER_LENGTH","CHR","COALESCE","COMPARE_DECFLOAT","CONCAT","CONTAINS","COS","COSH","COT","CURDATE","CURTIME","DATABASE","DATAPARTITIONNAME","DATAPARTITIONNUM","DAY","DAYNAME","DAYOFMONTH","DAYOFWEEK_ISO","DAYOFWEEK","DAYOFYEAR","DAYS","DBPARTITIONNAME","DBPARTITIONNUM","DECFLOAT_FORMAT","DECFLOAT_SORTKEY","DECRYPT_BINARY","DECRYPT_BIT","DECRYPT_CHAR","DECRYPT_DB","DEGREES","DIFFERENCE","DIGITS","DLCOMMENT","DLLINKTYPE","DLURLCOMPLETE","DLURLPATH","DLURLPATHONLY","DLURLSCHEME","DLURLSERVER","DLVALUE","DOUBLE_PRECISION","DOUBLE","ENCRPYT","ENCRYPT_AES","ENCRYPT_AES256","ENCRYPT_RC2","ENCRYPT_TDES","EXP","EXTRACT","FIRST_DAY","FLOOR","GENERATE_UNIQUE","GET_BLOB_FROM_FILE","GET_CLOB_FROM_FILE","GET_DBCLOB_FROM_FILE","GET_XML_FILE","GETHINT","GREATEST","HASH_MD5","HASH_ROW","HASH_SHA1","HASH_SHA256","HASH_SHA512","HASH_VALUES","HASHED_VALUE","HEX","HEXTORAW","HOUR","HTML_ENTITY_DECODE","HTML_ENTITY_ENCODE","HTTP_DELETE_BLOB","HTTP_DELETE","HTTP_GET_BLOB","HTTP_GET","HTTP_PATCH_BLOB","HTTP_PATCH","HTTP_POST_BLOB","HTTP_POST","HTTP_PUT_BLOB","HTTP_PUT","IDENTITY_VAL_LOCAL","IFNULL","INSERT","INSTR","INTERPRET","ISFALSE","ISNOTFALSE","ISNOTTRUE","ISTRUE","JSON_ARRAY","JSON_OBJECT","JSON_QUERY","JSON_TO_BSON","JSON_UPDATE","JSON_VALUE","JULIAN_DAY","LAND","LAST_DAY","LCASE","LEAST","LEFT","LENGTH","LN","LNOT","LOCATE_IN_STRING","LOCATE","LOG10","LOR","LOWER","LPAD","LTRIM","MAX_CARDINALITY","MAX","MICROSECOND","MIDNIGHT_SECONDS","MIN","MINUTE","MOD","MONTH","MONTHNAME","MONTHS_BETWEEN","MQREAD","MQREADCLOB","MQRECEIVE","MQRECEIVECLOB","MQSEND","MULTIPLY_ALT","NEXT_DAY","NORMALIZE_DECFLOAT","NOW","NULLIF","NVL","OCTET_LENGTH","OVERLAY","PI","POSITION","POSSTR","POW","POWER","QUANTIZE","QUARTER","RADIANS","RAISE_ERROR","RANDOM","RAND","REGEXP_COUNT","REGEXP_INSTR","REGEXP_REPLACE","REGEXP_SUBSTR","REPEAT","REPLACE","RID","RIGHT","ROUND_TIMESTAMP","ROUND","RPAD","RRN","RTRIM","SCORE","SECOND","SIGN","SIN","SINH","SOUNDEX","SPACE","SQRT","STRIP","STRLEFT","STRPOS","STRRIGHT","SUBSTR","SUBSTRING","TABLE_NAME","TABLE_SCHEMA","TAN","TANH","TIMESTAMP_FORMAT","TIMESTAMP_ISO","TIMESTAMPDIFF_BIG","TIMESTAMPDIFF","TO_CHAR","TO_CLOB","TO_DATE","TO_NUMBER","TO_TIMESTAMP","TOTALORDER","TRANSLATE","TRIM_ARRAY","TRIM","TRUNC_TIMESTAMP","TRUNC","TRUNCATE","UCASE","UPPER","URL_DECODE","URL_ENCODE","VALUE","VARBINARY_FORMAT","VARCHAR_BIT_FORMAT","VARCHAR_FORMAT_BINARY","VARCHAR_FORMAT","VERIFY_GROUP_FOR_USER","WEEK_ISO","WEEK","WRAP","XMLATTRIBUTES","XMLCOMMENT","XMLCONCAT","XMLDOCUMENT","XMLELEMENT","XMLFOREST","XMLNAMESPACES","XMLPARSE","XMLPI","XMLROW","XMLSERIALIZE","XMLTEXT","XMLVALIDATE","XOR","XSLTRANSFORM","YEAR","ZONED","BASE_TABLE","HTTP_DELETE_BLOB_VERBOSE","HTTP_DELETE_VERBOSE","HTTP_GET_BLOB_
VERBOSE","HTTP_GET_VERBOSE","HTTP_PATCH_BLOB_VERBOSE","HTTP_PATCH_VERBOSE","HTTP_POST_BLOB_VERBOSE","HTTP_POST_VERBOSE","HTTP_PUT_BLOB_VERBOSE","HTTP_PUT_VERBOSE","JSON_TABLE","MQREADALL","MQREADALLCLOB","MQRECEIVEALL","MQRECEIVEALLCLOB","XMLTABLE","UNPACK","CUME_DIST","DENSE_RANK","FIRST_VALUE","LAG","LAST_VALUE","LEAD","NTH_VALUE","NTILE","PERCENT_RANK","RANK","RATIO_TO_REPORT","ROW_NUMBER","CAST"]},7002:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 0,T.keywords=["ABSENT","ACCORDING","ACCTNG","ACTION","ACTIVATE","ADD","ALIAS","ALL","ALLOCATE","ALLOW","ALTER","AND","ANY","APPEND","APPLNAME","ARRAY","ARRAY_AGG","ARRAY_TRIM","AS","ASC","ASENSITIVE","ASSOCIATE","ATOMIC","ATTACH","ATTRIBUTES","AUTHORIZATION","AUTONOMOUS","BEFORE","BEGIN","BETWEEN","BIND","BSON","BUFFERPOOL","BY","CACHE","CALL","CALLED","CARDINALITY","CASE","CAST","CHECK","CL","CLOSE","CLUSTER","COLLECT","COLLECTION","COLUMN","COMMENT","COMMIT","COMPACT","COMPARISONS","COMPRESS","CONCAT","CONCURRENT","CONDITION","CONNECT","CONNECT_BY_ROOT","CONNECTION","CONSTANT","CONSTRAINT","CONTAINS","CONTENT","CONTINUE","COPY","COUNT","COUNT_BIG","CREATE","CREATEIN","CROSS","CUBE","CUME_DIST","CURRENT","CURRENT_DATE","CURRENT_PATH","CURRENT_SCHEMA","CURRENT_SERVER","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_TIMEZONE","CURRENT_USER","CURSOR","CYCLE","DATABASE","DATAPARTITIONNAME","DATAPARTITIONNUM","DAY","DAYS","DB2GENERAL","DB2GENRL","DB2SQL","DBINFO","DBPARTITIONNAME","DBPARTITIONNUM","DEACTIVATE","DEALLOCATE","DECLARE","DEFAULT","DEFAULTS","DEFER","DEFINE","DEFINITION","DELETE","DELETING","DENSE_RANK","DENSERANK","DESC","DESCRIBE","DESCRIPTOR","DETACH","DETERMINISTIC","DIAGNOSTICS","DISABLE","DISALLOW","DISCONNECT","DISTINCT","DO","DOCUMENT","DROP","DYNAMIC","EACH","ELSE","ELSEIF","EMPTY","ENABLE","ENCODING","ENCRYPTION","END","END-EXEC","ENDING","ENFORCED","ERROR","ESCAPE","EVERY","EXCEPT","EXCEPTION","EXCLUDING","EXCLUSIVE","EXECUTE","EXISTS","EXIT","EXTEND","EXTERNAL","EXTRACT","FALSE","FENCED","FETCH","FIELDPROC","FILE","FINAL","FIRST_VALUE","FOR","FOREIGN","FORMAT","FREE","FREEPAGE","FROM","FULL","FUNCTION","GBPCACHE","GENERAL","GENERATED","GET","GLOBAL","GO","GOTO","GRANT","GROUP","HANDLER","HASH","HASH_ROW","HASHED_VALUE","HAVING","HINT","HOLD","HOUR","HOURS","IDENTITY","IF","IGNORE","IMMEDIATE","IMPLICITLY","IN","INCLUDE","INCLUDING","INCLUSIVE","INCREMENT","INDEX","INDEXBP","INDICATOR","INF","INFINITY","INHERIT","INLINE","INNER","INOUT","INSENSITIVE","INSERT","INSERTING","INTEGRITY","INTERPRET","INTERSECT","INTO","IS","ISNULL","ISOLATION","ITERATE","JAVA","JOIN","JSON","JSON_ARRAY","JSON_ARRAYAGG","JSON_EXISTS","JSON_OBJECT","JSON_OBJECTAGG","JSON_QUERY","JSON_TABLE","JSON_VALUE","KEEP","KEY","KEYS","LABEL","LAG","LANGUAGE","LAST_VALUE","LATERAL","LEAD","LEAVE","LEFT","LEVEL2","LIKE","LIMIT","LINKTYPE","LISTAGG","LOCAL","LOCALDATE","LOCALTIME","LOCALTIMESTAMP","LOCATION","LOCATOR","LOCK","LOCKSIZE","LOG","LOGGED","LOOP","MAINTAINED","MASK","MATCHED","MATERIALIZED","MAXVALUE","MERGE","MICROSECOND","MICROSECONDS","MINPCTUSED","MINUTE","MINUTES","MINVALUE","MIRROR","MIXED","MODE","MODIFIES","MONTH","MONTHS","NAMESPACE","NAN","NATIONAL","NCHAR","NCLOB","NESTED","NEW","NEW_TABLE","NEXTVAL","NO","NOCACHE","NOCYCLE","NODENAME","NODENUMBER","NOMAXVALUE","NOMINVALUE","NONE","NOORDER","NORMALIZED","NOT","NOTNULL","NTH_VALUE","NTILE","NULL","NULLS","NVARCHAR","OBID","OBJECT","OF","OFF","OFFSET","OLD","OLD_TABLE","OMIT","ON","ONLY","OPEN","OPTIMIZE","OPTION","OR","ORDER","ORDINALITY","ORGA
NIZE","OUT","OUTER","OVER","OVERLAY","OVERRIDING","PACKAGE","PADDED","PAGE","PAGESIZE","PARAMETER","PART","PARTITION","PARTITIONED","PARTITIONING","PARTITIONS","PASSING","PASSWORD","PATH","PCTFREE","PERCENT_RANK","PERCENTILE_CONT","PERCENTILE_DISC","PERIOD","PERMISSION","PIECESIZE","PIPE","PLAN","POSITION","PREPARE","PREVVAL","PRIMARY","PRIOR","PRIQTY","PRIVILEGES","PROCEDURE","PROGRAM","PROGRAMID","QUERY","RANGE","RANK","RATIO_TO_REPORT","RCDFMT","READ","READS","RECOVERY","REFERENCES","REFERENCING","REFRESH","REGEXP_LIKE","RELEASE","RENAME","REPEAT","RESET","RESIGNAL","RESTART","RESULT","RESULT_SET_LOCATOR","RETURN","RETURNING","RETURNS","REVOKE","RID","RIGHT","ROLLBACK","ROLLUP","ROUTINE","ROW","ROW_NUMBER","ROWNUMBER","ROWS","RRN","RUN","SAVEPOINT","SBCS","SCALAR","SCHEMA","SCRATCHPAD","SCROLL","SEARCH","SECOND","SECONDS","SECQTY","SECURED","SELECT","SENSITIVE","SEQUENCE","SESSION","SESSION_USER","SET","SIGNAL","SIMPLE","SKIP","SNAN","SOME","SOURCE","SPECIFIC","SQL","SQLID","SQLIND_DEFAULT","SQLIND_UNASSIGNED","STACKED","START","STARTING","STATEMENT","STATIC","STOGROUP","SUBSTRING","SUMMARY","SYNONYM","SYSTEM_TIME","SYSTEM_USER","TABLE","TABLESPACE","TABLESPACES","TAG","THEN","THREADSAFE","TO","TRANSACTION","TRANSFER","TRIGGER","TRIM","TRIM_ARRAY","TRUE","TRUNCATE","TRY_CAST","TYPE","UNDO","UNION","UNIQUE","UNIT","UNKNOWN","UNNEST","UNTIL","UPDATE","UPDATING","URI","USAGE","USE","USER","USERID","USING","VALUE","VALUES","VARIABLE","VARIANT","VCAT","VERSION","VERSIONING","VIEW","VOLATILE","WAIT","WHEN","WHENEVER","WHERE","WHILE","WITH","WITHIN","WITHOUT","WRAPPED","WRAPPER","WRITE","WRKSTNNAME","XMLAGG","XMLATTRIBUTES","XMLCAST","XMLCOMMENT","XMLCONCAT","XMLDOCUMENT","XMLELEMENT","XMLFOREST","XMLGROUP","XMLNAMESPACES","XMLPARSE","XMLPI","XMLROW","XMLSERIALIZE","XMLTABLE","XMLTEXT","XMLVALIDATE","XSLTRANSFORM","XSROBJECT","YEAR","YEARS","YES","ZONE"],T.dataTypes=["ARRAY","BIGINT","BINARY","BIT","BLOB","BOOLEAN","CCSID","CHAR","CHARACTER","CLOB","DATA","DATALINK","DATE","DBCLOB","DECFLOAT","DECIMAL","DEC","DOUBLE","DOUBLE PRECISION","FLOAT","GRAPHIC","INT","INTEGER","LONG","NUMERIC","REAL","ROWID","SMALLINT","TIME","TIMESTAMP","VARBINARY","VARCHAR","VARGRAPHIC","XML"]},3018:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.duckdb=void 0;const A=R(7163),e=R(3365),S=R(6026),I=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT]"]),O=(0,A.expandPhrases)(["WITH [RECURSIVE]","FROM","WHERE","GROUP BY [ALL]","HAVING","WINDOW","PARTITION BY","ORDER BY [ALL]","LIMIT","OFFSET","USING SAMPLE","QUALIFY","INSERT [OR REPLACE] INTO","VALUES","DEFAULT VALUES","SET","RETURNING"]),N=(0,A.expandPhrases)(["CREATE [OR REPLACE] [TEMPORARY | TEMP] TABLE [IF NOT EXISTS]"]),t=(0,A.expandPhrases)(["UPDATE","ON CONFLICT","DELETE FROM","DROP TABLE [IF EXISTS]","TRUNCATE","ALTER TABLE","ADD [COLUMN] [IF NOT EXISTS]","ADD PRIMARY KEY","DROP [COLUMN] [IF EXISTS]","ALTER [COLUMN]","RENAME [COLUMN]","RENAME TO","SET [DATA] TYPE","{SET | DROP} DEFAULT","{SET | DROP} NOT NULL","CREATE [OR REPLACE] [TEMPORARY | TEMP] {MACRO | FUNCTION}","DROP MACRO [TABLE] [IF EXISTS]","DROP FUNCTION [IF EXISTS]","CREATE [UNIQUE] INDEX [IF NOT EXISTS]","DROP INDEX [IF EXISTS]","CREATE [OR REPLACE] SCHEMA [IF NOT EXISTS]","DROP SCHEMA [IF EXISTS]","CREATE [OR REPLACE] [PERSISTENT | TEMPORARY] SECRET [IF NOT EXISTS]","DROP [PERSISTENT | TEMPORARY] SECRET [IF EXISTS]","CREATE [OR REPLACE] [TEMPORARY | TEMP] SEQUENCE","DROP SEQUENCE [IF EXISTS]","CREATE [OR REPLACE] [TEMPORARY | TEMP] VIEW [IF NOT EXISTS]","DROP VIEW [IF 
EXISTS]","ALTER VIEW","CREATE TYPE","DROP TYPE [IF EXISTS]","ANALYZE","ATTACH [DATABASE] [IF NOT EXISTS]","DETACH [DATABASE] [IF EXISTS]","CALL","[FORCE] CHECKPOINT","COMMENT ON [TABLE | COLUMN | VIEW | INDEX | SEQUENCE | TYPE | MACRO | MACRO TABLE]","COPY [FROM DATABASE]","DESCRIBE","EXPORT DATABASE","IMPORT DATABASE","INSTALL","LOAD","PIVOT","PIVOT_WIDER","UNPIVOT","EXPLAIN [ANALYZE]","SET {LOCAL | SESSION | GLOBAL}","RESET [LOCAL | SESSION | GLOBAL]","{SET | RESET} VARIABLE","SUMMARIZE","BEGIN TRANSACTION","ROLLBACK","COMMIT","ABORT","USE","VACUUM [ANALYZE]","PREPARE","EXECUTE","DEALLOCATE [PREPARE]"]),L=(0,A.expandPhrases)(["UNION [ALL | BY NAME]","EXCEPT [ALL]","INTERSECT [ALL]"]),C=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT | FULL} [OUTER] JOIN","{INNER | CROSS} JOIN","{NATURAL | ASOF} [INNER] JOIN","{NATURAL | ASOF} {LEFT | RIGHT | FULL} [OUTER] JOIN","POSITIONAL JOIN","ANTI JOIN","SEMI JOIN"]),_=(0,A.expandPhrases)(["{ROWS | RANGE | GROUPS} BETWEEN","SIMILAR TO","IS [NOT] DISTINCT FROM"]),s=(0,A.expandPhrases)(["TIMESTAMP WITH TIME ZONE"]);T.duckdb={name:"duckdb",tokenizerOptions:{reservedSelect:I,reservedClauses:[...O,...N,...t],reservedSetOperations:L,reservedJoins:C,reservedKeywordPhrases:_,reservedDataTypePhrases:s,supportsXor:!0,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:e.functions,nestedBlockComments:!0,extraParens:["[]","{}"],underscoresInNumbers:!0,stringTypes:["$$","''-qq",{quote:"''-qq-bs",prefixes:["E"],requirePrefix:!0},{quote:"''-raw",prefixes:["B","X"],requirePrefix:!0}],identTypes:['""-qq'],identChars:{rest:"$"},paramTypes:{positional:!0,numbered:["$"],quoted:["$"]},operators:["//","%","**","^","!","&","|","~","<<",">>","::","==","->","->>",":",":=","=>","~~","!~~","~~*","!~~*","~~~","~","!~","~*","!~*","^@","||",">>=","<<="]},formatOptions:{alwaysDenseOperators:["::"],onelineClauses:[...N,...t],tabularOnelineClauses:t}}},3365:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 
0,T.functions=["ABS","ACOS","ADD","ADD_PARQUET_KEY","AGE","AGGREGATE","ALIAS","ALL_PROFILING_OUTPUT","ANY_VALUE","APPLY","APPROX_COUNT_DISTINCT","APPROX_QUANTILE","ARBITRARY","ARGMAX","ARGMIN","ARG_MAX","ARG_MAX_NULL","ARG_MIN","ARG_MIN_NULL","ARRAY_AGG","ARRAY_AGGR","ARRAY_AGGREGATE","ARRAY_APPEND","ARRAY_APPLY","ARRAY_CAT","ARRAY_CONCAT","ARRAY_CONTAINS","ARRAY_COSINE_SIMILARITY","ARRAY_CROSS_PRODUCT","ARRAY_DISTANCE","ARRAY_DISTINCT","ARRAY_DOT_PRODUCT","ARRAY_EXTRACT","ARRAY_FILTER","ARRAY_GRADE_UP","ARRAY_HAS","ARRAY_HAS_ALL","ARRAY_HAS_ANY","ARRAY_INDEXOF","ARRAY_INNER_PRODUCT","ARRAY_INTERSECT","ARRAY_LENGTH","ARRAY_POP_BACK","ARRAY_POP_FRONT","ARRAY_POSITION","ARRAY_PREPEND","ARRAY_PUSH_BACK","ARRAY_PUSH_FRONT","ARRAY_REDUCE","ARRAY_RESIZE","ARRAY_REVERSE","ARRAY_REVERSE_SORT","ARRAY_SELECT","ARRAY_SLICE","ARRAY_SORT","ARRAY_TO_JSON","ARRAY_TO_STRING","ARRAY_TRANSFORM","ARRAY_UNIQUE","ARRAY_VALUE","ARRAY_WHERE","ARRAY_ZIP","ARROW_SCAN","ARROW_SCAN_DUMB","ASCII","ASIN","ATAN","ATAN2","AVG","BASE64","BIN","BITSTRING","BITSTRING_AGG","BIT_AND","BIT_COUNT","BIT_LENGTH","BIT_OR","BIT_POSITION","BIT_XOR","BOOL_AND","BOOL_OR","CARDINALITY","CBRT","CEIL","CEILING","CENTURY","CHECKPOINT","CHR","COLLATIONS","COL_DESCRIPTION","COMBINE","CONCAT","CONCAT_WS","CONSTANT_OR_NULL","CONTAINS","COPY_DATABASE","CORR","COS","COT","COUNT","COUNT_IF","COUNT_STAR","COVAR_POP","COVAR_SAMP","CREATE_SORT_KEY","CURRENT_CATALOG","CURRENT_DATABASE","CURRENT_DATE","CURRENT_LOCALTIME","CURRENT_LOCALTIMESTAMP","CURRENT_QUERY","CURRENT_ROLE","CURRENT_SCHEMA","CURRENT_SCHEMAS","CURRENT_SETTING","CURRENT_USER","CURRVAL","DAMERAU_LEVENSHTEIN","DATABASE_LIST","DATABASE_SIZE","DATEDIFF","DATEPART","DATESUB","DATETRUNC","DATE_ADD","DATE_DIFF","DATE_PART","DATE_SUB","DATE_TRUNC","DAY","DAYNAME","DAYOFMONTH","DAYOFWEEK","DAYOFYEAR","DECADE","DECODE","DEGREES","DISABLE_CHECKPOINT_ON_SHUTDOWN","DISABLE_OBJECT_CACHE","DISABLE_OPTIMIZER","DISABLE_PRINT_PROGRESS_BAR","DISABLE_PROFILE","DISABLE_PROFILING","DISABLE_PROGRESS_BAR","DISABLE_VERIFICATION","DISABLE_VERIFY_EXTERNAL","DISABLE_VERIFY_FETCH_ROW","DISABLE_VERIFY_PARALLELISM","DISABLE_VERIFY_SERIALIZER","DIVIDE","DUCKDB_COLUMNS","DUCKDB_CONSTRAINTS","DUCKDB_DATABASES","DUCKDB_DEPENDENCIES","DUCKDB_EXTENSIONS","DUCKDB_FUNCTIONS","DUCKDB_INDEXES","DUCKDB_KEYWORDS","DUCKDB_MEMORY","DUCKDB_OPTIMIZERS","DUCKDB_SCHEMAS","DUCKDB_SECRETS","DUCKDB_SEQUENCES","DUCKDB_SETTINGS","DUCKDB_TABLES","DUCKDB_TEMPORARY_FILES","DUCKDB_TYPES","DUCKDB_VIEWS","EDIT","EDITDIST3","ELEMENT_AT","ENABLE_CHECKPOINT_ON_SHUTDOWN","ENABLE_OBJECT_CACHE","ENABLE_OPTIMIZER","ENABLE_PRINT_PROGRESS_BAR","ENABLE_PROFILE","ENABLE_PROFILING","ENABLE_PROGRESS_BAR","ENABLE_VERIFICATION","ENCODE","ENDS_WITH","ENTROPY","ENUM_CODE","ENUM_FIRST","ENUM_LAST","ENUM_RANGE","ENUM_RANGE_BOUNDARY","EPOCH","EPOCH_MS","EPOCH_NS","EPOCH_US","ERA","ERROR","EVEN","EXP","FACTORIAL","FAVG","FDIV","FILTER","FINALIZE","FIRST","FLATTEN","FLOOR","FMOD","FORCE_CHECKPOINT","FORMAT","FORMATREADABLEDECIMALSIZE","FORMATREADABLESIZE","FORMAT_BYTES","FORMAT_PG_TYPE","FORMAT_TYPE","FROM_BASE64","FROM_BINARY","FROM_HEX","FROM_JSON","FROM_JSON_STRICT","FSUM","FUNCTIONS","GAMMA","GCD","GENERATE_SERIES","GENERATE_SUBSCRIPTS","GEN_RANDOM_UUID","GEOMEAN","GEOMETRIC_MEAN","GETENV","GET_BIT","GET_BLOCK_SIZE","GET_CURRENT_TIME","GET_CURRENT_TIMESTAMP","GLOB","GRADE_UP","GREATEST","GREATEST_COMMON_DIVISOR","GROUP_CONCAT","HAMMING","HASH","HAS_ANY_COLUMN_PRIVILEGE","HAS_COLUMN_PRIVILEGE","HAS_DATABASE_PRIVILEGE","HAS_FOREIGN_DATA_WRAPPER_PRIVILEGE","HAS_F
UNCTION_PRIVILEGE","HAS_LANGUAGE_PRIVILEGE","HAS_SCHEMA_PRIVILEGE","HAS_SEQUENCE_PRIVILEGE","HAS_SERVER_PRIVILEGE","HAS_TABLESPACE_PRIVILEGE","HAS_TABLE_PRIVILEGE","HEX","HISTOGRAM","HOUR","ICU_CALENDAR_NAMES","ICU_SORT_KEY","ILIKE_ESCAPE","IMPORT_DATABASE","INDEX_SCAN","INET_CLIENT_ADDR","INET_CLIENT_PORT","INET_SERVER_ADDR","INET_SERVER_PORT","INSTR","IN_SEARCH_PATH","ISFINITE","ISINF","ISNAN","ISODOW","ISOYEAR","JACCARD","JARO_SIMILARITY","JARO_WINKLER_SIMILARITY","JSON_ARRAY","JSON_ARRAY_LENGTH","JSON_CONTAINS","JSON_DESERIALIZE_SQL","JSON_EXECUTE_SERIALIZED_SQL","JSON_EXTRACT","JSON_EXTRACT_PATH","JSON_EXTRACT_PATH_TEXT","JSON_EXTRACT_STRING","JSON_GROUP_ARRAY","JSON_GROUP_OBJECT","JSON_GROUP_STRUCTURE","JSON_KEYS","JSON_MERGE_PATCH","JSON_OBJECT","JSON_QUOTE","JSON_SERIALIZE_PLAN","JSON_SERIALIZE_SQL","JSON_STRUCTURE","JSON_TRANSFORM","JSON_TRANSFORM_STRICT","JSON_TYPE","JSON_VALID","JULIAN","KAHAN_SUM","KURTOSIS","KURTOSIS_POP","LAST","LAST_DAY","LCASE","LCM","LEAST","LEAST_COMMON_MULTIPLE","LEFT","LEFT_GRAPHEME","LEN","LENGTH","LENGTH_GRAPHEME","LEVENSHTEIN","LGAMMA","LIKE_ESCAPE","LIST","LISTAGG","LIST_AGGR","LIST_AGGREGATE","LIST_ANY_VALUE","LIST_APPEND","LIST_APPLY","LIST_APPROX_COUNT_DISTINCT","LIST_AVG","LIST_BIT_AND","LIST_BIT_OR","LIST_BIT_XOR","LIST_BOOL_AND","LIST_BOOL_OR","LIST_CAT","LIST_CONCAT","LIST_CONTAINS","LIST_COSINE_SIMILARITY","LIST_COUNT","LIST_DISTANCE","LIST_DISTINCT","LIST_DOT_PRODUCT","LIST_ELEMENT","LIST_ENTROPY","LIST_EXTRACT","LIST_FILTER","LIST_FIRST","LIST_GRADE_UP","LIST_HAS","LIST_HAS_ALL","LIST_HAS_ANY","LIST_HISTOGRAM","LIST_INDEXOF","LIST_INNER_PRODUCT","LIST_INTERSECT","LIST_KURTOSIS","LIST_KURTOSIS_POP","LIST_LAST","LIST_MAD","LIST_MAX","LIST_MEDIAN","LIST_MIN","LIST_MODE","LIST_PACK","LIST_POSITION","LIST_PREPEND","LIST_PRODUCT","LIST_REDUCE","LIST_RESIZE","LIST_REVERSE","LIST_REVERSE_SORT","LIST_SELECT","LIST_SEM","LIST_SKEWNESS","LIST_SLICE","LIST_SORT","LIST_STDDEV_POP","LIST_STDDEV_SAMP","LIST_STRING_AGG","LIST_SUM","LIST_TRANSFORM","LIST_UNIQUE","LIST_VALUE","LIST_VAR_POP","LIST_VAR_SAMP","LIST_WHERE","LIST_ZIP","LN","LOG","LOG10","LOG2","LOWER","LPAD","LSMODE","LTRIM","MAD","MAKE_DATE","MAKE_TIME","MAKE_TIMESTAMP","MAKE_TIMESTAMPTZ","MAP","MAP_CONCAT","MAP_ENTRIES","MAP_EXTRACT","MAP_FROM_ENTRIES","MAP_KEYS","MAP_VALUES","MAX","MAX_BY","MD5","MD5_NUMBER","MD5_NUMBER_LOWER","MD5_NUMBER_UPPER","MEAN","MEDIAN","METADATA_INFO","MICROSECOND","MILLENNIUM","MILLISECOND","MIN","MINUTE","MIN_BY","MISMATCHES","MOD","MODE","MONTH","MONTHNAME","MULTIPLY","NEXTAFTER","NEXTVAL","NFC_NORMALIZE","NOT_ILIKE_ESCAPE","NOT_LIKE_ESCAPE","NOW","NULLIF","OBJ_DESCRIPTION","OCTET_LENGTH","ORD","PARQUET_FILE_METADATA","PARQUET_KV_METADATA","PARQUET_METADATA","PARQUET_SCAN","PARQUET_SCHEMA","PARSE_DIRNAME","PARSE_DIRPATH","PARSE_FILENAME","PARSE_PATH","PG_COLLATION_IS_VISIBLE","PG_CONF_LOAD_TIME","PG_CONVERSION_IS_VISIBLE","PG_FUNCTION_IS_VISIBLE","PG_GET_CONSTRAINTDEF","PG_GET_EXPR","PG_GET_VIEWDEF","PG_HAS_ROLE","PG_IS_OTHER_TEMP_SCHEMA","PG_MY_TEMP_SCHEMA","PG_OPCLASS_IS_VISIBLE","PG_OPERATOR_IS_VISIBLE","PG_OPFAMILY_IS_VISIBLE","PG_POSTMASTER_START_TIME","PG_SIZE_PRETTY","PG_TABLE_IS_VISIBLE","PG_TIMEZONE_NAMES","PG_TS_CONFIG_IS_VISIBLE","PG_TS_DICT_IS_VISIBLE","PG_TS_PARSER_IS_VISIBLE","PG_TS_TEMPLATE_IS_VISIBLE","PG_TYPEOF","PG_TYPE_IS_VISIBLE","PI","PLATFORM","POSITION","POW","POWER","PRAGMA_COLLATIONS","PRAGMA_DATABASE_SIZE","PRAGMA_METADATA_INFO","PRAGMA_PLATFORM","PRAGMA_SHOW","PRAGMA_STORAGE_INFO","PRAGMA_TABLE_INFO","PRAGMA_USER_AGENT","PRAGMA_VERSION","
PREFIX","PRINTF","PRODUCT","QUANTILE","QUANTILE_CONT","QUANTILE_DISC","QUARTER","RADIANS","RANDOM","RANGE","READFILE","READ_BLOB","READ_CSV","READ_CSV_AUTO","READ_JSON","READ_JSON_AUTO","READ_JSON_OBJECTS","READ_JSON_OBJECTS_AUTO","READ_NDJSON","READ_NDJSON_AUTO","READ_NDJSON_OBJECTS","READ_PARQUET","READ_TEXT","REDUCE","REGEXP_ESCAPE","REGEXP_EXTRACT","REGEXP_EXTRACT_ALL","REGEXP_FULL_MATCH","REGEXP_MATCHES","REGEXP_REPLACE","REGEXP_SPLIT_TO_ARRAY","REGEXP_SPLIT_TO_TABLE","REGR_AVGX","REGR_AVGY","REGR_COUNT","REGR_INTERCEPT","REGR_R2","REGR_SLOPE","REGR_SXX","REGR_SXY","REGR_SYY","REPEAT","REPEAT_ROW","REPLACE","RESERVOIR_QUANTILE","REVERSE","RIGHT","RIGHT_GRAPHEME","ROUND","ROUNDBANKERS","ROUND_EVEN","ROW","ROW_TO_JSON","RPAD","RTRIM","SECOND","SEM","SEQ_SCAN","SESSION_USER","SETSEED","SET_BIT","SHA256","SHA3","SHELL_ADD_SCHEMA","SHELL_ESCAPE_CRNL","SHELL_IDQUOTE","SHELL_MODULE_SCHEMA","SHELL_PUTSNL","SHOBJ_DESCRIPTION","SHOW","SHOW_DATABASES","SHOW_TABLES","SHOW_TABLES_EXPANDED","SIGN","SIGNBIT","SIN","SKEWNESS","SNIFF_CSV","SPLIT","SPLIT_PART","SQL_AUTO_COMPLETE","SQRT","STARTS_WITH","STATS","STDDEV","STDDEV_POP","STDDEV_SAMP","STORAGE_INFO","STRFTIME","STRING_AGG","STRING_SPLIT","STRING_SPLIT_REGEX","STRING_TO_ARRAY","STRIP_ACCENTS","STRLEN","STRPOS","STRPTIME","STRUCT_EXTRACT","STRUCT_INSERT","STRUCT_PACK","STR_SPLIT","STR_SPLIT_REGEX","SUBSTR","SUBSTRING","SUBSTRING_GRAPHEME","SUBTRACT","SUFFIX","SUM","SUMKAHAN","SUMMARY","SUM_NO_OVERFLOW","TABLE_INFO","TAN","TEST_ALL_TYPES","TEST_VECTOR_TYPES","TIMEZONE","TIMEZONE_HOUR","TIMEZONE_MINUTE","TIME_BUCKET","TODAY","TO_BASE","TO_BASE64","TO_BINARY","TO_CENTURIES","TO_DAYS","TO_DECADES","TO_HEX","TO_HOURS","TO_JSON","TO_MICROSECONDS","TO_MILLENNIA","TO_MILLISECONDS","TO_MINUTES","TO_MONTHS","TO_SECONDS","TO_TIMESTAMP","TO_WEEKS","TO_YEARS","TRANSACTION_TIMESTAMP","TRANSLATE","TRIM","TRUNC","TRY_STRPTIME","TXID_CURRENT","TYPEOF","UCASE","UNBIN","UNHEX","UNICODE","UNION_EXTRACT","UNION_TAG","UNION_VALUE","UNNEST","UNPIVOT_LIST","UPPER","USER","USER_AGENT","UUID","VARIANCE","VAR_POP","VAR_SAMP","VECTOR_TYPE","VERIFY_EXTERNAL","VERIFY_FETCH_ROW","VERIFY_PARALLELISM","VERIFY_SERIALIZER","VERSION","WEEK","WEEKDAY","WEEKOFYEAR","WHICH_SECRET","WRITEFILE","XOR","YEAR","YEARWEEK","CAST","COALESCE","RANK","ROW_NUMBER"]},6026:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 
0,T.keywords=["ALL","ANALYSE","ANALYZE","AND","ANY","AS","ASC","ATTACH","ASYMMETRIC","BOTH","CASE","CAST","CHECK","COLLATE","COLUMN","CONSTRAINT","CREATE","DEFAULT","DEFERRABLE","DESC","DESCRIBE","DETACH","DISTINCT","DO","ELSE","END","EXCEPT","FALSE","FETCH","FOR","FOREIGN","FROM","GRANT","GROUP","HAVING","IN","INITIALLY","INTERSECT","INTO","IS","LATERAL","LEADING","LIMIT","NOT","NULL","OFFSET","ON","ONLY","OR","ORDER","PIVOT","PIVOT_LONGER","PIVOT_WIDER","PLACING","PRIMARY","REFERENCES","RETURNING","SELECT","SHOW","SOME","SUMMARIZE","SYMMETRIC","TABLE","THEN","TO","TRAILING","TRUE","UNION","UNIQUE","UNPIVOT","USING","VARIADIC","WHEN","WHERE","WINDOW","WITH"],T.dataTypes=["ARRAY","BIGINT","BINARY","BIT","BITSTRING","BLOB","BOOL","BOOLEAN","BPCHAR","BYTEA","CHAR","DATE","DATETIME","DEC","DECIMAL","DOUBLE","ENUM","FLOAT","FLOAT4","FLOAT8","GUID","HUGEINT","INET","INT","INT1","INT128","INT16","INT2","INT32","INT4","INT64","INT8","INTEGER","INTEGRAL","INTERVAL","JSON","LIST","LOGICAL","LONG","MAP","NUMERIC","NVARCHAR","OID","REAL","ROW","SHORT","SIGNED","SMALLINT","STRING","STRUCT","TEXT","TIME","TIMESTAMP_MS","TIMESTAMP_NS","TIMESTAMP_S","TIMESTAMP_US","TIMESTAMP","TIMESTAMPTZ","TIMETZ","TINYINT","UBIGINT","UHUGEINT","UINT128","UINT16","UINT32","UINT64","UINT8","UINTEGER","UNION","USMALLINT","UTINYINT","UUID","VARBINARY","VARCHAR"]},7340:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.hive=void 0;const A=R(7163),e=R(4039),S=R(6220),I=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT]"]),O=(0,A.expandPhrases)(["WITH","FROM","WHERE","GROUP BY","HAVING","WINDOW","PARTITION BY","ORDER BY","SORT BY","CLUSTER BY","DISTRIBUTE BY","LIMIT","INSERT INTO [TABLE]","VALUES","SET","MERGE INTO","WHEN [NOT] MATCHED [THEN]","UPDATE SET","INSERT [VALUES]","INSERT OVERWRITE [LOCAL] DIRECTORY","LOAD DATA [LOCAL] INPATH","[OVERWRITE] INTO TABLE"]),N=(0,A.expandPhrases)(["CREATE [TEMPORARY] [EXTERNAL] TABLE [IF NOT EXISTS]"]),t=(0,A.expandPhrases)(["CREATE [MATERIALIZED] VIEW [IF NOT EXISTS]","UPDATE","DELETE FROM","DROP TABLE [IF EXISTS]","ALTER TABLE","RENAME TO","TRUNCATE [TABLE]","ALTER","CREATE","USE","DESCRIBE","DROP","FETCH","SHOW","STORED AS","STORED BY","ROW FORMAT"]),L=(0,A.expandPhrases)(["UNION [ALL | DISTINCT]"]),C=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT | FULL} [OUTER] JOIN","{INNER | CROSS} JOIN","LEFT SEMI JOIN"]),_=(0,A.expandPhrases)(["{ROWS | RANGE} BETWEEN"]),s=(0,A.expandPhrases)([]);T.hive={name:"hive",tokenizerOptions:{reservedSelect:I,reservedClauses:[...O,...N,...t],reservedSetOperations:L,reservedJoins:C,reservedKeywordPhrases:_,reservedDataTypePhrases:s,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:e.functions,extraParens:["[]"],stringTypes:['""-bs',"''-bs"],identTypes:["``"],variableTypes:[{quote:"{}",prefixes:["$"],requirePrefix:!0}],operators:["%","~","^","|","&","<=>","==","!","||"]},formatOptions:{onelineClauses:[...N,...t],tabularOnelineClauses:t}}},4039:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 
0,T.functions=["ABS","ACOS","ASIN","ATAN","BIN","BROUND","CBRT","CEIL","CEILING","CONV","COS","DEGREES","EXP","FACTORIAL","FLOOR","GREATEST","HEX","LEAST","LN","LOG","LOG10","LOG2","NEGATIVE","PI","PMOD","POSITIVE","POW","POWER","RADIANS","RAND","ROUND","SHIFTLEFT","SHIFTRIGHT","SHIFTRIGHTUNSIGNED","SIGN","SIN","SQRT","TAN","UNHEX","WIDTH_BUCKET","ARRAY_CONTAINS","MAP_KEYS","MAP_VALUES","SIZE","SORT_ARRAY","BINARY","CAST","ADD_MONTHS","DATE","DATE_ADD","DATE_FORMAT","DATE_SUB","DATEDIFF","DAY","DAYNAME","DAYOFMONTH","DAYOFYEAR","EXTRACT","FROM_UNIXTIME","FROM_UTC_TIMESTAMP","HOUR","LAST_DAY","MINUTE","MONTH","MONTHS_BETWEEN","NEXT_DAY","QUARTER","SECOND","TIMESTAMP","TO_DATE","TO_UTC_TIMESTAMP","TRUNC","UNIX_TIMESTAMP","WEEKOFYEAR","YEAR","ASSERT_TRUE","COALESCE","IF","ISNOTNULL","ISNULL","NULLIF","NVL","ASCII","BASE64","CHARACTER_LENGTH","CHR","CONCAT","CONCAT_WS","CONTEXT_NGRAMS","DECODE","ELT","ENCODE","FIELD","FIND_IN_SET","FORMAT_NUMBER","GET_JSON_OBJECT","IN_FILE","INITCAP","INSTR","LCASE","LENGTH","LEVENSHTEIN","LOCATE","LOWER","LPAD","LTRIM","NGRAMS","OCTET_LENGTH","PARSE_URL","PRINTF","QUOTE","REGEXP_EXTRACT","REGEXP_REPLACE","REPEAT","REVERSE","RPAD","RTRIM","SENTENCES","SOUNDEX","SPACE","SPLIT","STR_TO_MAP","SUBSTR","SUBSTRING","TRANSLATE","TRIM","UCASE","UNBASE64","UPPER","MASK","MASK_FIRST_N","MASK_HASH","MASK_LAST_N","MASK_SHOW_FIRST_N","MASK_SHOW_LAST_N","AES_DECRYPT","AES_ENCRYPT","CRC32","CURRENT_DATABASE","CURRENT_USER","HASH","JAVA_METHOD","LOGGED_IN_USER","MD5","REFLECT","SHA","SHA1","SHA2","SURROGATE_KEY","VERSION","AVG","COLLECT_LIST","COLLECT_SET","CORR","COUNT","COVAR_POP","COVAR_SAMP","HISTOGRAM_NUMERIC","MAX","MIN","NTILE","PERCENTILE","PERCENTILE_APPROX","REGR_AVGX","REGR_AVGY","REGR_COUNT","REGR_INTERCEPT","REGR_R2","REGR_SLOPE","REGR_SXX","REGR_SXY","REGR_SYY","STDDEV_POP","STDDEV_SAMP","SUM","VAR_POP","VAR_SAMP","VARIANCE","EXPLODE","INLINE","JSON_TUPLE","PARSE_URL_TUPLE","POSEXPLODE","STACK","LEAD","LAG","FIRST_VALUE","LAST_VALUE","RANK","ROW_NUMBER","DENSE_RANK","CUME_DIST","PERCENT_RANK","NTILE"]},6220:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 
0,T.keywords=["ADD","ADMIN","AFTER","ANALYZE","ARCHIVE","ASC","BEFORE","BUCKET","BUCKETS","CASCADE","CHANGE","CLUSTER","CLUSTERED","CLUSTERSTATUS","COLLECTION","COLUMNS","COMMENT","COMPACT","COMPACTIONS","COMPUTE","CONCATENATE","CONTINUE","DATA","DATABASES","DATETIME","DAY","DBPROPERTIES","DEFERRED","DEFINED","DELIMITED","DEPENDENCY","DESC","DIRECTORIES","DIRECTORY","DISABLE","DISTRIBUTE","ELEM_TYPE","ENABLE","ESCAPED","EXCLUSIVE","EXPLAIN","EXPORT","FIELDS","FILE","FILEFORMAT","FIRST","FORMAT","FORMATTED","FUNCTIONS","HOLD_DDLTIME","HOUR","IDXPROPERTIES","IGNORE","INDEX","INDEXES","INPATH","INPUTDRIVER","INPUTFORMAT","ITEMS","JAR","KEYS","KEY_TYPE","LIMIT","LINES","LOAD","LOCATION","LOCK","LOCKS","LOGICAL","LONG","MAPJOIN","MATERIALIZED","METADATA","MINUS","MINUTE","MONTH","MSCK","NOSCAN","NO_DROP","OFFLINE","OPTION","OUTPUTDRIVER","OUTPUTFORMAT","OVERWRITE","OWNER","PARTITIONED","PARTITIONS","PLUS","PRETTY","PRINCIPALS","PROTECTION","PURGE","READ","READONLY","REBUILD","RECORDREADER","RECORDWRITER","RELOAD","RENAME","REPAIR","REPLACE","REPLICATION","RESTRICT","REWRITE","ROLE","ROLES","SCHEMA","SCHEMAS","SECOND","SEMI","SERDE","SERDEPROPERTIES","SERVER","SETS","SHARED","SHOW","SHOW_DATABASE","SKEWED","SORT","SORTED","SSL","STATISTICS","STORED","STREAMTABLE","STRING","TABLES","TBLPROPERTIES","TEMPORARY","TERMINATED","TINYINT","TOUCH","TRANSACTIONS","UNARCHIVE","UNDO","UNIONTYPE","UNLOCK","UNSET","UNSIGNED","URI","USE","UTC","UTCTIMESTAMP","VALUE_TYPE","VIEW","WHILE","YEAR","AUTOCOMMIT","ISOLATION","LEVEL","OFFSET","SNAPSHOT","TRANSACTION","WORK","WRITE","ABORT","KEY","LAST","NORELY","NOVALIDATE","NULLS","RELY","VALIDATE","DETAIL","DOW","EXPRESSION","OPERATOR","QUARTER","SUMMARY","VECTORIZATION","WEEK","YEARS","MONTHS","WEEKS","DAYS","HOURS","MINUTES","SECONDS","TIMESTAMPTZ","ZONE","ALL","ALTER","AND","AS","AUTHORIZATION","BETWEEN","BOTH","BY","CASE","CAST","COLUMN","CONF","CREATE","CROSS","CUBE","CURRENT","CURRENT_DATE","CURRENT_TIMESTAMP","CURSOR","DATABASE","DELETE","DESCRIBE","DISTINCT","DROP","ELSE","END","EXCHANGE","EXISTS","EXTENDED","EXTERNAL","FALSE","FETCH","FOLLOWING","FOR","FROM","FULL","FUNCTION","GRANT","GROUP","GROUPING","HAVING","IF","IMPORT","IN","INNER","INSERT","INTERSECT","INTO","IS","JOIN","LATERAL","LEFT","LESS","LIKE","LOCAL","MACRO","MORE","NONE","NOT","NULL","OF","ON","OR","ORDER","OUT","OUTER","OVER","PARTIALSCAN","PARTITION","PERCENT","PRECEDING","PRESERVE","PROCEDURE","RANGE","READS","REDUCE","REVOKE","RIGHT","ROLLUP","ROW","ROWS","SELECT","SET","TABLE","TABLESAMPLE","THEN","TO","TRANSFORM","TRIGGER","TRUE","TRUNCATE","UNBOUNDED","UNION","UNIQUEJOIN","UPDATE","USER","USING","UTC_TMESTAMP","VALUES","WHEN","WHERE","WINDOW","WITH","COMMIT","ONLY","REGEXP","RLIKE","ROLLBACK","START","CACHE","CONSTRAINT","FOREIGN","PRIMARY","REFERENCES","DAYOFWEEK","EXTRACT","FLOOR","VIEWS","TIME","SYNC","TEXTFILE","SEQUENCEFILE","ORC","CSV","TSV","PARQUET","AVRO","RCFILE","JSONFILE","INPUTFORMAT","OUTPUTFORMAT"],T.dataTypes=["ARRAY","BIGINT","BINARY","BOOLEAN","CHAR","DATE","DECIMAL","DOUBLE","FLOAT","INT","INTEGER","INTERVAL","MAP","NUMERIC","PRECISION","SMALLINT","STRUCT","TIMESTAMP","VARCHAR"]},6831:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.postProcess=void 0;const A=R(2437);T.postProcess=function(E){return E.map(((T,R)=>{const e=E[R+1]||A.EOF_TOKEN;if(A.isToken.SET(T)&&"("===e.text)return Object.assign(Object.assign({},T),{type:A.TokenType.RESERVED_FUNCTION_NAME});const S=E[R-1]||A.EOF_TOKEN;return 
A.isToken.VALUES(T)&&"="===S.text?Object.assign(Object.assign({},T),{type:A.TokenType.RESERVED_FUNCTION_NAME}):T}))}},1378:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.mariadb=void 0;const A=R(7163),e=R(6831),S=R(7714),I=R(2381),O=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT | DISTINCTROW]"]),N=(0,A.expandPhrases)(["WITH [RECURSIVE]","FROM","WHERE","GROUP BY","HAVING","PARTITION BY","ORDER BY","LIMIT","OFFSET","FETCH {FIRST | NEXT}","INSERT [LOW_PRIORITY | DELAYED | HIGH_PRIORITY] [IGNORE] [INTO]","REPLACE [LOW_PRIORITY | DELAYED] [INTO]","VALUES","ON DUPLICATE KEY UPDATE","SET","RETURNING"]),t=(0,A.expandPhrases)(["CREATE [OR REPLACE] [TEMPORARY] TABLE [IF NOT EXISTS]"]),L=(0,A.expandPhrases)(["CREATE [OR REPLACE] [SQL SECURITY DEFINER | SQL SECURITY INVOKER] VIEW [IF NOT EXISTS]","UPDATE [LOW_PRIORITY] [IGNORE]","DELETE [LOW_PRIORITY] [QUICK] [IGNORE] FROM","DROP [TEMPORARY] TABLE [IF EXISTS]","ALTER [ONLINE] [IGNORE] TABLE [IF EXISTS]","ADD [COLUMN] [IF NOT EXISTS]","{CHANGE | MODIFY} [COLUMN] [IF EXISTS]","DROP [COLUMN] [IF EXISTS]","RENAME [TO]","RENAME COLUMN","ALTER [COLUMN]","{SET | DROP} DEFAULT","SET {VISIBLE | INVISIBLE}","TRUNCATE [TABLE]","ALTER DATABASE","ALTER DATABASE COMMENT","ALTER EVENT","ALTER FUNCTION","ALTER PROCEDURE","ALTER SCHEMA","ALTER SCHEMA COMMENT","ALTER SEQUENCE","ALTER SERVER","ALTER USER","ALTER VIEW","ANALYZE","ANALYZE TABLE","BACKUP LOCK","BACKUP STAGE","BACKUP UNLOCK","BEGIN","BINLOG","CACHE INDEX","CALL","CHANGE MASTER TO","CHECK TABLE","CHECK VIEW","CHECKSUM TABLE","COMMIT","CREATE AGGREGATE FUNCTION","CREATE DATABASE","CREATE EVENT","CREATE FUNCTION","CREATE INDEX","CREATE PROCEDURE","CREATE ROLE","CREATE SEQUENCE","CREATE SERVER","CREATE SPATIAL INDEX","CREATE TRIGGER","CREATE UNIQUE INDEX","CREATE USER","DEALLOCATE PREPARE","DESCRIBE","DROP DATABASE","DROP EVENT","DROP FUNCTION","DROP INDEX","DROP PREPARE","DROP PROCEDURE","DROP ROLE","DROP SEQUENCE","DROP SERVER","DROP TRIGGER","DROP USER","DROP VIEW","EXECUTE","EXPLAIN","FLUSH","GET DIAGNOSTICS","GET DIAGNOSTICS CONDITION","GRANT","HANDLER","HELP","INSTALL PLUGIN","INSTALL SONAME","KILL","LOAD DATA INFILE","LOAD INDEX INTO CACHE","LOAD XML INFILE","LOCK TABLE","OPTIMIZE TABLE","PREPARE","PURGE BINARY LOGS","PURGE MASTER LOGS","RELEASE SAVEPOINT","RENAME TABLE","RENAME USER","REPAIR TABLE","REPAIR VIEW","RESET MASTER","RESET QUERY CACHE","RESET REPLICA","RESET SLAVE","RESIGNAL","REVOKE","ROLLBACK","SAVEPOINT","SET CHARACTER SET","SET DEFAULT ROLE","SET GLOBAL TRANSACTION","SET NAMES","SET PASSWORD","SET ROLE","SET STATEMENT","SET TRANSACTION","SHOW","SHOW ALL REPLICAS STATUS","SHOW ALL SLAVES STATUS","SHOW AUTHORS","SHOW BINARY LOGS","SHOW BINLOG EVENTS","SHOW BINLOG STATUS","SHOW CHARACTER SET","SHOW CLIENT_STATISTICS","SHOW COLLATION","SHOW COLUMNS","SHOW CONTRIBUTORS","SHOW CREATE DATABASE","SHOW CREATE EVENT","SHOW CREATE FUNCTION","SHOW CREATE PACKAGE","SHOW CREATE PACKAGE BODY","SHOW CREATE PROCEDURE","SHOW CREATE SEQUENCE","SHOW CREATE TABLE","SHOW CREATE TRIGGER","SHOW CREATE USER","SHOW CREATE VIEW","SHOW DATABASES","SHOW ENGINE","SHOW ENGINE INNODB STATUS","SHOW ENGINES","SHOW ERRORS","SHOW EVENTS","SHOW EXPLAIN","SHOW FUNCTION CODE","SHOW FUNCTION STATUS","SHOW GRANTS","SHOW INDEX","SHOW INDEXES","SHOW INDEX_STATISTICS","SHOW KEYS","SHOW LOCALES","SHOW MASTER LOGS","SHOW MASTER STATUS","SHOW OPEN TABLES","SHOW PACKAGE BODY CODE","SHOW PACKAGE BODY STATUS","SHOW PACKAGE STATUS","SHOW PLUGINS","SHOW PLUGINS SONAME","SHOW PRIVILEGES","SHOW PROCEDURE CODE","SHOW 
PROCEDURE STATUS","SHOW PROCESSLIST","SHOW PROFILE","SHOW PROFILES","SHOW QUERY_RESPONSE_TIME","SHOW RELAYLOG EVENTS","SHOW REPLICA","SHOW REPLICA HOSTS","SHOW REPLICA STATUS","SHOW SCHEMAS","SHOW SLAVE","SHOW SLAVE HOSTS","SHOW SLAVE STATUS","SHOW STATUS","SHOW STORAGE ENGINES","SHOW TABLE STATUS","SHOW TABLES","SHOW TRIGGERS","SHOW USER_STATISTICS","SHOW VARIABLES","SHOW WARNINGS","SHOW WSREP_MEMBERSHIP","SHOW WSREP_STATUS","SHUTDOWN","SIGNAL","START ALL REPLICAS","START ALL SLAVES","START REPLICA","START SLAVE","START TRANSACTION","STOP ALL REPLICAS","STOP ALL SLAVES","STOP REPLICA","STOP SLAVE","UNINSTALL PLUGIN","UNINSTALL SONAME","UNLOCK TABLE","USE","XA BEGIN","XA COMMIT","XA END","XA PREPARE","XA RECOVER","XA ROLLBACK","XA START"]),C=(0,A.expandPhrases)(["UNION [ALL | DISTINCT]","EXCEPT [ALL | DISTINCT]","INTERSECT [ALL | DISTINCT]","MINUS [ALL | DISTINCT]"]),_=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT} [OUTER] JOIN","{INNER | CROSS} JOIN","NATURAL JOIN","NATURAL {LEFT | RIGHT} [OUTER] JOIN","STRAIGHT_JOIN"]),s=(0,A.expandPhrases)(["ON {UPDATE | DELETE} [SET NULL | SET DEFAULT]","CHARACTER SET","{ROWS | RANGE} BETWEEN","IDENTIFIED BY"]),r=(0,A.expandPhrases)([]);T.mariadb={name:"mariadb",tokenizerOptions:{reservedSelect:O,reservedClauses:[...N,...t,...L],reservedSetOperations:C,reservedJoins:_,reservedKeywordPhrases:s,reservedDataTypePhrases:r,supportsXor:!0,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:I.functions,stringTypes:['""-qq-bs',"''-qq-bs",{quote:"''-raw",prefixes:["B","X"],requirePrefix:!0}],identTypes:["``"],identChars:{first:"$",rest:"$",allowFirstCharNumber:!0},variableTypes:[{regex:"@@?[A-Za-z0-9_.$]+"},{quote:'""-qq-bs',prefixes:["@"],requirePrefix:!0},{quote:"''-qq-bs",prefixes:["@"],requirePrefix:!0},{quote:"``",prefixes:["@"],requirePrefix:!0}],paramTypes:{positional:!0},lineCommentTypes:["--","#"],operators:["%",":=","&","|","^","~","<<",">>","<=>","&&","||","!","*.*"],postProcess:e.postProcess},formatOptions:{onelineClauses:[...t,...L],tabularOnelineClauses:L}}},2381:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 
0,T.functions=["ADDDATE","ADD_MONTHS","BIT_AND","BIT_OR","BIT_XOR","CAST","COUNT","CUME_DIST","CURDATE","CURTIME","DATE_ADD","DATE_SUB","DATE_FORMAT","DECODE","DENSE_RANK","EXTRACT","FIRST_VALUE","GROUP_CONCAT","JSON_ARRAYAGG","JSON_OBJECTAGG","LAG","LEAD","MAX","MEDIAN","MID","MIN","NOW","NTH_VALUE","NTILE","POSITION","PERCENT_RANK","PERCENTILE_CONT","PERCENTILE_DISC","RANK","ROW_NUMBER","SESSION_USER","STD","STDDEV","STDDEV_POP","STDDEV_SAMP","SUBDATE","SUBSTR","SUBSTRING","SUM","SYSTEM_USER","TRIM","TRIM_ORACLE","VARIANCE","VAR_POP","VAR_SAMP","ABS","ACOS","ADDTIME","AES_DECRYPT","AES_ENCRYPT","ASIN","ATAN","ATAN2","BENCHMARK","BIN","BINLOG_GTID_POS","BIT_COUNT","BIT_LENGTH","CEIL","CEILING","CHARACTER_LENGTH","CHAR_LENGTH","CHR","COERCIBILITY","COLUMN_CHECK","COLUMN_EXISTS","COLUMN_LIST","COLUMN_JSON","COMPRESS","CONCAT","CONCAT_OPERATOR_ORACLE","CONCAT_WS","CONNECTION_ID","CONV","CONVERT_TZ","COS","COT","CRC32","DATEDIFF","DAYNAME","DAYOFMONTH","DAYOFWEEK","DAYOFYEAR","DEGREES","DECODE_HISTOGRAM","DECODE_ORACLE","DES_DECRYPT","DES_ENCRYPT","ELT","ENCODE","ENCRYPT","EXP","EXPORT_SET","EXTRACTVALUE","FIELD","FIND_IN_SET","FLOOR","FORMAT","FOUND_ROWS","FROM_BASE64","FROM_DAYS","FROM_UNIXTIME","GET_LOCK","GREATEST","HEX","IFNULL","INSTR","ISNULL","IS_FREE_LOCK","IS_USED_LOCK","JSON_ARRAY","JSON_ARRAY_APPEND","JSON_ARRAY_INSERT","JSON_COMPACT","JSON_CONTAINS","JSON_CONTAINS_PATH","JSON_DEPTH","JSON_DETAILED","JSON_EXISTS","JSON_EXTRACT","JSON_INSERT","JSON_KEYS","JSON_LENGTH","JSON_LOOSE","JSON_MERGE","JSON_MERGE_PATCH","JSON_MERGE_PRESERVE","JSON_QUERY","JSON_QUOTE","JSON_OBJECT","JSON_REMOVE","JSON_REPLACE","JSON_SET","JSON_SEARCH","JSON_TYPE","JSON_UNQUOTE","JSON_VALID","JSON_VALUE","LAST_DAY","LAST_INSERT_ID","LCASE","LEAST","LENGTH","LENGTHB","LN","LOAD_FILE","LOCATE","LOG","LOG10","LOG2","LOWER","LPAD","LPAD_ORACLE","LTRIM","LTRIM_ORACLE","MAKEDATE","MAKETIME","MAKE_SET","MASTER_GTID_WAIT","MASTER_POS_WAIT","MD5","MONTHNAME","NAME_CONST","NVL","NVL2","OCT","OCTET_LENGTH","ORD","PERIOD_ADD","PERIOD_DIFF","PI","POW","POWER","QUOTE","REGEXP_INSTR","REGEXP_REPLACE","REGEXP_SUBSTR","RADIANS","RAND","RELEASE_ALL_LOCKS","RELEASE_LOCK","REPLACE_ORACLE","REVERSE","ROUND","RPAD","RPAD_ORACLE","RTRIM","RTRIM_ORACLE","SEC_TO_TIME","SHA","SHA1","SHA2","SIGN","SIN","SLEEP","SOUNDEX","SPACE","SQRT","STRCMP","STR_TO_DATE","SUBSTR_ORACLE","SUBSTRING_INDEX","SUBTIME","SYS_GUID","TAN","TIMEDIFF","TIME_FORMAT","TIME_TO_SEC","TO_BASE64","TO_CHAR","TO_DAYS","TO_SECONDS","UCASE","UNCOMPRESS","UNCOMPRESSED_LENGTH","UNHEX","UNIX_TIMESTAMP","UPDATEXML","UPPER","UUID","UUID_SHORT","VERSION","WEEKDAY","WEEKOFYEAR","WSREP_LAST_WRITTEN_GTID","WSREP_LAST_SEEN_GTID","WSREP_SYNC_WAIT_UPTO_GTID","YEARWEEK","COALESCE","NULLIF"]},7714:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 
0,T.keywords=["ACCESSIBLE","ADD","ALL","ALTER","ANALYZE","AND","AS","ASC","ASENSITIVE","BEFORE","BETWEEN","BOTH","BY","CALL","CASCADE","CASE","CHANGE","CHECK","COLLATE","COLUMN","CONDITION","CONSTRAINT","CONTINUE","CONVERT","CREATE","CROSS","CURRENT_DATE","CURRENT_ROLE","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_USER","CURSOR","DATABASE","DATABASES","DAY_HOUR","DAY_MICROSECOND","DAY_MINUTE","DAY_SECOND","DECLARE","DEFAULT","DELAYED","DELETE","DELETE_DOMAIN_ID","DESC","DESCRIBE","DETERMINISTIC","DISTINCT","DISTINCTROW","DIV","DO_DOMAIN_IDS","DROP","DUAL","EACH","ELSE","ELSEIF","ENCLOSED","ESCAPED","EXCEPT","EXISTS","EXIT","EXPLAIN","FALSE","FETCH","FOR","FORCE","FOREIGN","FROM","FULLTEXT","GENERAL","GRANT","GROUP","HAVING","HIGH_PRIORITY","HOUR_MICROSECOND","HOUR_MINUTE","HOUR_SECOND","IF","IGNORE","IGNORE_DOMAIN_IDS","IGNORE_SERVER_IDS","IN","INDEX","INFILE","INNER","INOUT","INSENSITIVE","INSERT","INTERSECT","INTERVAL","INTO","IS","ITERATE","JOIN","KEY","KEYS","KILL","LEADING","LEAVE","LEFT","LIKE","LIMIT","LINEAR","LINES","LOAD","LOCALTIME","LOCALTIMESTAMP","LOCK","LOOP","LOW_PRIORITY","MASTER_HEARTBEAT_PERIOD","MASTER_SSL_VERIFY_SERVER_CERT","MATCH","MAXVALUE","MINUTE_MICROSECOND","MINUTE_SECOND","MOD","MODIFIES","NATURAL","NOT","NO_WRITE_TO_BINLOG","NULL","OFFSET","ON","OPTIMIZE","OPTION","OPTIONALLY","OR","ORDER","OUT","OUTER","OUTFILE","OVER","PAGE_CHECKSUM","PARSE_VCOL_EXPR","PARTITION","POSITION","PRIMARY","PROCEDURE","PURGE","RANGE","READ","READS","READ_WRITE","RECURSIVE","REF_SYSTEM_ID","REFERENCES","REGEXP","RELEASE","RENAME","REPEAT","REPLACE","REQUIRE","RESIGNAL","RESTRICT","RETURN","RETURNING","REVOKE","RIGHT","RLIKE","ROW_NUMBER","ROWS","SCHEMA","SCHEMAS","SECOND_MICROSECOND","SELECT","SENSITIVE","SEPARATOR","SET","SHOW","SIGNAL","SLOW","SPATIAL","SPECIFIC","SQL","SQLEXCEPTION","SQLSTATE","SQLWARNING","SQL_BIG_RESULT","SQL_CALC_FOUND_ROWS","SQL_SMALL_RESULT","SSL","STARTING","STATS_AUTO_RECALC","STATS_PERSISTENT","STATS_SAMPLE_PAGES","STRAIGHT_JOIN","TABLE","TERMINATED","THEN","TO","TRAILING","TRIGGER","TRUE","UNDO","UNION","UNIQUE","UNLOCK","UNSIGNED","UPDATE","USAGE","USE","USING","UTC_DATE","UTC_TIME","UTC_TIMESTAMP","VALUES","WHEN","WHERE","WHILE","WINDOW","WITH","WRITE","XOR","YEAR_MONTH","ZEROFILL"],T.dataTypes=["BIGINT","BINARY","BIT","BLOB","CHAR BYTE","CHAR","CHARACTER","DATETIME","DEC","DECIMAL","DOUBLE PRECISION","DOUBLE","ENUM","FIXED","FLOAT","FLOAT4","FLOAT8","INT","INT1","INT2","INT3","INT4","INT8","INTEGER","LONG","LONGBLOB","LONGTEXT","MEDIUMBLOB","MEDIUMINT","MEDIUMTEXT","MIDDLEINT","NATIONAL CHAR","NATIONAL VARCHAR","NUMERIC","PRECISION","REAL","SMALLINT","TEXT","TIMESTAMP","TINYBLOB","TINYINT","TINYTEXT","VARBINARY","VARCHAR","VARCHARACTER","VARYING","YEAR"]},3358:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.mysql=void 0;const A=R(7163),e=R(6831),S=R(9510),I=R(2761),O=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT | DISTINCTROW]"]),N=(0,A.expandPhrases)(["WITH [RECURSIVE]","FROM","WHERE","GROUP BY","HAVING","WINDOW","PARTITION BY","ORDER BY","LIMIT","OFFSET","INSERT [LOW_PRIORITY | DELAYED | HIGH_PRIORITY] [IGNORE] [INTO]","REPLACE [LOW_PRIORITY | DELAYED] [INTO]","VALUES","ON DUPLICATE KEY UPDATE","SET"]),t=(0,A.expandPhrases)(["CREATE [TEMPORARY] TABLE [IF NOT EXISTS]"]),L=(0,A.expandPhrases)(["CREATE [OR REPLACE] [SQL SECURITY DEFINER | SQL SECURITY INVOKER] VIEW [IF NOT EXISTS]","UPDATE [LOW_PRIORITY] [IGNORE]","DELETE [LOW_PRIORITY] [QUICK] [IGNORE] FROM","DROP [TEMPORARY] TABLE [IF EXISTS]","ALTER TABLE","ADD 
[COLUMN]","{CHANGE | MODIFY} [COLUMN]","DROP [COLUMN]","RENAME [TO | AS]","RENAME COLUMN","ALTER [COLUMN]","{SET | DROP} DEFAULT","TRUNCATE [TABLE]","ALTER DATABASE","ALTER EVENT","ALTER FUNCTION","ALTER INSTANCE","ALTER LOGFILE GROUP","ALTER PROCEDURE","ALTER RESOURCE GROUP","ALTER SERVER","ALTER TABLESPACE","ALTER USER","ALTER VIEW","ANALYZE TABLE","BINLOG","CACHE INDEX","CALL","CHANGE MASTER TO","CHANGE REPLICATION FILTER","CHANGE REPLICATION SOURCE TO","CHECK TABLE","CHECKSUM TABLE","CLONE","COMMIT","CREATE DATABASE","CREATE EVENT","CREATE FUNCTION","CREATE FUNCTION","CREATE INDEX","CREATE LOGFILE GROUP","CREATE PROCEDURE","CREATE RESOURCE GROUP","CREATE ROLE","CREATE SERVER","CREATE SPATIAL REFERENCE SYSTEM","CREATE TABLESPACE","CREATE TRIGGER","CREATE USER","DEALLOCATE PREPARE","DESCRIBE","DROP DATABASE","DROP EVENT","DROP FUNCTION","DROP FUNCTION","DROP INDEX","DROP LOGFILE GROUP","DROP PROCEDURE","DROP RESOURCE GROUP","DROP ROLE","DROP SERVER","DROP SPATIAL REFERENCE SYSTEM","DROP TABLESPACE","DROP TRIGGER","DROP USER","DROP VIEW","EXECUTE","EXPLAIN","FLUSH","GRANT","HANDLER","HELP","IMPORT TABLE","INSTALL COMPONENT","INSTALL PLUGIN","KILL","LOAD DATA","LOAD INDEX INTO CACHE","LOAD XML","LOCK INSTANCE FOR BACKUP","LOCK TABLES","MASTER_POS_WAIT","OPTIMIZE TABLE","PREPARE","PURGE BINARY LOGS","RELEASE SAVEPOINT","RENAME TABLE","RENAME USER","REPAIR TABLE","RESET","RESET MASTER","RESET PERSIST","RESET REPLICA","RESET SLAVE","RESTART","REVOKE","ROLLBACK","ROLLBACK TO SAVEPOINT","SAVEPOINT","SET CHARACTER SET","SET DEFAULT ROLE","SET NAMES","SET PASSWORD","SET RESOURCE GROUP","SET ROLE","SET TRANSACTION","SHOW","SHOW BINARY LOGS","SHOW BINLOG EVENTS","SHOW CHARACTER SET","SHOW COLLATION","SHOW COLUMNS","SHOW CREATE DATABASE","SHOW CREATE EVENT","SHOW CREATE FUNCTION","SHOW CREATE PROCEDURE","SHOW CREATE TABLE","SHOW CREATE TRIGGER","SHOW CREATE USER","SHOW CREATE VIEW","SHOW DATABASES","SHOW ENGINE","SHOW ENGINES","SHOW ERRORS","SHOW EVENTS","SHOW FUNCTION CODE","SHOW FUNCTION STATUS","SHOW GRANTS","SHOW INDEX","SHOW MASTER STATUS","SHOW OPEN TABLES","SHOW PLUGINS","SHOW PRIVILEGES","SHOW PROCEDURE CODE","SHOW PROCEDURE STATUS","SHOW PROCESSLIST","SHOW PROFILE","SHOW PROFILES","SHOW RELAYLOG EVENTS","SHOW REPLICA STATUS","SHOW REPLICAS","SHOW SLAVE","SHOW SLAVE HOSTS","SHOW STATUS","SHOW TABLE STATUS","SHOW TABLES","SHOW TRIGGERS","SHOW VARIABLES","SHOW WARNINGS","SHUTDOWN","SOURCE_POS_WAIT","START GROUP_REPLICATION","START REPLICA","START SLAVE","START TRANSACTION","STOP GROUP_REPLICATION","STOP REPLICA","STOP SLAVE","TABLE","UNINSTALL COMPONENT","UNINSTALL PLUGIN","UNLOCK INSTANCE","UNLOCK TABLES","USE","XA","ITERATE","LEAVE","LOOP","REPEAT","RETURN","WHILE"]),C=(0,A.expandPhrases)(["UNION [ALL | DISTINCT]"]),_=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT} [OUTER] JOIN","{INNER | CROSS} JOIN","NATURAL [INNER] JOIN","NATURAL {LEFT | RIGHT} [OUTER] JOIN","STRAIGHT_JOIN"]),s=(0,A.expandPhrases)(["ON {UPDATE | DELETE} [SET NULL]","CHARACTER SET","{ROWS | RANGE} BETWEEN","IDENTIFIED 
BY"]),r=(0,A.expandPhrases)([]);T.mysql={name:"mysql",tokenizerOptions:{reservedSelect:O,reservedClauses:[...N,...t,...L],reservedSetOperations:C,reservedJoins:_,reservedKeywordPhrases:s,reservedDataTypePhrases:r,supportsXor:!0,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:I.functions,stringTypes:['""-qq-bs',{quote:"''-qq-bs",prefixes:["N"]},{quote:"''-raw",prefixes:["B","X"],requirePrefix:!0}],identTypes:["``"],identChars:{first:"$",rest:"$",allowFirstCharNumber:!0},variableTypes:[{regex:"@@?[A-Za-z0-9_.$]+"},{quote:'""-qq-bs',prefixes:["@"],requirePrefix:!0},{quote:"''-qq-bs",prefixes:["@"],requirePrefix:!0},{quote:"``",prefixes:["@"],requirePrefix:!0}],paramTypes:{positional:!0},lineCommentTypes:["--","#"],operators:["%",":=","&","|","^","~","<<",">>","<=>","->","->>","&&","||","!","*.*"],postProcess:e.postProcess},formatOptions:{onelineClauses:[...t,...L],tabularOnelineClauses:L}}},2761:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 0,T.functions=["ABS","ACOS","ADDDATE","ADDTIME","AES_DECRYPT","AES_ENCRYPT","ANY_VALUE","ASCII","ASIN","ATAN","ATAN2","AVG","BENCHMARK","BIN","BIN_TO_UUID","BINARY","BIT_AND","BIT_COUNT","BIT_LENGTH","BIT_OR","BIT_XOR","CAN_ACCESS_COLUMN","CAN_ACCESS_DATABASE","CAN_ACCESS_TABLE","CAN_ACCESS_USER","CAN_ACCESS_VIEW","CAST","CEIL","CEILING","CHAR","CHAR_LENGTH","CHARACTER_LENGTH","CHARSET","COALESCE","COERCIBILITY","COLLATION","COMPRESS","CONCAT","CONCAT_WS","CONNECTION_ID","CONV","CONVERT","CONVERT_TZ","COS","COT","COUNT","CRC32","CUME_DIST","CURDATE","CURRENT_DATE","CURRENT_ROLE","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_USER","CURTIME","DATABASE","DATE","DATE_ADD","DATE_FORMAT","DATE_SUB","DATEDIFF","DAY","DAYNAME","DAYOFMONTH","DAYOFWEEK","DAYOFYEAR","DEFAULT","DEGREES","DENSE_RANK","DIV","ELT","EXP","EXPORT_SET","EXTRACT","EXTRACTVALUE","FIELD","FIND_IN_SET","FIRST_VALUE","FLOOR","FORMAT","FORMAT_BYTES","FORMAT_PICO_TIME","FOUND_ROWS","FROM_BASE64","FROM_DAYS","FROM_UNIXTIME","GEOMCOLLECTION","GEOMETRYCOLLECTION","GET_DD_COLUMN_PRIVILEGES","GET_DD_CREATE_OPTIONS","GET_DD_INDEX_SUB_PART_LENGTH","GET_FORMAT","GET_LOCK","GREATEST","GROUP_CONCAT","GROUPING","GTID_SUBSET","GTID_SUBTRACT","HEX","HOUR","ICU_VERSION","IF","IFNULL","INET_ATON","INET_NTOA","INET6_ATON","INET6_NTOA","INSERT","INSTR","INTERNAL_AUTO_INCREMENT","INTERNAL_AVG_ROW_LENGTH","INTERNAL_CHECK_TIME","INTERNAL_CHECKSUM","INTERNAL_DATA_FREE","INTERNAL_DATA_LENGTH","INTERNAL_DD_CHAR_LENGTH","INTERNAL_GET_COMMENT_OR_ERROR","INTERNAL_GET_ENABLED_ROLE_JSON","INTERNAL_GET_HOSTNAME","INTERNAL_GET_USERNAME","INTERNAL_GET_VIEW_WARNING_OR_ERROR","INTERNAL_INDEX_COLUMN_CARDINALITY","INTERNAL_INDEX_LENGTH","INTERNAL_IS_ENABLED_ROLE","INTERNAL_IS_MANDATORY_ROLE","INTERNAL_KEYS_DISABLED","INTERNAL_MAX_DATA_LENGTH","INTERNAL_TABLE_ROWS","INTERNAL_UPDATE_TIME","INTERVAL","IS","IS_FREE_LOCK","IS_IPV4","IS_IPV4_COMPAT","IS_IPV4_MAPPED","IS_IPV6","IS NOT","IS NOT NULL","IS 
NULL","IS_USED_LOCK","IS_UUID","ISNULL","JSON_ARRAY","JSON_ARRAY_APPEND","JSON_ARRAY_INSERT","JSON_ARRAYAGG","JSON_CONTAINS","JSON_CONTAINS_PATH","JSON_DEPTH","JSON_EXTRACT","JSON_INSERT","JSON_KEYS","JSON_LENGTH","JSON_MERGE","JSON_MERGE_PATCH","JSON_MERGE_PRESERVE","JSON_OBJECT","JSON_OBJECTAGG","JSON_OVERLAPS","JSON_PRETTY","JSON_QUOTE","JSON_REMOVE","JSON_REPLACE","JSON_SCHEMA_VALID","JSON_SCHEMA_VALIDATION_REPORT","JSON_SEARCH","JSON_SET","JSON_STORAGE_FREE","JSON_STORAGE_SIZE","JSON_TABLE","JSON_TYPE","JSON_UNQUOTE","JSON_VALID","JSON_VALUE","LAG","LAST_DAY","LAST_INSERT_ID","LAST_VALUE","LCASE","LEAD","LEAST","LEFT","LENGTH","LIKE","LINESTRING","LN","LOAD_FILE","LOCALTIME","LOCALTIMESTAMP","LOCATE","LOG","LOG10","LOG2","LOWER","LPAD","LTRIM","MAKE_SET","MAKEDATE","MAKETIME","MASTER_POS_WAIT","MATCH","MAX","MBRCONTAINS","MBRCOVEREDBY","MBRCOVERS","MBRDISJOINT","MBREQUALS","MBRINTERSECTS","MBROVERLAPS","MBRTOUCHES","MBRWITHIN","MD5","MEMBER OF","MICROSECOND","MID","MIN","MINUTE","MOD","MONTH","MONTHNAME","MULTILINESTRING","MULTIPOINT","MULTIPOLYGON","NAME_CONST","NOT","NOT IN","NOT LIKE","NOT REGEXP","NOW","NTH_VALUE","NTILE","NULLIF","OCT","OCTET_LENGTH","ORD","PERCENT_RANK","PERIOD_ADD","PERIOD_DIFF","PI","POINT","POLYGON","POSITION","POW","POWER","PS_CURRENT_THREAD_ID","PS_THREAD_ID","QUARTER","QUOTE","RADIANS","RAND","RANDOM_BYTES","RANK","REGEXP","REGEXP_INSTR","REGEXP_LIKE","REGEXP_REPLACE","REGEXP_SUBSTR","RELEASE_ALL_LOCKS","RELEASE_LOCK","REPEAT","REPLACE","REVERSE","RIGHT","RLIKE","ROLES_GRAPHML","ROUND","ROW_COUNT","ROW_NUMBER","RPAD","RTRIM","SCHEMA","SEC_TO_TIME","SECOND","SESSION_USER","SHA1","SHA2","SIGN","SIN","SLEEP","SOUNDEX","SOUNDS LIKE","SOURCE_POS_WAIT","SPACE","SQRT","ST_AREA","ST_ASBINARY","ST_ASGEOJSON","ST_ASTEXT","ST_BUFFER","ST_BUFFER_STRATEGY","ST_CENTROID","ST_COLLECT","ST_CONTAINS","ST_CONVEXHULL","ST_CROSSES","ST_DIFFERENCE","ST_DIMENSION","ST_DISJOINT","ST_DISTANCE","ST_DISTANCE_SPHERE","ST_ENDPOINT","ST_ENVELOPE","ST_EQUALS","ST_EXTERIORRING","ST_FRECHETDISTANCE","ST_GEOHASH","ST_GEOMCOLLFROMTEXT","ST_GEOMCOLLFROMWKB","ST_GEOMETRYN","ST_GEOMETRYTYPE","ST_GEOMFROMGEOJSON","ST_GEOMFROMTEXT","ST_GEOMFROMWKB","ST_HAUSDORFFDISTANCE","ST_INTERIORRINGN","ST_INTERSECTION","ST_INTERSECTS","ST_ISCLOSED","ST_ISEMPTY","ST_ISSIMPLE","ST_ISVALID","ST_LATFROMGEOHASH","ST_LATITUDE","ST_LENGTH","ST_LINEFROMTEXT","ST_LINEFROMWKB","ST_LINEINTERPOLATEPOINT","ST_LINEINTERPOLATEPOINTS","ST_LONGFROMGEOHASH","ST_LONGITUDE","ST_MAKEENVELOPE","ST_MLINEFROMTEXT","ST_MLINEFROMWKB","ST_MPOINTFROMTEXT","ST_MPOINTFROMWKB","ST_MPOLYFROMTEXT","ST_MPOLYFROMWKB","ST_NUMGEOMETRIES","ST_NUMINTERIORRING","ST_NUMPOINTS","ST_OVERLAPS","ST_POINTATDISTANCE","ST_POINTFROMGEOHASH","ST_POINTFROMTEXT","ST_POINTFROMWKB","ST_POINTN","ST_POLYFROMTEXT","ST_POLYFROMWKB","ST_SIMPLIFY","ST_SRID","ST_STARTPOINT","ST_SWAPXY","ST_SYMDIFFERENCE","ST_TOUCHES","ST_TRANSFORM","ST_UNION","ST_VALIDATE","ST_WITHIN","ST_X","ST_Y","STATEMENT_DIGEST","STATEMENT_DIGEST_TEXT","STD","STDDEV","STDDEV_POP","STDDEV_SAMP","STR_TO_DATE","STRCMP","SUBDATE","SUBSTR","SUBSTRING","SUBSTRING_INDEX","SUBTIME","SUM","SYSDATE","SYSTEM_USER","TAN","TIME","TIME_FORMAT","TIME_TO_SEC","TIMEDIFF","TIMESTAMP","TIMESTAMPADD","TIMESTAMPDIFF","TO_BASE64","TO_DAYS","TO_SECONDS","TRIM","TRUNCATE","UCASE","UNCOMPRESS","UNCOMPRESSED_LENGTH","UNHEX","UNIX_TIMESTAMP","UPDATEXML","UPPER","UTC_DATE","UTC_TIME","UTC_TIMESTAMP","UUID","UUID_SHORT","UUID_TO_BIN","VALIDATE_PASSWORD_STRENGTH","VALUES","VAR_POP","VAR_SAMP","VARIANCE","VERSION","WAIT_FOR_E
XECUTED_GTID_SET","WAIT_UNTIL_SQL_THREAD_AFTER_GTIDS","WEEK","WEEKDAY","WEEKOFYEAR","WEIGHT_STRING","YEAR","YEARWEEK"]},9510:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 0,T.keywords=["ACCESSIBLE","ADD","ALL","ALTER","ANALYZE","AND","AS","ASC","ASENSITIVE","BEFORE","BETWEEN","BOTH","BY","CALL","CASCADE","CASE","CHANGE","CHECK","COLLATE","COLUMN","CONDITION","CONSTRAINT","CONTINUE","CONVERT","CREATE","CROSS","CUBE","CUME_DIST","CURRENT_DATE","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_USER","CURSOR","DATABASE","DATABASES","DAY_HOUR","DAY_MICROSECOND","DAY_MINUTE","DAY_SECOND","DECLARE","DEFAULT","DELAYED","DELETE","DENSE_RANK","DESC","DESCRIBE","DETERMINISTIC","DISTINCT","DISTINCTROW","DIV","DROP","DUAL","EACH","ELSE","ELSEIF","EMPTY","ENCLOSED","ESCAPED","EXCEPT","EXISTS","EXIT","EXPLAIN","FALSE","FETCH","FIRST_VALUE","FOR","FORCE","FOREIGN","FROM","FULLTEXT","FUNCTION","GENERATED","GET","GRANT","GROUP","GROUPING","GROUPS","HAVING","HIGH_PRIORITY","HOUR_MICROSECOND","HOUR_MINUTE","HOUR_SECOND","IF","IGNORE","IN","INDEX","INFILE","INNER","INOUT","INSENSITIVE","INSERT","IN","INTERSECT","INTERVAL","INTO","IO_AFTER_GTIDS","IO_BEFORE_GTIDS","IS","ITERATE","JOIN","JSON_TABLE","KEY","KEYS","KILL","LAG","LAST_VALUE","LATERAL","LEAD","LEADING","LEAVE","LEFT","LIKE","LIMIT","LINEAR","LINES","LOAD","LOCALTIME","LOCALTIMESTAMP","LOCK","LONG","LOOP","LOW_PRIORITY","MASTER_BIND","MASTER_SSL_VERIFY_SERVER_CERT","MATCH","MAXVALUE","MINUTE_MICROSECOND","MINUTE_SECOND","MOD","MODIFIES","NATURAL","NOT","NO_WRITE_TO_BINLOG","NTH_VALUE","NTILE","NULL","OF","ON","OPTIMIZE","OPTIMIZER_COSTS","OPTION","OPTIONALLY","OR","ORDER","OUT","OUTER","OUTFILE","OVER","PARTITION","PERCENT_RANK","PRIMARY","PROCEDURE","PURGE","RANGE","RANK","READ","READS","READ_WRITE","RECURSIVE","REFERENCES","REGEXP","RELEASE","RENAME","REPEAT","REPLACE","REQUIRE","RESIGNAL","RESTRICT","RETURN","REVOKE","RIGHT","RLIKE","ROW","ROWS","ROW_NUMBER","SCHEMA","SCHEMAS","SECOND_MICROSECOND","SELECT","SENSITIVE","SEPARATOR","SET","SHOW","SIGNAL","SPATIAL","SPECIFIC","SQL","SQLEXCEPTION","SQLSTATE","SQLWARNING","SQL_BIG_RESULT","SQL_CALC_FOUND_ROWS","SQL_SMALL_RESULT","SSL","STARTING","STORED","STRAIGHT_JOIN","SYSTEM","TABLE","TERMINATED","THEN","TO","TRAILING","TRIGGER","TRUE","UNDO","UNION","UNIQUE","UNLOCK","UNSIGNED","UPDATE","USAGE","USE","USING","UTC_DATE","UTC_TIME","UTC_TIMESTAMP","VALUES","VIRTUAL","WHEN","WHERE","WHILE","WINDOW","WITH","WRITE","XOR","YEAR_MONTH","ZEROFILL"],T.dataTypes=["BIGINT","BINARY","BIT","BLOB","BOOL","BOOLEAN","CHAR","CHARACTER","DATE","DATETIME","DEC","DECIMAL","DOUBLE PRECISION","DOUBLE","ENUM","FIXED","FLOAT","FLOAT4","FLOAT8","INT","INT1","INT2","INT3","INT4","INT8","INTEGER","LONGBLOB","LONGTEXT","MEDIUMBLOB","MEDIUMINT","MEDIUMTEXT","MIDDLEINT","NATIONAL CHAR","NATIONAL VARCHAR","NUMERIC","PRECISION","REAL","SMALLINT","TEXT","TIME","TIMESTAMP","TINYBLOB","TINYINT","TINYTEXT","VARBINARY","VARCHAR","VARCHARACTER","VARYING","YEAR"]},7328:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.n1ql=void 0;const A=R(7163),e=R(7227),S=R(5720),I=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT]"]),O=(0,A.expandPhrases)(["WITH","FROM","WHERE","GROUP BY","HAVING","WINDOW","PARTITION BY","ORDER BY","LIMIT","OFFSET","INSERT INTO","VALUES","SET","MERGE INTO","WHEN [NOT] MATCHED THEN","UPDATE SET","INSERT","NEST","UNNEST","RETURNING"]),N=(0,A.expandPhrases)(["UPDATE","DELETE FROM","SET SCHEMA","ADVISE","ALTER INDEX","BEGIN TRANSACTION","BUILD INDEX","COMMIT 
TRANSACTION","CREATE COLLECTION","CREATE FUNCTION","CREATE INDEX","CREATE PRIMARY INDEX","CREATE SCOPE","DROP COLLECTION","DROP FUNCTION","DROP INDEX","DROP PRIMARY INDEX","DROP SCOPE","EXECUTE","EXECUTE FUNCTION","EXPLAIN","GRANT","INFER","PREPARE","REVOKE","ROLLBACK TRANSACTION","SAVEPOINT","SET TRANSACTION","UPDATE STATISTICS","UPSERT","LET","SET CURRENT SCHEMA","SHOW","USE [PRIMARY] KEYS"]),t=(0,A.expandPhrases)(["UNION [ALL]","EXCEPT [ALL]","INTERSECT [ALL]"]),L=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT} [OUTER] JOIN","INNER JOIN"]),C=(0,A.expandPhrases)(["{ROWS | RANGE | GROUPS} BETWEEN"]),_=(0,A.expandPhrases)([]);T.n1ql={name:"n1ql",tokenizerOptions:{reservedSelect:I,reservedClauses:[...O,...N],reservedSetOperations:t,reservedJoins:L,reservedKeywordPhrases:C,reservedDataTypePhrases:_,supportsXor:!0,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:e.functions,stringTypes:['""-bs',"''-bs"],identTypes:["``"],extraParens:["[]","{}"],paramTypes:{positional:!0,numbered:["$"],named:["$"]},lineCommentTypes:["#","--"],operators:["%","==",":","||"]},formatOptions:{onelineClauses:N}}},7227:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 0,T.functions=["ABORT","ABS","ACOS","ADVISOR","ARRAY_AGG","ARRAY_AGG","ARRAY_APPEND","ARRAY_AVG","ARRAY_BINARY_SEARCH","ARRAY_CONCAT","ARRAY_CONTAINS","ARRAY_COUNT","ARRAY_DISTINCT","ARRAY_EXCEPT","ARRAY_FLATTEN","ARRAY_IFNULL","ARRAY_INSERT","ARRAY_INTERSECT","ARRAY_LENGTH","ARRAY_MAX","ARRAY_MIN","ARRAY_MOVE","ARRAY_POSITION","ARRAY_PREPEND","ARRAY_PUT","ARRAY_RANGE","ARRAY_REMOVE","ARRAY_REPEAT","ARRAY_REPLACE","ARRAY_REVERSE","ARRAY_SORT","ARRAY_STAR","ARRAY_SUM","ARRAY_SYMDIFF","ARRAY_SYMDIFF1","ARRAY_SYMDIFFN","ARRAY_UNION","ASIN","ATAN","ATAN2","AVG","BASE64","BASE64_DECODE","BASE64_ENCODE","BITAND ","BITCLEAR ","BITNOT ","BITOR ","BITSET ","BITSHIFT ","BITTEST ","BITXOR 
","CEIL","CLOCK_LOCAL","CLOCK_MILLIS","CLOCK_STR","CLOCK_TZ","CLOCK_UTC","COALESCE","CONCAT","CONCAT2","CONTAINS","CONTAINS_TOKEN","CONTAINS_TOKEN_LIKE","CONTAINS_TOKEN_REGEXP","COS","COUNT","COUNT","COUNTN","CUME_DIST","CURL","DATE_ADD_MILLIS","DATE_ADD_STR","DATE_DIFF_MILLIS","DATE_DIFF_STR","DATE_FORMAT_STR","DATE_PART_MILLIS","DATE_PART_STR","DATE_RANGE_MILLIS","DATE_RANGE_STR","DATE_TRUNC_MILLIS","DATE_TRUNC_STR","DECODE","DECODE_JSON","DEGREES","DENSE_RANK","DURATION_TO_STR","ENCODED_SIZE","ENCODE_JSON","EXP","FIRST_VALUE","FLOOR","GREATEST","HAS_TOKEN","IFINF","IFMISSING","IFMISSINGORNULL","IFNAN","IFNANORINF","IFNULL","INITCAP","ISARRAY","ISATOM","ISBITSET","ISBOOLEAN","ISNUMBER","ISOBJECT","ISSTRING","LAG","LAST_VALUE","LEAD","LEAST","LENGTH","LN","LOG","LOWER","LTRIM","MAX","MEAN","MEDIAN","META","MILLIS","MILLIS_TO_LOCAL","MILLIS_TO_STR","MILLIS_TO_TZ","MILLIS_TO_UTC","MILLIS_TO_ZONE_NAME","MIN","MISSINGIF","NANIF","NEGINFIF","NOW_LOCAL","NOW_MILLIS","NOW_STR","NOW_TZ","NOW_UTC","NTH_VALUE","NTILE","NULLIF","NVL","NVL2","OBJECT_ADD","OBJECT_CONCAT","OBJECT_INNER_PAIRS","OBJECT_INNER_VALUES","OBJECT_LENGTH","OBJECT_NAMES","OBJECT_PAIRS","OBJECT_PUT","OBJECT_REMOVE","OBJECT_RENAME","OBJECT_REPLACE","OBJECT_UNWRAP","OBJECT_VALUES","PAIRS","PERCENT_RANK","PI","POLY_LENGTH","POSINFIF","POSITION","POWER","RADIANS","RANDOM","RANK","RATIO_TO_REPORT","REGEXP_CONTAINS","REGEXP_LIKE","REGEXP_MATCHES","REGEXP_POSITION","REGEXP_REPLACE","REGEXP_SPLIT","REGEX_CONTAINS","REGEX_LIKE","REGEX_MATCHES","REGEX_POSITION","REGEX_REPLACE","REGEX_SPLIT","REPEAT","REPLACE","REVERSE","ROUND","ROW_NUMBER","RTRIM","SEARCH","SEARCH_META","SEARCH_SCORE","SIGN","SIN","SPLIT","SQRT","STDDEV","STDDEV_POP","STDDEV_SAMP","STR_TO_DURATION","STR_TO_MILLIS","STR_TO_TZ","STR_TO_UTC","STR_TO_ZONE_NAME","SUBSTR","SUFFIXES","SUM","TAN","TITLE","TOARRAY","TOATOM","TOBOOLEAN","TOKENS","TOKENS","TONUMBER","TOOBJECT","TOSTRING","TRIM","TRUNC","UPPER","UUID","VARIANCE","VARIANCE_POP","VARIANCE_SAMP","VAR_POP","VAR_SAMP","WEEKDAY_MILLIS","WEEKDAY_STR","CAST"]},5720:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 
0,T.keywords=["ADVISE","ALL","ALTER","ANALYZE","AND","ANY","ARRAY","AS","ASC","AT","BEGIN","BETWEEN","BINARY","BOOLEAN","BREAK","BUCKET","BUILD","BY","CALL","CASE","CAST","CLUSTER","COLLATE","COLLECTION","COMMIT","COMMITTED","CONNECT","CONTINUE","CORRELATED","COVER","CREATE","CURRENT","DATABASE","DATASET","DATASTORE","DECLARE","DECREMENT","DELETE","DERIVED","DESC","DESCRIBE","DISTINCT","DO","DROP","EACH","ELEMENT","ELSE","END","EVERY","EXCEPT","EXCLUDE","EXECUTE","EXISTS","EXPLAIN","FALSE","FETCH","FILTER","FIRST","FLATTEN","FLUSH","FOLLOWING","FOR","FORCE","FROM","FTS","FUNCTION","GOLANG","GRANT","GROUP","GROUPS","GSI","HASH","HAVING","IF","IGNORE","ILIKE","IN","INCLUDE","INCREMENT","INDEX","INFER","INLINE","INNER","INSERT","INTERSECT","INTO","IS","ISOLATION","JAVASCRIPT","JOIN","KEY","KEYS","KEYSPACE","KNOWN","LANGUAGE","LAST","LEFT","LET","LETTING","LEVEL","LIKE","LIMIT","LSM","MAP","MAPPING","MATCHED","MATERIALIZED","MERGE","MINUS","MISSING","NAMESPACE","NEST","NL","NO","NOT","NTH_VALUE","NULL","NULLS","NUMBER","OBJECT","OFFSET","ON","OPTION","OPTIONS","OR","ORDER","OTHERS","OUTER","OVER","PARSE","PARTITION","PASSWORD","PATH","POOL","PRECEDING","PREPARE","PRIMARY","PRIVATE","PRIVILEGE","PROBE","PROCEDURE","PUBLIC","RANGE","RAW","REALM","REDUCE","RENAME","RESPECT","RETURN","RETURNING","REVOKE","RIGHT","ROLE","ROLLBACK","ROW","ROWS","SATISFIES","SAVEPOINT","SCHEMA","SCOPE","SELECT","SELF","SEMI","SET","SHOW","SOME","START","STATISTICS","STRING","SYSTEM","THEN","TIES","TO","TRAN","TRANSACTION","TRIGGER","TRUE","TRUNCATE","UNBOUNDED","UNDER","UNION","UNIQUE","UNKNOWN","UNNEST","UNSET","UPDATE","UPSERT","USE","USER","USING","VALIDATE","VALUE","VALUED","VALUES","VIA","VIEW","WHEN","WHERE","WHILE","WINDOW","WITH","WITHIN","WORK","XOR"],T.dataTypes=[]},6910:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.plsql=void 0;const A=R(7163),e=R(2437),S=R(1670),I=R(4937),O=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT | UNIQUE]"]),N=(0,A.expandPhrases)(["WITH","FROM","WHERE","GROUP BY","HAVING","PARTITION BY","ORDER [SIBLINGS] BY","OFFSET","FETCH {FIRST | NEXT}","FOR UPDATE [OF]","INSERT [INTO | ALL INTO]","VALUES","SET","MERGE [INTO]","WHEN [NOT] MATCHED [THEN]","UPDATE SET","RETURNING"]),t=(0,A.expandPhrases)(["CREATE [GLOBAL TEMPORARY | PRIVATE TEMPORARY | SHARDED | DUPLICATED | IMMUTABLE BLOCKCHAIN | BLOCKCHAIN | IMMUTABLE] TABLE"]),L=(0,A.expandPhrases)(["CREATE [OR REPLACE] [NO FORCE | FORCE] [EDITIONING | EDITIONABLE | EDITIONABLE EDITIONING | NONEDITIONABLE] VIEW","CREATE MATERIALIZED VIEW","UPDATE [ONLY]","DELETE FROM [ONLY]","DROP TABLE","ALTER TABLE","ADD","DROP {COLUMN | UNUSED COLUMNS | COLUMNS CONTINUE}","MODIFY","RENAME TO","RENAME COLUMN","TRUNCATE TABLE","SET SCHEMA","BEGIN","CONNECT BY","DECLARE","EXCEPT","EXCEPTION","LOOP","START WITH"]),C=(0,A.expandPhrases)(["UNION [ALL]","MINUS","INTERSECT"]),_=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT | FULL} [OUTER] JOIN","{INNER | CROSS} JOIN","NATURAL [INNER] JOIN","NATURAL {LEFT | RIGHT | FULL} [OUTER] JOIN","{CROSS | OUTER} APPLY"]),s=(0,A.expandPhrases)(["ON {UPDATE | DELETE} [SET NULL]","ON COMMIT","{ROWS | RANGE} 
BETWEEN"]),r=(0,A.expandPhrases)([]);T.plsql={name:"plsql",tokenizerOptions:{reservedSelect:O,reservedClauses:[...N,...t,...L],reservedSetOperations:C,reservedJoins:_,reservedKeywordPhrases:s,reservedDataTypePhrases:r,supportsXor:!0,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:I.functions,stringTypes:[{quote:"''-qq",prefixes:["N"]},{quote:"q''",prefixes:["N"]}],identTypes:['""-qq'],identChars:{rest:"$#"},variableTypes:[{regex:"&{1,2}[A-Za-z][A-Za-z0-9_$#]*"}],paramTypes:{numbered:[":"],named:[":"]},operators:["**",":=","%","~=","^=",">>","<<","=>","@","||"],postProcess:function(E){let T=e.EOF_TOKEN;return E.map((E=>e.isToken.SET(E)&&e.isToken.BY(T)?Object.assign(Object.assign({},E),{type:e.TokenType.RESERVED_KEYWORD}):((0,e.isReserved)(E.type)&&(T=E),E)))}},formatOptions:{alwaysDenseOperators:["@"],onelineClauses:[...t,...L],tabularOnelineClauses:L}}},4937:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 0,T.functions=["ABS","ACOS","ASIN","ATAN","ATAN2","BITAND","CEIL","COS","COSH","EXP","FLOOR","LN","LOG","MOD","NANVL","POWER","REMAINDER","ROUND","SIGN","SIN","SINH","SQRT","TAN","TANH","TRUNC","WIDTH_BUCKET","CHR","CONCAT","INITCAP","LOWER","LPAD","LTRIM","NLS_INITCAP","NLS_LOWER","NLSSORT","NLS_UPPER","REGEXP_REPLACE","REGEXP_SUBSTR","REPLACE","RPAD","RTRIM","SOUNDEX","SUBSTR","TRANSLATE","TREAT","TRIM","UPPER","NLS_CHARSET_DECL_LEN","NLS_CHARSET_ID","NLS_CHARSET_NAME","ASCII","INSTR","LENGTH","REGEXP_INSTR","ADD_MONTHS","CURRENT_DATE","CURRENT_TIMESTAMP","DBTIMEZONE","EXTRACT","FROM_TZ","LAST_DAY","LOCALTIMESTAMP","MONTHS_BETWEEN","NEW_TIME","NEXT_DAY","NUMTODSINTERVAL","NUMTOYMINTERVAL","ROUND","SESSIONTIMEZONE","SYS_EXTRACT_UTC","SYSDATE","SYSTIMESTAMP","TO_CHAR","TO_TIMESTAMP","TO_TIMESTAMP_TZ","TO_DSINTERVAL","TO_YMINTERVAL","TRUNC","TZ_OFFSET","GREATEST","LEAST","ASCIISTR","BIN_TO_NUM","CAST","CHARTOROWID","COMPOSE","CONVERT","DECOMPOSE","HEXTORAW","NUMTODSINTERVAL","NUMTOYMINTERVAL","RAWTOHEX","RAWTONHEX","ROWIDTOCHAR","ROWIDTONCHAR","SCN_TO_TIMESTAMP","TIMESTAMP_TO_SCN","TO_BINARY_DOUBLE","TO_BINARY_FLOAT","TO_CHAR","TO_CLOB","TO_DATE","TO_DSINTERVAL","TO_LOB","TO_MULTI_BYTE","TO_NCHAR","TO_NCLOB","TO_NUMBER","TO_DSINTERVAL","TO_SINGLE_BYTE","TO_TIMESTAMP","TO_TIMESTAMP_TZ","TO_YMINTERVAL","TO_YMINTERVAL","TRANSLATE","UNISTR","BFILENAME","EMPTY_BLOB,","EMPTY_CLOB","CARDINALITY","COLLECT","POWERMULTISET","POWERMULTISET_BY_CARDINALITY","SET","SYS_CONNECT_BY_PATH","CLUSTER_ID","CLUSTER_PROBABILITY","CLUSTER_SET","FEATURE_ID","FEATURE_SET","FEATURE_VALUE","PREDICTION","PREDICTION_COST","PREDICTION_DETAILS","PREDICTION_PROBABILITY","PREDICTION_SET","APPENDCHILDXML","DELETEXML","DEPTH","EXTRACT","EXISTSNODE","EXTRACTVALUE","INSERTCHILDXML","INSERTXMLBEFORE","PATH","SYS_DBURIGEN","SYS_XMLAGG","SYS_XMLGEN","UPDATEXML","XMLAGG","XMLCDATA","XMLCOLATTVAL","XMLCOMMENT","XMLCONCAT","XMLFOREST","XMLPARSE","XMLPI","XMLQUERY","XMLROOT","XMLSEQUENCE","XMLSERIALIZE","XMLTABLE","XMLTRANSFORM","DECODE","DUMP","ORA_HASH","VSIZE","COALESCE","LNNVL","NULLIF","NVL","NVL2","SYS_CONTEXT","SYS_GUID","SYS_TYPEID","UID","USER","USERENV","AVG","COLLECT","CORR","CORR_S","CORR_K","COUNT","COVAR_POP","COVAR_SAMP","CUME_DIST","DENSE_RANK","FIRST","GROUP_ID","GROUPING","GROUPING_ID","LAST","MAX","MEDIAN","MIN","PERCENTILE_CONT","PERCENTILE_DISC","PERCENT_RANK","RANK","REGR_SLOPE","REGR_INTERCEPT","REGR_COUNT","REGR_R2","REGR_AVGX","REGR_AVGY","REGR_SXX","REGR_SYY","REGR_SXY","STATS_BINOMIAL_TEST","STATS_CROSSTAB","STATS_F_TEST","STATS_KS_TEST"
,"STATS_MODE","STATS_MW_TEST","STATS_ONE_WAY_ANOVA","STATS_T_TEST_ONE","STATS_T_TEST_PAIRED","STATS_T_TEST_INDEP","STATS_T_TEST_INDEPU","STATS_WSR_TEST","STDDEV","STDDEV_POP","STDDEV_SAMP","SUM","VAR_POP","VAR_SAMP","VARIANCE","FIRST_VALUE","LAG","LAST_VALUE","LEAD","NTILE","RATIO_TO_REPORT","ROW_NUMBER","DEREF","MAKE_REF","REF","REFTOHEX","VALUE","CV","ITERATION_NUMBER","PRESENTNNV","PRESENTV","PREVIOUS"]},1670:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 0,T.keywords=["ADD","AGENT","AGGREGATE","ALL","ALTER","AND","ANY","ARROW","AS","ASC","AT","ATTRIBUTE","AUTHID","AVG","BEGIN","BETWEEN","BLOCK","BODY","BOTH","BOUND","BULK","BY","BYTE","CALL","CALLING","CASCADE","CASE","CHARSET","CHARSETFORM","CHARSETID","CHECK","CLOSE","CLUSTER","CLUSTERS","COLAUTH","COLLECT","COLUMNS","COMMENT","COMMIT","COMMITTED","COMPILED","COMPRESS","CONNECT","CONSTANT","CONSTRUCTOR","CONTEXT","CONVERT","COUNT","CRASH","CREATE","CURRENT","CURSOR","CUSTOMDATUM","DANGLING","DATA","DAY","DECLARE","DEFAULT","DEFINE","DELETE","DESC","DETERMINISTIC","DISTINCT","DROP","DURATION","ELEMENT","ELSE","ELSIF","EMPTY","END","ESCAPE","EXCEPT","EXCEPTION","EXCEPTIONS","EXCLUSIVE","EXECUTE","EXISTS","EXIT","EXTERNAL","FETCH","FINAL","FIXED","FOR","FORALL","FORCE","FORM","FROM","FUNCTION","GENERAL","GOTO","GRANT","GROUP","HASH","HAVING","HEAP","HIDDEN","HOUR","IDENTIFIED","IF","IMMEDIATE","IN","INCLUDING","INDEX","INDEXES","INDICATOR","INDICES","INFINITE","INSERT","INSTANTIABLE","INTERFACE","INTERSECT","INTERVAL","INTO","INVALIDATE","IS","ISOLATION","JAVA","LANGUAGE","LARGE","LEADING","LENGTH","LEVEL","LIBRARY","LIKE","LIKE2","LIKE4","LIKEC","LIMIT","LIMITED","LOCAL","LOCK","LOOP","MAP","MAX","MAXLEN","MEMBER","MERGE","MIN","MINUS","MINUTE","MOD","MODE","MODIFY","MONTH","MULTISET","NAME","NAN","NATIONAL","NATIVE","NEW","NOCOMPRESS","NOCOPY","NOT","NOWAIT","NULL","OBJECT","OCICOLL","OCIDATE","OCIDATETIME","OCIDURATION","OCIINTERVAL","OCILOBLOCATOR","OCINUMBER","OCIRAW","OCIREF","OCIREFCURSOR","OCIROWID","OCISTRING","OCITYPE","OF","ON","ONLY","OPAQUE","OPEN","OPERATOR","OPTION","OR","ORACLE","ORADATA","ORDER","OVERLAPS","ORGANIZATION","ORLANY","ORLVARY","OTHERS","OUT","OVERRIDING","PACKAGE","PARALLEL_ENABLE","PARAMETER","PARAMETERS","PARTITION","PASCAL","PIPE","PIPELINED","PRAGMA","PRIOR","PRIVATE","PROCEDURE","PUBLIC","RAISE","RANGE","READ","RECORD","REF","REFERENCE","REM","REMAINDER","RENAME","RESOURCE","RESULT","RETURN","RETURNING","REVERSE","REVOKE","ROLLBACK","ROW","SAMPLE","SAVE","SAVEPOINT","SB1","SB2","SB4","SECOND","SEGMENT","SELECT","SELF","SEPARATE","SEQUENCE","SERIALIZABLE","SET","SHARE","SHORT","SIZE","SIZE_T","SOME","SPARSE","SQL","SQLCODE","SQLDATA","SQLNAME","SQLSTATE","STANDARD","START","STATIC","STDDEV","STORED","STRING","STRUCT","STYLE","SUBMULTISET","SUBPARTITION","SUBSTITUTABLE","SUBTYPE","SUM","SYNONYM","TABAUTH","TABLE","TDO","THE","THEN","TIME","TIMEZONE_ABBR","TIMEZONE_HOUR","TIMEZONE_MINUTE","TIMEZONE_REGION","TO","TRAILING","TRANSAC","TRANSACTIONAL","TRUSTED","TYPE","UB1","UB2","UB4","UNDER","UNION","UNIQUE","UNSIGNED","UNTRUSTED","UPDATE","USE","USING","VALIST","VALUE","VALUES","VARIABLE","VARIANCE","VARRAY","VIEW","VIEWS","VOID","WHEN","WHERE","WHILE","WITH","WORK","WRAPPED","WRITE","YEAR","ZONE"],T.dataTypes=["ARRAY","BFILE_BASE","BINARY","BLOB_BASE","CHAR VARYING","CHAR_BASE","CHAR","CHARACTER VARYING","CHARACTER","CLOB_BASE","DATE_BASE","DATE","DECIMAL","DOUBLE","FLOAT","INT","INTERVAL DAY","INTERVAL YEAR","LONG","NATIONAL CHAR VARYING","NATIONAL 
CHAR","NATIONAL CHARACTER VARYING","NATIONAL CHARACTER","NCHAR VARYING","NCHAR","NCHAR","NUMBER_BASE","NUMBER","NUMBERIC","NVARCHAR","PRECISION","RAW","TIMESTAMP","UROWID","VARCHAR","VARCHAR2"]},2912:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.postgresql=void 0;const A=R(7163),e=R(1435),S=R(9256),I=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT]"]),O=(0,A.expandPhrases)(["WITH [RECURSIVE]","FROM","WHERE","GROUP BY [ALL | DISTINCT]","HAVING","WINDOW","PARTITION BY","ORDER BY","LIMIT","OFFSET","FETCH {FIRST | NEXT}","FOR {UPDATE | NO KEY UPDATE | SHARE | KEY SHARE} [OF]","INSERT INTO","VALUES","DEFAULT VALUES","SET","RETURNING"]),N=(0,A.expandPhrases)(["CREATE [GLOBAL | LOCAL] [TEMPORARY | TEMP | UNLOGGED] TABLE [IF NOT EXISTS]"]),t=(0,A.expandPhrases)(["CREATE [OR REPLACE] [TEMP | TEMPORARY] [RECURSIVE] VIEW","CREATE [MATERIALIZED] VIEW [IF NOT EXISTS]","UPDATE [ONLY]","WHERE CURRENT OF","ON CONFLICT","DELETE FROM [ONLY]","DROP TABLE [IF EXISTS]","ALTER TABLE [IF EXISTS] [ONLY]","ALTER TABLE ALL IN TABLESPACE","RENAME [COLUMN]","RENAME TO","ADD [COLUMN] [IF NOT EXISTS]","DROP [COLUMN] [IF EXISTS]","ALTER [COLUMN]","SET DATA TYPE","{SET | DROP} DEFAULT","{SET | DROP} NOT NULL","TRUNCATE [TABLE] [ONLY]","SET SCHEMA","AFTER","ABORT","ALTER AGGREGATE","ALTER COLLATION","ALTER CONVERSION","ALTER DATABASE","ALTER DEFAULT PRIVILEGES","ALTER DOMAIN","ALTER EVENT TRIGGER","ALTER EXTENSION","ALTER FOREIGN DATA WRAPPER","ALTER FOREIGN TABLE","ALTER FUNCTION","ALTER GROUP","ALTER INDEX","ALTER LANGUAGE","ALTER LARGE OBJECT","ALTER MATERIALIZED VIEW","ALTER OPERATOR","ALTER OPERATOR CLASS","ALTER OPERATOR FAMILY","ALTER POLICY","ALTER PROCEDURE","ALTER PUBLICATION","ALTER ROLE","ALTER ROUTINE","ALTER RULE","ALTER SCHEMA","ALTER SEQUENCE","ALTER SERVER","ALTER STATISTICS","ALTER SUBSCRIPTION","ALTER SYSTEM","ALTER TABLESPACE","ALTER TEXT SEARCH CONFIGURATION","ALTER TEXT SEARCH DICTIONARY","ALTER TEXT SEARCH PARSER","ALTER TEXT SEARCH TEMPLATE","ALTER TRIGGER","ALTER TYPE","ALTER USER","ALTER USER MAPPING","ALTER VIEW","ANALYZE","BEGIN","CALL","CHECKPOINT","CLOSE","CLUSTER","COMMENT ON","COMMIT","COMMIT PREPARED","COPY","CREATE ACCESS METHOD","CREATE [OR REPLACE] AGGREGATE","CREATE CAST","CREATE COLLATION","CREATE [DEFAULT] CONVERSION","CREATE DATABASE","CREATE DOMAIN","CREATE EVENT TRIGGER","CREATE EXTENSION","CREATE FOREIGN DATA WRAPPER","CREATE FOREIGN TABLE","CREATE [OR REPLACE] FUNCTION","CREATE GROUP","CREATE [UNIQUE] INDEX","CREATE [OR REPLACE] [TRUSTED] [PROCEDURAL] LANGUAGE","CREATE OPERATOR","CREATE OPERATOR CLASS","CREATE OPERATOR FAMILY","CREATE POLICY","CREATE [OR REPLACE] PROCEDURE","CREATE PUBLICATION","CREATE ROLE","CREATE [OR REPLACE] RULE","CREATE SCHEMA [AUTHORIZATION]","CREATE [TEMPORARY | TEMP | UNLOGGED] SEQUENCE","CREATE SERVER","CREATE STATISTICS","CREATE SUBSCRIPTION","CREATE TABLESPACE","CREATE TEXT SEARCH CONFIGURATION","CREATE TEXT SEARCH DICTIONARY","CREATE TEXT SEARCH PARSER","CREATE TEXT SEARCH TEMPLATE","CREATE [OR REPLACE] TRANSFORM","CREATE [OR REPLACE] [CONSTRAINT] TRIGGER","CREATE TYPE","CREATE USER","CREATE USER MAPPING","DEALLOCATE","DECLARE","DISCARD","DROP ACCESS METHOD","DROP AGGREGATE","DROP CAST","DROP COLLATION","DROP CONVERSION","DROP DATABASE","DROP DOMAIN","DROP EVENT TRIGGER","DROP EXTENSION","DROP FOREIGN DATA WRAPPER","DROP FOREIGN TABLE","DROP FUNCTION","DROP GROUP","DROP IDENTITY","DROP INDEX","DROP LANGUAGE","DROP MATERIALIZED VIEW [IF EXISTS]","DROP OPERATOR","DROP OPERATOR CLASS","DROP OPERATOR FAMILY","DROP OWNED","DROP 
POLICY","DROP PROCEDURE","DROP PUBLICATION","DROP ROLE","DROP ROUTINE","DROP RULE","DROP SCHEMA","DROP SEQUENCE","DROP SERVER","DROP STATISTICS","DROP SUBSCRIPTION","DROP TABLESPACE","DROP TEXT SEARCH CONFIGURATION","DROP TEXT SEARCH DICTIONARY","DROP TEXT SEARCH PARSER","DROP TEXT SEARCH TEMPLATE","DROP TRANSFORM","DROP TRIGGER","DROP TYPE","DROP USER","DROP USER MAPPING","DROP VIEW","EXECUTE","EXPLAIN","FETCH","GRANT","IMPORT FOREIGN SCHEMA","LISTEN","LOAD","LOCK","MOVE","NOTIFY","OVERRIDING SYSTEM VALUE","PREPARE","PREPARE TRANSACTION","REASSIGN OWNED","REFRESH MATERIALIZED VIEW","REINDEX","RELEASE SAVEPOINT","RESET [ALL|ROLE|SESSION AUTHORIZATION]","REVOKE","ROLLBACK","ROLLBACK PREPARED","ROLLBACK TO SAVEPOINT","SAVEPOINT","SECURITY LABEL","SELECT INTO","SET CONSTRAINTS","SET ROLE","SET SESSION AUTHORIZATION","SET TRANSACTION","SHOW","START TRANSACTION","UNLISTEN","VACUUM"]),L=(0,A.expandPhrases)(["UNION [ALL | DISTINCT]","EXCEPT [ALL | DISTINCT]","INTERSECT [ALL | DISTINCT]"]),C=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT | FULL} [OUTER] JOIN","{INNER | CROSS} JOIN","NATURAL [INNER] JOIN","NATURAL {LEFT | RIGHT | FULL} [OUTER] JOIN"]),_=(0,A.expandPhrases)(["PRIMARY KEY","GENERATED {ALWAYS | BY DEFAULT} AS IDENTITY","ON {UPDATE | DELETE} [NO ACTION | RESTRICT | CASCADE | SET NULL | SET DEFAULT]","DO {NOTHING | UPDATE}","AS MATERIALIZED","{ROWS | RANGE | GROUPS} BETWEEN","IS [NOT] DISTINCT FROM","NULLS {FIRST | LAST}","WITH ORDINALITY"]),s=(0,A.expandPhrases)(["[TIMESTAMP | TIME] {WITH | WITHOUT} TIME ZONE"]);T.postgresql={name:"postgresql",tokenizerOptions:{reservedSelect:I,reservedClauses:[...O,...N,...t],reservedSetOperations:L,reservedJoins:C,reservedKeywordPhrases:_,reservedDataTypePhrases:s,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:e.functions,nestedBlockComments:!0,extraParens:["[]"],underscoresInNumbers:!0,stringTypes:["$$",{quote:"''-qq",prefixes:["U&"]},{quote:"''-qq-bs",prefixes:["E"],requirePrefix:!0},{quote:"''-raw",prefixes:["B","X"],requirePrefix:!0}],identTypes:[{quote:'""-qq',prefixes:["U&"]}],identChars:{rest:"$"},paramTypes:{numbered:["$"]},operators:["%","^","|/","||/","@",":=","&","|","#","~","<<",">>","~>~","~<~","~>=~","~<=~","@-@","@@","##","<->","&&","&<","&>","<<|","&<|","|>>","|&>","<^","^>","?#","?-","?|","?-|","?||","@>","<@","~=","?","@?","?&","->","->>","#>","#>>","#-","=>",">>=","<<=","~~","~~*","!~~","!~~*","~","~*","!~","!~*","-|-","||","@@@","!!","^@","<%","%>","<<%","%>>","<<->","<->>","<<<->","<->>>","::",":","<#>","<=>","<+>","<~>","<%>"],operatorKeyword:!0},formatOptions:{alwaysDenseOperators:["::",":"],onelineClauses:[...N,...t],tabularOnelineClauses:t}}},1435:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 
0,T.functions=["ABS","ACOS","ACOSD","ACOSH","ASIN","ASIND","ASINH","ATAN","ATAN2","ATAN2D","ATAND","ATANH","CBRT","CEIL","CEILING","COS","COSD","COSH","COT","COTD","DEGREES","DIV","EXP","FACTORIAL","FLOOR","GCD","LCM","LN","LOG","LOG10","MIN_SCALE","MOD","PI","POWER","RADIANS","RANDOM","ROUND","SCALE","SETSEED","SIGN","SIN","SIND","SINH","SQRT","TAN","TAND","TANH","TRIM_SCALE","TRUNC","WIDTH_BUCKET","ABS","ASCII","BIT_LENGTH","BTRIM","CHARACTER_LENGTH","CHAR_LENGTH","CHR","CONCAT","CONCAT_WS","FORMAT","INITCAP","LEFT","LENGTH","LOWER","LPAD","LTRIM","MD5","NORMALIZE","OCTET_LENGTH","OVERLAY","PARSE_IDENT","PG_CLIENT_ENCODING","POSITION","QUOTE_IDENT","QUOTE_LITERAL","QUOTE_NULLABLE","REGEXP_MATCH","REGEXP_MATCHES","REGEXP_REPLACE","REGEXP_SPLIT_TO_ARRAY","REGEXP_SPLIT_TO_TABLE","REPEAT","REPLACE","REVERSE","RIGHT","RPAD","RTRIM","SPLIT_PART","SPRINTF","STARTS_WITH","STRING_AGG","STRING_TO_ARRAY","STRING_TO_TABLE","STRPOS","SUBSTR","SUBSTRING","TO_ASCII","TO_HEX","TRANSLATE","TRIM","UNISTR","UPPER","BIT_COUNT","BIT_LENGTH","BTRIM","CONVERT","CONVERT_FROM","CONVERT_TO","DECODE","ENCODE","GET_BIT","GET_BYTE","LENGTH","LTRIM","MD5","OCTET_LENGTH","OVERLAY","POSITION","RTRIM","SET_BIT","SET_BYTE","SHA224","SHA256","SHA384","SHA512","STRING_AGG","SUBSTR","SUBSTRING","TRIM","BIT_COUNT","BIT_LENGTH","GET_BIT","LENGTH","OCTET_LENGTH","OVERLAY","POSITION","SET_BIT","SUBSTRING","REGEXP_MATCH","REGEXP_MATCHES","REGEXP_REPLACE","REGEXP_SPLIT_TO_ARRAY","REGEXP_SPLIT_TO_TABLE","TO_CHAR","TO_DATE","TO_NUMBER","TO_TIMESTAMP","CLOCK_TIMESTAMP","CURRENT_DATE","CURRENT_TIME","CURRENT_TIMESTAMP","DATE_BIN","DATE_PART","DATE_TRUNC","EXTRACT","ISFINITE","JUSTIFY_DAYS","JUSTIFY_HOURS","JUSTIFY_INTERVAL","LOCALTIME","LOCALTIMESTAMP","MAKE_DATE","MAKE_INTERVAL","MAKE_TIME","MAKE_TIMESTAMP","MAKE_TIMESTAMPTZ","NOW","PG_SLEEP","PG_SLEEP_FOR","PG_SLEEP_UNTIL","STATEMENT_TIMESTAMP","TIMEOFDAY","TO_TIMESTAMP","TRANSACTION_TIMESTAMP","ENUM_FIRST","ENUM_LAST","ENUM_RANGE","AREA","BOUND_BOX","BOX","CENTER","CIRCLE","DIAGONAL","DIAMETER","HEIGHT","ISCLOSED","ISOPEN","LENGTH","LINE","LSEG","NPOINTS","PATH","PCLOSE","POINT","POLYGON","POPEN","RADIUS","SLOPE","WIDTH","ABBREV","BROADCAST","FAMILY","HOST","HOSTMASK","INET_MERGE","INET_SAME_FAMILY","MACADDR8_SET7BIT","MASKLEN","NETMASK","NETWORK","SET_MASKLEN","TRUNC","ARRAY_TO_TSVECTOR","GET_CURRENT_TS_CONFIG","JSONB_TO_TSVECTOR","JSON_TO_TSVECTOR","LENGTH","NUMNODE","PHRASETO_TSQUERY","PLAINTO_TSQUERY","QUERYTREE","SETWEIGHT","STRIP","TO_TSQUERY","TO_TSVECTOR","TSQUERY_PHRASE","TSVECTOR_TO_ARRAY","TS_DEBUG","TS_DELETE","TS_FILTER","TS_HEADLINE","TS_LEXIZE","TS_PARSE","TS_RANK","TS_RANK_CD","TS_REWRITE","TS_STAT","TS_TOKEN_TYPE","WEBSEARCH_TO_TSQUERY","GEN_RANDOM_UUID","UUIDV4","UUIDV7","UUID_EXTRACT_TIMESTAMP","UUID_EXTRACT_VERSION","CURSOR_TO_XML","CURSOR_TO_XMLSCHEMA","DATABASE_TO_XML","DATABASE_TO_XMLSCHEMA","DATABASE_TO_XML_AND_XMLSCHEMA","NEXTVAL","QUERY_TO_XML","QUERY_TO_XMLSCHEMA","QUERY_TO_XML_AND_XMLSCHEMA","SCHEMA_TO_XML","SCHEMA_TO_XMLSCHEMA","SCHEMA_TO_XML_AND_XMLSCHEMA","STRING","TABLE_TO_XML","TABLE_TO_XMLSCHEMA","TABLE_TO_XML_AND_XMLSCHEMA","XMLAGG","XMLCOMMENT","XMLCONCAT","XMLELEMENT","XMLEXISTS","XMLFOREST","XMLPARSE","XMLPI","XMLROOT","XMLSERIALIZE","XMLTABLE","XML_IS_WELL_FORMED","XML_IS_WELL_FORMED_CONTENT","XML_IS_WELL_FORMED_DOCUMENT","XPATH","XPATH_EXISTS","ARRAY_TO_JSON","JSONB_AGG","JSONB_ARRAY_ELEMENTS","JSONB_ARRAY_ELEMENTS_TEXT","JSONB_ARRAY_LENGTH","JSONB_BUILD_ARRAY","JSONB_BUILD_OBJECT","JSONB_EACH","JSONB_EACH_TEXT","JSONB_EXTRACT_PATH","JSONB_EX
TRACT_PATH_TEXT","JSONB_INSERT","JSONB_OBJECT","JSONB_OBJECT_AGG","JSONB_OBJECT_KEYS","JSONB_PATH_EXISTS","JSONB_PATH_EXISTS_TZ","JSONB_PATH_MATCH","JSONB_PATH_MATCH_TZ","JSONB_PATH_QUERY","JSONB_PATH_QUERY_ARRAY","JSONB_PATH_QUERY_ARRAY_TZ","JSONB_PATH_QUERY_FIRST","JSONB_PATH_QUERY_FIRST_TZ","JSONB_PATH_QUERY_TZ","JSONB_POPULATE_RECORD","JSONB_POPULATE_RECORDSET","JSONB_PRETTY","JSONB_SET","JSONB_SET_LAX","JSONB_STRIP_NULLS","JSONB_TO_RECORD","JSONB_TO_RECORDSET","JSONB_TYPEOF","JSON_AGG","JSON_ARRAY_ELEMENTS","JSON_ARRAY_ELEMENTS_TEXT","JSON_ARRAY_LENGTH","JSON_BUILD_ARRAY","JSON_BUILD_OBJECT","JSON_EACH","JSON_EACH_TEXT","JSON_EXTRACT_PATH","JSON_EXTRACT_PATH_TEXT","JSON_OBJECT","JSON_OBJECT_AGG","JSON_OBJECT_KEYS","JSON_POPULATE_RECORD","JSON_POPULATE_RECORDSET","JSON_STRIP_NULLS","JSON_TO_RECORD","JSON_TO_RECORDSET","JSON_TYPEOF","ROW_TO_JSON","TO_JSON","TO_JSONB","TO_TIMESTAMP","CURRVAL","LASTVAL","NEXTVAL","SETVAL","COALESCE","GREATEST","LEAST","NULLIF","ARRAY_AGG","ARRAY_APPEND","ARRAY_CAT","ARRAY_DIMS","ARRAY_FILL","ARRAY_LENGTH","ARRAY_LOWER","ARRAY_NDIMS","ARRAY_POSITION","ARRAY_POSITIONS","ARRAY_PREPEND","ARRAY_REMOVE","ARRAY_REPLACE","ARRAY_TO_STRING","ARRAY_UPPER","CARDINALITY","STRING_TO_ARRAY","TRIM_ARRAY","UNNEST","ISEMPTY","LOWER","LOWER_INC","LOWER_INF","MULTIRANGE","RANGE_MERGE","UPPER","UPPER_INC","UPPER_INF","ARRAY_AGG","AVG","BIT_AND","BIT_OR","BIT_XOR","BOOL_AND","BOOL_OR","COALESCE","CORR","COUNT","COVAR_POP","COVAR_SAMP","CUME_DIST","DENSE_RANK","EVERY","GROUPING","JSONB_AGG","JSONB_OBJECT_AGG","JSON_AGG","JSON_OBJECT_AGG","MAX","MIN","MODE","PERCENTILE_CONT","PERCENTILE_DISC","PERCENT_RANK","RANGE_AGG","RANGE_INTERSECT_AGG","RANK","REGR_AVGX","REGR_AVGY","REGR_COUNT","REGR_INTERCEPT","REGR_R2","REGR_SLOPE","REGR_SXX","REGR_SXY","REGR_SYY","STDDEV","STDDEV_POP","STDDEV_SAMP","STRING_AGG","SUM","TO_JSON","TO_JSONB","VARIANCE","VAR_POP","VAR_SAMP","XMLAGG","CUME_DIST","DENSE_RANK","FIRST_VALUE","LAG","LAST_VALUE","LEAD","NTH_VALUE","NTILE","PERCENT_RANK","RANK","ROW_NUMBER","GENERATE_SERIES","GENERATE_SUBSCRIPTS","ACLDEFAULT","ACLEXPLODE","COL_DESCRIPTION","CURRENT_CATALOG","CURRENT_DATABASE","CURRENT_QUERY","CURRENT_ROLE","CURRENT_SCHEMA","CURRENT_SCHEMAS","CURRENT_USER","FORMAT_TYPE","HAS_ANY_COLUMN_PRIVILEGE","HAS_COLUMN_PRIVILEGE","HAS_DATABASE_PRIVILEGE","HAS_FOREIGN_DATA_WRAPPER_PRIVILEGE","HAS_FUNCTION_PRIVILEGE","HAS_LANGUAGE_PRIVILEGE","HAS_SCHEMA_PRIVILEGE","HAS_SEQUENCE_PRIVILEGE","HAS_SERVER_PRIVILEGE","HAS_TABLESPACE_PRIVILEGE","HAS_TABLE_PRIVILEGE","HAS_TYPE_PRIVILEGE","INET_CLIENT_ADDR","INET_CLIENT_PORT","INET_SERVER_ADDR","INET_SERVER_PORT","MAKEACLITEM","OBJ_DESCRIPTION","PG_BACKEND_PID","PG_BLOCKING_PIDS","PG_COLLATION_IS_VISIBLE","PG_CONF_LOAD_TIME","PG_CONTROL_CHECKPOINT","PG_CONTROL_INIT","PG_CONTROL_SYSTEM","PG_CONVERSION_IS_VISIBLE","PG_CURRENT_LOGFILE","PG_CURRENT_SNAPSHOT","PG_CURRENT_XACT_ID","PG_CURRENT_XACT_ID_IF_ASSIGNED","PG_DESCRIBE_OBJECT","PG_FUNCTION_IS_VISIBLE","PG_GET_CATALOG_FOREIGN_KEYS","PG_GET_CONSTRAINTDEF","PG_GET_EXPR","PG_GET_FUNCTIONDEF","PG_GET_FUNCTION_ARGUMENTS","PG_GET_FUNCTION_IDENTITY_ARGUMENTS","PG_GET_FUNCTION_RESULT","PG_GET_INDEXDEF","PG_GET_KEYWORDS","PG_GET_OBJECT_ADDRESS","PG_GET_OWNED_SEQUENCE","PG_GET_RULEDEF","PG_GET_SERIAL_SEQUENCE","PG_GET_STATISTICSOBJDEF","PG_GET_TRIGGERDEF","PG_GET_USERBYID","PG_GET_VIEWDEF","PG_HAS_ROLE","PG_IDENTIFY_OBJECT","PG_IDENTIFY_OBJECT_AS_ADDRESS","PG_INDEXAM_HAS_PROPERTY","PG_INDEX_COLUMN_HAS_PROPERTY","PG_INDEX_HAS_PROPERTY","PG_IS_OTHER_TEMP_SCHEMA","PG_JIT_AVAILABLE","P
G_LAST_COMMITTED_XACT","PG_LISTENING_CHANNELS","PG_MY_TEMP_SCHEMA","PG_NOTIFICATION_QUEUE_USAGE","PG_OPCLASS_IS_VISIBLE","PG_OPERATOR_IS_VISIBLE","PG_OPFAMILY_IS_VISIBLE","PG_OPTIONS_TO_TABLE","PG_POSTMASTER_START_TIME","PG_SAFE_SNAPSHOT_BLOCKING_PIDS","PG_SNAPSHOT_XIP","PG_SNAPSHOT_XMAX","PG_SNAPSHOT_XMIN","PG_STATISTICS_OBJ_IS_VISIBLE","PG_TABLESPACE_DATABASES","PG_TABLESPACE_LOCATION","PG_TABLE_IS_VISIBLE","PG_TRIGGER_DEPTH","PG_TS_CONFIG_IS_VISIBLE","PG_TS_DICT_IS_VISIBLE","PG_TS_PARSER_IS_VISIBLE","PG_TS_TEMPLATE_IS_VISIBLE","PG_TYPEOF","PG_TYPE_IS_VISIBLE","PG_VISIBLE_IN_SNAPSHOT","PG_XACT_COMMIT_TIMESTAMP","PG_XACT_COMMIT_TIMESTAMP_ORIGIN","PG_XACT_STATUS","PQSERVERVERSION","ROW_SECURITY_ACTIVE","SESSION_USER","SHOBJ_DESCRIPTION","TO_REGCLASS","TO_REGCOLLATION","TO_REGNAMESPACE","TO_REGOPER","TO_REGOPERATOR","TO_REGPROC","TO_REGPROCEDURE","TO_REGROLE","TO_REGTYPE","TXID_CURRENT","TXID_CURRENT_IF_ASSIGNED","TXID_CURRENT_SNAPSHOT","TXID_SNAPSHOT_XIP","TXID_SNAPSHOT_XMAX","TXID_SNAPSHOT_XMIN","TXID_STATUS","TXID_VISIBLE_IN_SNAPSHOT","USER","VERSION","BRIN_DESUMMARIZE_RANGE","BRIN_SUMMARIZE_NEW_VALUES","BRIN_SUMMARIZE_RANGE","CONVERT_FROM","CURRENT_SETTING","GIN_CLEAN_PENDING_LIST","PG_ADVISORY_LOCK","PG_ADVISORY_LOCK_SHARED","PG_ADVISORY_UNLOCK","PG_ADVISORY_UNLOCK_ALL","PG_ADVISORY_UNLOCK_SHARED","PG_ADVISORY_XACT_LOCK","PG_ADVISORY_XACT_LOCK_SHARED","PG_BACKUP_START_TIME","PG_CANCEL_BACKEND","PG_COLLATION_ACTUAL_VERSION","PG_COLUMN_COMPRESSION","PG_COLUMN_SIZE","PG_COPY_LOGICAL_REPLICATION_SLOT","PG_COPY_PHYSICAL_REPLICATION_SLOT","PG_CREATE_LOGICAL_REPLICATION_SLOT","PG_CREATE_PHYSICAL_REPLICATION_SLOT","PG_CREATE_RESTORE_POINT","PG_CURRENT_WAL_FLUSH_LSN","PG_CURRENT_WAL_INSERT_LSN","PG_CURRENT_WAL_LSN","PG_DATABASE_SIZE","PG_DROP_REPLICATION_SLOT","PG_EXPORT_SNAPSHOT","PG_FILENODE_RELATION","PG_GET_WAL_REPLAY_PAUSE_STATE","PG_IMPORT_SYSTEM_COLLATIONS","PG_INDEXES_SIZE","PG_IS_IN_BACKUP","PG_IS_IN_RECOVERY","PG_IS_WAL_REPLAY_PAUSED","PG_LAST_WAL_RECEIVE_LSN","PG_LAST_WAL_REPLAY_LSN","PG_LAST_XACT_REPLAY_TIMESTAMP","PG_LOGICAL_EMIT_MESSAGE","PG_LOGICAL_SLOT_GET_BINARY_CHANGES","PG_LOGICAL_SLOT_GET_CHANGES","PG_LOGICAL_SLOT_PEEK_BINARY_CHANGES","PG_LOGICAL_SLOT_PEEK_CHANGES","PG_LOG_BACKEND_MEMORY_CONTEXTS","PG_LS_ARCHIVE_STATUSDIR","PG_LS_DIR","PG_LS_LOGDIR","PG_LS_TMPDIR","PG_LS_WALDIR","PG_PARTITION_ANCESTORS","PG_PARTITION_ROOT","PG_PARTITION_TREE","PG_PROMOTE","PG_READ_BINARY_FILE","PG_READ_FILE","PG_RELATION_FILENODE","PG_RELATION_FILEPATH","PG_RELATION_SIZE","PG_RELOAD_CONF","PG_REPLICATION_ORIGIN_ADVANCE","PG_REPLICATION_ORIGIN_CREATE","PG_REPLICATION_ORIGIN_DROP","PG_REPLICATION_ORIGIN_OID","PG_REPLICATION_ORIGIN_PROGRESS","PG_REPLICATION_ORIGIN_SESSION_IS_SETUP","PG_REPLICATION_ORIGIN_SESSION_PROGRESS","PG_REPLICATION_ORIGIN_SESSION_RESET","PG_REPLICATION_ORIGIN_SESSION_SETUP","PG_REPLICATION_ORIGIN_XACT_RESET","PG_REPLICATION_ORIGIN_XACT_SETUP","PG_REPLICATION_SLOT_ADVANCE","PG_ROTATE_LOGFILE","PG_SIZE_BYTES","PG_SIZE_PRETTY","PG_START_BACKUP","PG_STAT_FILE","PG_STOP_BACKUP","PG_SWITCH_WAL","PG_TABLESPACE_SIZE","PG_TABLE_SIZE","PG_TERMINATE_BACKEND","PG_TOTAL_RELATION_SIZE","PG_TRY_ADVISORY_LOCK","PG_TRY_ADVISORY_LOCK_SHARED","PG_TRY_ADVISORY_XACT_LOCK","PG_TRY_ADVISORY_XACT_LOCK_SHARED","PG_WALFILE_NAME","PG_WALFILE_NAME_OFFSET","PG_WAL_LSN_DIFF","PG_WAL_REPLAY_PAUSE","PG_WAL_REPLAY_RESUME","SET_CONFIG","SUPPRESS_REDUNDANT_UPDATES_TRIGGER","TSVECTOR_UPDATE_TRIGGER","TSVECTOR_UPDATE_TRIGGER_COLUMN","PG_EVENT_TRIGGER_DDL_COMMANDS","PG_EVENT_TRIGGER_DROPPED_OBJECTS","PG_EVENT_
TRIGGER_TABLE_REWRITE_OID","PG_EVENT_TRIGGER_TABLE_REWRITE_REASON","PG_GET_OBJECT_ADDRESS","PG_MCV_LIST_ITEMS","CAST"]},9256:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 0,T.keywords=["ALL","ANALYSE","ANALYZE","AND","ANY","AS","ASC","ASYMMETRIC","AUTHORIZATION","BETWEEN","BINARY","BOTH","CASE","CAST","CHECK","COLLATE","COLLATION","COLUMN","CONCURRENTLY","CONSTRAINT","CREATE","CROSS","CURRENT_CATALOG","CURRENT_DATE","CURRENT_ROLE","CURRENT_SCHEMA","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_USER","DAY","DEFAULT","DEFERRABLE","DESC","DISTINCT","DO","ELSE","END","EXCEPT","EXISTS","FALSE","FETCH","FILTER","FOR","FOREIGN","FREEZE","FROM","FULL","GRANT","GROUP","HAVING","HOUR","ILIKE","IN","INITIALLY","INNER","INOUT","INTERSECT","INTO","IS","ISNULL","JOIN","LATERAL","LEADING","LEFT","LIKE","LIMIT","LOCALTIME","LOCALTIMESTAMP","MINUTE","MONTH","NATURAL","NOT","NOTNULL","NULL","NULLIF","OFFSET","ON","ONLY","OR","ORDER","OUT","OUTER","OVER","OVERLAPS","PLACING","PRIMARY","REFERENCES","RETURNING","RIGHT","ROW","SECOND","SELECT","SESSION_USER","SIMILAR","SOME","SYMMETRIC","TABLE","TABLESAMPLE","THEN","TO","TRAILING","TRUE","UNION","UNIQUE","USER","USING","VALUES","VARIADIC","VERBOSE","WHEN","WHERE","WINDOW","WITH","WITHIN","WITHOUT","YEAR"],T.dataTypes=["ARRAY","BIGINT","BIT","BIT VARYING","BOOL","BOOLEAN","CHAR","CHARACTER","CHARACTER VARYING","DECIMAL","DEC","DOUBLE","ENUM","FLOAT","INT","INTEGER","INTERVAL","NCHAR","NUMERIC","JSON","JSONB","PRECISION","REAL","SMALLINT","TEXT","TIME","TIMESTAMP","TIMESTAMPTZ","UUID","VARCHAR","XML","ZONE"]},1642:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.redshift=void 0;const A=R(7163),e=R(9349),S=R(8298),I=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT]"]),O=(0,A.expandPhrases)(["WITH [RECURSIVE]","FROM","WHERE","GROUP BY","HAVING","QUALIFY","PARTITION BY","ORDER BY","LIMIT","OFFSET","INSERT INTO","VALUES","SET"]),N=(0,A.expandPhrases)(["CREATE [TEMPORARY | TEMP | LOCAL TEMPORARY | LOCAL TEMP] TABLE [IF NOT EXISTS]"]),t=(0,A.expandPhrases)(["CREATE [OR REPLACE | MATERIALIZED] VIEW","UPDATE","DELETE [FROM]","DROP TABLE [IF EXISTS]","ALTER TABLE","ALTER TABLE APPEND","ADD [COLUMN]","DROP [COLUMN]","RENAME TO","RENAME COLUMN","ALTER COLUMN","TYPE","ENCODE","TRUNCATE [TABLE]","ABORT","ALTER DATABASE","ALTER DATASHARE","ALTER DEFAULT PRIVILEGES","ALTER GROUP","ALTER MATERIALIZED VIEW","ALTER PROCEDURE","ALTER SCHEMA","ALTER USER","ANALYSE","ANALYZE","ANALYSE COMPRESSION","ANALYZE COMPRESSION","BEGIN","CALL","CANCEL","CLOSE","COMMIT","COPY","CREATE DATABASE","CREATE DATASHARE","CREATE EXTERNAL FUNCTION","CREATE EXTERNAL SCHEMA","CREATE EXTERNAL TABLE","CREATE FUNCTION","CREATE GROUP","CREATE LIBRARY","CREATE MODEL","CREATE PROCEDURE","CREATE SCHEMA","CREATE USER","DEALLOCATE","DECLARE","DESC DATASHARE","DROP DATABASE","DROP DATASHARE","DROP FUNCTION","DROP GROUP","DROP LIBRARY","DROP MODEL","DROP MATERIALIZED VIEW","DROP PROCEDURE","DROP SCHEMA","DROP USER","DROP VIEW","DROP","EXECUTE","EXPLAIN","FETCH","GRANT","LOCK","PREPARE","REFRESH MATERIALIZED VIEW","RESET","REVOKE","ROLLBACK","SELECT INTO","SET SESSION AUTHORIZATION","SET SESSION CHARACTERISTICS","SHOW","SHOW EXTERNAL TABLE","SHOW MODEL","SHOW DATASHARES","SHOW PROCEDURE","SHOW TABLE","SHOW VIEW","START TRANSACTION","UNLOAD","VACUUM"]),L=(0,A.expandPhrases)(["UNION [ALL]","EXCEPT","INTERSECT","MINUS"]),C=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT | FULL} [OUTER] JOIN","{INNER | CROSS} JOIN","NATURAL [INNER] JOIN","NATURAL {LEFT | 
RIGHT | FULL} [OUTER] JOIN"]),_=(0,A.expandPhrases)(["NULL AS","DATA CATALOG","HIVE METASTORE","{ROWS | RANGE} BETWEEN"]),s=(0,A.expandPhrases)([]);T.redshift={name:"redshift",tokenizerOptions:{reservedSelect:I,reservedClauses:[...O,...N,...t],reservedSetOperations:L,reservedJoins:C,reservedKeywordPhrases:_,reservedDataTypePhrases:s,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:e.functions,extraParens:["[]"],stringTypes:["''-qq"],identTypes:['""-qq'],identChars:{first:"#"},paramTypes:{numbered:["$"]},operators:["^","%","@","|/","||/","&","|","~","<<",">>","||","::"]},formatOptions:{alwaysDenseOperators:["::"],onelineClauses:[...N,...t],tabularOnelineClauses:t}}},9349:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 0,T.functions=["ANY_VALUE","APPROXIMATE PERCENTILE_DISC","AVG","COUNT","LISTAGG","MAX","MEDIAN","MIN","PERCENTILE_CONT","STDDEV_SAMP","STDDEV_POP","SUM","VAR_SAMP","VAR_POP","array_concat","array_flatten","get_array_length","split_to_array","subarray","BIT_AND","BIT_OR","BOOL_AND","BOOL_OR","COALESCE","DECODE","GREATEST","LEAST","NVL","NVL2","NULLIF","ADD_MONTHS","AT TIME ZONE","CONVERT_TIMEZONE","CURRENT_DATE","CURRENT_TIME","CURRENT_TIMESTAMP","DATE_CMP","DATE_CMP_TIMESTAMP","DATE_CMP_TIMESTAMPTZ","DATE_PART_YEAR","DATEADD","DATEDIFF","DATE_PART","DATE_TRUNC","EXTRACT","GETDATE","INTERVAL_CMP","LAST_DAY","MONTHS_BETWEEN","NEXT_DAY","SYSDATE","TIMEOFDAY","TIMESTAMP_CMP","TIMESTAMP_CMP_DATE","TIMESTAMP_CMP_TIMESTAMPTZ","TIMESTAMPTZ_CMP","TIMESTAMPTZ_CMP_DATE","TIMESTAMPTZ_CMP_TIMESTAMP","TIMEZONE","TO_TIMESTAMP","TRUNC","AddBBox","DropBBox","GeometryType","ST_AddPoint","ST_Angle","ST_Area","ST_AsBinary","ST_AsEWKB","ST_AsEWKT","ST_AsGeoJSON","ST_AsText","ST_Azimuth","ST_Boundary","ST_Collect","ST_Contains","ST_ContainsProperly","ST_ConvexHull","ST_CoveredBy","ST_Covers","ST_Crosses","ST_Dimension","ST_Disjoint","ST_Distance","ST_DistanceSphere","ST_DWithin","ST_EndPoint","ST_Envelope","ST_Equals","ST_ExteriorRing","ST_Force2D","ST_Force3D","ST_Force3DM","ST_Force3DZ","ST_Force4D","ST_GeometryN","ST_GeometryType","ST_GeomFromEWKB","ST_GeomFromEWKT","ST_GeomFromText","ST_GeomFromWKB","ST_InteriorRingN","ST_Intersects","ST_IsPolygonCCW","ST_IsPolygonCW","ST_IsClosed","ST_IsCollection","ST_IsEmpty","ST_IsSimple","ST_IsValid","ST_Length","ST_LengthSphere","ST_Length2D","ST_LineFromMultiPoint","ST_LineInterpolatePoint","ST_M","ST_MakeEnvelope","ST_MakeLine","ST_MakePoint","ST_MakePolygon","ST_MemSize","ST_MMax","ST_MMin","ST_Multi","ST_NDims","ST_NPoints","ST_NRings","ST_NumGeometries","ST_NumInteriorRings","ST_NumPoints","ST_Perimeter","ST_Perimeter2D","ST_Point","ST_PointN","ST_Points","ST_Polygon","ST_RemovePoint","ST_Reverse","ST_SetPoint","ST_SetSRID","ST_Simplify","ST_SRID","ST_StartPoint","ST_Touches","ST_Within","ST_X","ST_XMax","ST_XMin","ST_Y","ST_YMax","ST_YMin","ST_Z","ST_ZMax","ST_ZMin","SupportsBBox","CHECKSUM","FUNC_SHA1","FNV_HASH","MD5","SHA","SHA1","SHA2","HLL","HLL_CREATE_SKETCH","HLL_CARDINALITY","HLL_COMBINE","IS_VALID_JSON","IS_VALID_JSON_ARRAY","JSON_ARRAY_LENGTH","JSON_EXTRACT_ARRAY_ELEMENT_TEXT","JSON_EXTRACT_PATH_TEXT","JSON_PARSE","JSON_SERIALIZE","ABS","ACOS","ASIN","ATAN","ATAN2","CBRT","CEILING","CEIL","COS","COT","DEGREES","DEXP","DLOG1","DLOG10","EXP","FLOOR","LN","LOG","MOD","PI","POWER","RADIANS","RANDOM","ROUND","SIN","SIGN","SQRT","TAN","TO_HEX","TRUNC","EXPLAIN_MODEL","ASCII","BPCHARCMP","BTRIM","BTTEXT_PATTERN_CMP","CHAR_LENGTH","CHARACTER_LENGTH","CHARINDEX","CHR","COLLATE","C
ONCAT","CRC32","DIFFERENCE","INITCAP","LEFT","RIGHT","LEN","LENGTH","LOWER","LPAD","RPAD","LTRIM","OCTETINDEX","OCTET_LENGTH","POSITION","QUOTE_IDENT","QUOTE_LITERAL","REGEXP_COUNT","REGEXP_INSTR","REGEXP_REPLACE","REGEXP_SUBSTR","REPEAT","REPLACE","REPLICATE","REVERSE","RTRIM","SOUNDEX","SPLIT_PART","STRPOS","STRTOL","SUBSTRING","TEXTLEN","TRANSLATE","TRIM","UPPER","decimal_precision","decimal_scale","is_array","is_bigint","is_boolean","is_char","is_decimal","is_float","is_integer","is_object","is_scalar","is_smallint","is_varchar","json_typeof","AVG","COUNT","CUME_DIST","DENSE_RANK","FIRST_VALUE","LAST_VALUE","LAG","LEAD","LISTAGG","MAX","MEDIAN","MIN","NTH_VALUE","NTILE","PERCENT_RANK","PERCENTILE_CONT","PERCENTILE_DISC","RANK","RATIO_TO_REPORT","ROW_NUMBER","STDDEV_SAMP","STDDEV_POP","SUM","VAR_SAMP","VAR_POP","CAST","CONVERT","TO_CHAR","TO_DATE","TO_NUMBER","TEXT_TO_INT_ALT","TEXT_TO_NUMERIC_ALT","CHANGE_QUERY_PRIORITY","CHANGE_SESSION_PRIORITY","CHANGE_USER_PRIORITY","CURRENT_SETTING","PG_CANCEL_BACKEND","PG_TERMINATE_BACKEND","REBOOT_CLUSTER","SET_CONFIG","CURRENT_AWS_ACCOUNT","CURRENT_DATABASE","CURRENT_NAMESPACE","CURRENT_SCHEMA","CURRENT_SCHEMAS","CURRENT_USER","CURRENT_USER_ID","HAS_ASSUMEROLE_PRIVILEGE","HAS_DATABASE_PRIVILEGE","HAS_SCHEMA_PRIVILEGE","HAS_TABLE_PRIVILEGE","PG_BACKEND_PID","PG_GET_COLS","PG_GET_GRANTEE_BY_IAM_ROLE","PG_GET_IAM_ROLE_BY_USER","PG_GET_LATE_BINDING_VIEW_COLS","PG_LAST_COPY_COUNT","PG_LAST_COPY_ID","PG_LAST_UNLOAD_ID","PG_LAST_QUERY_ID","PG_LAST_UNLOAD_COUNT","SESSION_USER","SLICE_NUM","USER","VERSION"]},8298:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 0,T.keywords=["AES128","AES256","ALL","ALLOWOVERWRITE","ANY","AS","ASC","AUTHORIZATION","BACKUP","BETWEEN","BINARY","BOTH","CHECK","COLUMN","CONSTRAINT","CREATE","CROSS","DEFAULT","DEFERRABLE","DEFLATE","DEFRAG","DESC","DISABLE","DISTINCT","DO","ENABLE","ENCODE","ENCRYPT","ENCRYPTION","EXPLICIT","FALSE","FOR","FOREIGN","FREEZE","FROM","FULL","GLOBALDICT256","GLOBALDICT64K","GROUP","IDENTITY","IGNORE","ILIKE","IN","INITIALLY","INNER","INTO","IS","ISNULL","LANGUAGE","LEADING","LIKE","LIMIT","LOCALTIME","LOCALTIMESTAMP","LUN","LUNS","MINUS","NATURAL","NEW","NOT","NOTNULL","NULL","NULLS","OFF","OFFLINE","OFFSET","OID","OLD","ON","ONLY","OPEN","ORDER","OUTER","OVERLAPS","PARALLEL","PARTITION","PERCENT","PERMISSIONS","PLACING","PRIMARY","RECOVER","REFERENCES","REJECTLOG","RESORT","RESPECT","RESTORE","SIMILAR","SNAPSHOT","SOME","SYSTEM","TABLE","TAG","TDES","THEN","TIMESTAMP","TO","TOP","TRAILING","TRUE","UNIQUE","USING","VERBOSE","WALLET","WITHOUT","ACCEPTANYDATE","ACCEPTINVCHARS","BLANKSASNULL","DATEFORMAT","EMPTYASNULL","ENCODING","ESCAPE","EXPLICIT_IDS","FILLRECORD","IGNOREBLANKLINES","IGNOREHEADER","REMOVEQUOTES","ROUNDEC","TIMEFORMAT","TRIMBLANKS","TRUNCATECOLUMNS","COMPROWS","COMPUPDATE","MAXERROR","NOLOAD","STATUPDATE","FORMAT","CSV","DELIMITER","FIXEDWIDTH","SHAPEFILE","AVRO","JSON","PARQUET","ORC","ACCESS_KEY_ID","CREDENTIALS","ENCRYPTED","IAM_ROLE","MASTER_SYMMETRIC_KEY","SECRET_ACCESS_KEY","SESSION_TOKEN","BZIP2","GZIP","LZOP","ZSTD","MANIFEST","READRATIO","REGION","SSH","RAW","AZ64","BYTEDICT","DELTA","DELTA32K","LZO","MOSTLY8","MOSTLY16","MOSTLY32","RUNLENGTH","TEXT255","TEXT32K","CATALOG_ROLE","SECRET_ARN","EXTERNAL","AUTO","EVEN","KEY","PREDICATE","COMPRESSION"],T.dataTypes=["ARRAY","BIGINT","BPCHAR","CHAR","CHARACTER 
VARYING","CHARACTER","DECIMAL","INT","INT2","INT4","INT8","INTEGER","NCHAR","NUMERIC","NVARCHAR","SMALLINT","TEXT","VARBYTE","VARCHAR"]},7146:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.singlestoredb=void 0;const A=R(7163),e=R(6831),S=R(7882),I=R(389),O=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT | DISTINCTROW]"]),N=(0,A.expandPhrases)(["WITH","FROM","WHERE","GROUP BY","HAVING","PARTITION BY","ORDER BY","LIMIT","OFFSET","INSERT [IGNORE] [INTO]","VALUES","REPLACE [INTO]","ON DUPLICATE KEY UPDATE","SET","CREATE [OR REPLACE] [TEMPORARY] PROCEDURE [IF NOT EXISTS]","CREATE [OR REPLACE] [EXTERNAL] FUNCTION"]),t=(0,A.expandPhrases)(["CREATE [ROWSTORE] [REFERENCE | TEMPORARY | GLOBAL TEMPORARY] TABLE [IF NOT EXISTS]"]),L=(0,A.expandPhrases)(["CREATE VIEW","UPDATE","DELETE [FROM]","DROP [TEMPORARY] TABLE [IF EXISTS]","ALTER [ONLINE] TABLE","ADD [COLUMN]","ADD [UNIQUE] {INDEX | KEY}","DROP [COLUMN]","MODIFY [COLUMN]","CHANGE","RENAME [TO | AS]","TRUNCATE [TABLE]","ADD AGGREGATOR","ADD LEAF","AGGREGATOR SET AS MASTER","ALTER DATABASE","ALTER PIPELINE","ALTER RESOURCE POOL","ALTER USER","ALTER VIEW","ANALYZE TABLE","ATTACH DATABASE","ATTACH LEAF","ATTACH LEAF ALL","BACKUP DATABASE","BINLOG","BOOTSTRAP AGGREGATOR","CACHE INDEX","CALL","CHANGE","CHANGE MASTER TO","CHANGE REPLICATION FILTER","CHANGE REPLICATION SOURCE TO","CHECK BLOB CHECKSUM","CHECK TABLE","CHECKSUM TABLE","CLEAR ORPHAN DATABASES","CLONE","COMMIT","CREATE DATABASE","CREATE GROUP","CREATE INDEX","CREATE LINK","CREATE MILESTONE","CREATE PIPELINE","CREATE RESOURCE POOL","CREATE ROLE","CREATE USER","DEALLOCATE PREPARE","DESCRIBE","DETACH DATABASE","DETACH PIPELINE","DROP DATABASE","DROP FUNCTION","DROP INDEX","DROP LINK","DROP PIPELINE","DROP PROCEDURE","DROP RESOURCE POOL","DROP ROLE","DROP USER","DROP VIEW","EXECUTE","EXPLAIN","FLUSH","FORCE","GRANT","HANDLER","HELP","KILL CONNECTION","KILLALL QUERIES","LOAD DATA","LOAD INDEX INTO CACHE","LOAD XML","LOCK INSTANCE FOR BACKUP","LOCK TABLES","MASTER_POS_WAIT","OPTIMIZE TABLE","PREPARE","PURGE BINARY LOGS","REBALANCE PARTITIONS","RELEASE SAVEPOINT","REMOVE AGGREGATOR","REMOVE LEAF","REPAIR TABLE","REPLACE","REPLICATE DATABASE","RESET","RESET MASTER","RESET PERSIST","RESET REPLICA","RESET SLAVE","RESTART","RESTORE DATABASE","RESTORE REDUNDANCY","REVOKE","ROLLBACK","ROLLBACK TO SAVEPOINT","SAVEPOINT","SET CHARACTER SET","SET DEFAULT ROLE","SET NAMES","SET PASSWORD","SET RESOURCE GROUP","SET ROLE","SET TRANSACTION","SHOW","SHOW CHARACTER SET","SHOW COLLATION","SHOW COLUMNS","SHOW CREATE DATABASE","SHOW CREATE FUNCTION","SHOW CREATE PIPELINE","SHOW CREATE PROCEDURE","SHOW CREATE TABLE","SHOW CREATE USER","SHOW CREATE VIEW","SHOW DATABASES","SHOW ENGINE","SHOW ENGINES","SHOW ERRORS","SHOW FUNCTION CODE","SHOW FUNCTION STATUS","SHOW GRANTS","SHOW INDEX","SHOW MASTER STATUS","SHOW OPEN TABLES","SHOW PLUGINS","SHOW PRIVILEGES","SHOW PROCEDURE CODE","SHOW PROCEDURE STATUS","SHOW PROCESSLIST","SHOW PROFILE","SHOW PROFILES","SHOW RELAYLOG EVENTS","SHOW REPLICA STATUS","SHOW REPLICAS","SHOW SLAVE","SHOW SLAVE HOSTS","SHOW STATUS","SHOW TABLE STATUS","SHOW TABLES","SHOW VARIABLES","SHOW WARNINGS","SHUTDOWN","SNAPSHOT DATABASE","SOURCE_POS_WAIT","START GROUP_REPLICATION","START PIPELINE","START REPLICA","START SLAVE","START TRANSACTION","STOP GROUP_REPLICATION","STOP PIPELINE","STOP REPLICA","STOP REPLICATING","STOP SLAVE","TEST PIPELINE","UNLOCK INSTANCE","UNLOCK TABLES","USE","XA","ITERATE","LEAVE","LOOP","REPEAT","RETURN","WHILE"]),C=(0,A.expandPhrases)(["UNION [ALL | 
DISTINCT]","EXCEPT","INTERSECT","MINUS"]),_=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT | FULL} [OUTER] JOIN","{INNER | CROSS} JOIN","NATURAL {LEFT | RIGHT} [OUTER] JOIN","STRAIGHT_JOIN"]),s=(0,A.expandPhrases)(["ON DELETE","ON UPDATE","CHARACTER SET","{ROWS | RANGE} BETWEEN","IDENTIFIED BY"]),r=(0,A.expandPhrases)([]);T.singlestoredb={name:"singlestoredb",tokenizerOptions:{reservedSelect:O,reservedClauses:[...N,...t,...L],reservedSetOperations:C,reservedJoins:_,reservedKeywordPhrases:s,reservedDataTypePhrases:r,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:I.functions,stringTypes:['""-qq-bs',"''-qq-bs",{quote:"''-raw",prefixes:["B","X"],requirePrefix:!0}],identTypes:["``"],identChars:{first:"$",rest:"$",allowFirstCharNumber:!0},variableTypes:[{regex:"@@?[A-Za-z0-9_$]+"},{quote:"``",prefixes:["@"],requirePrefix:!0}],lineCommentTypes:["--","#"],operators:[":=","&","|","^","~","<<",">>","<=>","&&","||","::","::$","::%",":>","!:>","*.*"],postProcess:e.postProcess},formatOptions:{alwaysDenseOperators:["::","::$","::%"],onelineClauses:[...t,...L],tabularOnelineClauses:L}}},389:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 0,T.functions=["ABS","ACOS","ADDDATE","ADDTIME","AES_DECRYPT","AES_ENCRYPT","ANY_VALUE","APPROX_COUNT_DISTINCT","APPROX_COUNT_DISTINCT_ACCUMULATE","APPROX_COUNT_DISTINCT_COMBINE","APPROX_COUNT_DISTINCT_ESTIMATE","APPROX_GEOGRAPHY_INTERSECTS","APPROX_PERCENTILE","ASCII","ASIN","ATAN","ATAN2","AVG","BIN","BINARY","BIT_AND","BIT_COUNT","BIT_OR","BIT_XOR","CAST","CEIL","CEILING","CHAR","CHARACTER_LENGTH","CHAR_LENGTH","CHARSET","COALESCE","COERCIBILITY","COLLATION","COLLECT","CONCAT","CONCAT_WS","CONNECTION_ID","CONV","CONVERT","CONVERT_TZ","COS","COT","COUNT","CUME_DIST","CURDATE","CURRENT_DATE","CURRENT_ROLE","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_USER","CURTIME","DATABASE","DATE","DATE_ADD","DATEDIFF","DATE_FORMAT","DATE_SUB","DATE_TRUNC","DAY","DAYNAME","DAYOFMONTH","DAYOFWEEK","DAYOFYEAR","DECODE","DEFAULT","DEGREES","DENSE_RANK","DIV","DOT_PRODUCT","ELT","EUCLIDEAN_DISTANCE","EXP","EXTRACT","FIELD","FIRST","FIRST_VALUE","FLOOR","FORMAT","FOUND_ROWS","FROM_BASE64","FROM_DAYS","FROM_UNIXTIME","GEOGRAPHY_AREA","GEOGRAPHY_CONTAINS","GEOGRAPHY_DISTANCE","GEOGRAPHY_INTERSECTS","GEOGRAPHY_LATITUDE","GEOGRAPHY_LENGTH","GEOGRAPHY_LONGITUDE","GEOGRAPHY_POINT","GEOGRAPHY_WITHIN_DISTANCE","GEOMETRY_AREA","GEOMETRY_CONTAINS","GEOMETRY_DISTANCE","GEOMETRY_FILTER","GEOMETRY_INTERSECTS","GEOMETRY_LENGTH","GEOMETRY_POINT","GEOMETRY_WITHIN_DISTANCE","GEOMETRY_X","GEOMETRY_Y","GREATEST","GROUPING","GROUP_CONCAT","HEX","HIGHLIGHT","HOUR","ICU_VERSION","IF","IFNULL","INET_ATON","INET_NTOA","INET6_ATON","INET6_NTOA","INITCAP","INSERT","INSTR","INTERVAL","IS","IS 
NULL","JSON_AGG","JSON_ARRAY_CONTAINS_DOUBLE","JSON_ARRAY_CONTAINS_JSON","JSON_ARRAY_CONTAINS_STRING","JSON_ARRAY_PUSH_DOUBLE","JSON_ARRAY_PUSH_JSON","JSON_ARRAY_PUSH_STRING","JSON_DELETE_KEY","JSON_EXTRACT_DOUBLE","JSON_EXTRACT_JSON","JSON_EXTRACT_STRING","JSON_EXTRACT_BIGINT","JSON_GET_TYPE","JSON_LENGTH","JSON_SET_DOUBLE","JSON_SET_JSON","JSON_SET_STRING","JSON_SPLICE_DOUBLE","JSON_SPLICE_JSON","JSON_SPLICE_STRING","LAG","LAST_DAY","LAST_VALUE","LCASE","LEAD","LEAST","LEFT","LENGTH","LIKE","LN","LOCALTIME","LOCALTIMESTAMP","LOCATE","LOG","LOG10","LOG2","LPAD","LTRIM","MATCH","MAX","MD5","MEDIAN","MICROSECOND","MIN","MINUTE","MOD","MONTH","MONTHNAME","MONTHS_BETWEEN","NOT","NOW","NTH_VALUE","NTILE","NULLIF","OCTET_LENGTH","PERCENT_RANK","PERCENTILE_CONT","PERCENTILE_DISC","PI","PIVOT","POSITION","POW","POWER","QUARTER","QUOTE","RADIANS","RAND","RANK","REGEXP","REPEAT","REPLACE","REVERSE","RIGHT","RLIKE","ROUND","ROW_COUNT","ROW_NUMBER","RPAD","RTRIM","SCALAR","SCHEMA","SEC_TO_TIME","SHA1","SHA2","SIGMOID","SIGN","SIN","SLEEP","SPLIT","SOUNDEX","SOUNDS LIKE","SOURCE_POS_WAIT","SPACE","SQRT","STDDEV","STDDEV_POP","STDDEV_SAMP","STR_TO_DATE","SUBDATE","SUBSTR","SUBSTRING","SUBSTRING_INDEX","SUM","SYS_GUID","TAN","TIME","TIMEDIFF","TIME_BUCKET","TIME_FORMAT","TIMESTAMP","TIMESTAMPADD","TIMESTAMPDIFF","TIME_TO_SEC","TO_BASE64","TO_CHAR","TO_DAYS","TO_JSON","TO_NUMBER","TO_SECONDS","TO_TIMESTAMP","TRIM","TRUNC","TRUNCATE","UCASE","UNHEX","UNIX_TIMESTAMP","UPDATEXML","UPPER","UTC_DATE","UTC_TIME","UTC_TIMESTAMP","UUID","VALUES","VARIANCE","VAR_POP","VAR_SAMP","VECTOR_SUB","VERSION","WEEK","WEEKDAY","WEEKOFYEAR","YEAR"]},7882:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 0,T.keywords=["ADD","ALL","ALTER","ANALYZE","AND","AS","ASC","ASENSITIVE","BEFORE","BETWEEN","_BINARY","BOTH","BY","CALL","CASCADE","CASE","CHANGE","CHECK","COLLATE","COLUMN","CONDITION","CONSTRAINT","CONTINUE","CONVERT","CREATE","CROSS","CURRENT_DATE","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_USER","CURSOR","DATABASE","DATABASES","DAY_HOUR","DAY_MICROSECOND","DAY_MINUTE","DAY_SECOND","DECLARE","DEFAULT","DELAYED","DELETE","DESC","DESCRIBE","DETERMINISTIC","DISTINCT","DISTINCTROW","DIV","DROP","DUAL","EACH","ELSE","ELSEIF","ENCLOSED","ESCAPED","EXCEPT","EXISTS","EXIT","EXPLAIN","EXTRA_JOIN","FALSE","FETCH","FOR","FORCE","FORCE_COMPILED_MODE","FORCE_INTERPRETER_MODE","FOREIGN","FROM","FULL","FULLTEXT","GRANT","GROUP","HAVING","HEARTBEAT_NO_LOGGING","HIGH_PRIORITY","HOUR_MICROSECOND","HOUR_MINUTE","HOUR_SECOND","IF","IGNORE","IN","INDEX","INFILE","INNER","INOUT","INSENSITIVE","INSERT","IN","_INTERNAL_DYNAMIC_TYPECAST","INTERSECT","INTERVAL","INTO","ITERATE","JOIN","KEY","KEYS","KILL","LEADING","LEAVE","LEFT","LIKE","LIMIT","LINES","LOAD","LOCALTIME","LOCALTIMESTAMP","LOCK","LOOP","LOW_PRIORITY","MATCH","MAXVALUE","MINUS","MINUTE_MICROSECOND","MINUTE_SECOND","MOD","MODIFIES","NATURAL","NO_QUERY_REWRITE","NOT","NO_WRITE_TO_BINLOG","NO_QUERY_REWRITE","NULL","ON","OPTIMIZE","OPTION","OPTIONALLY","OR","ORDER","OUT","OUTER","OUTFILE","OVER","PRIMARY","PROCEDURE","PURGE","RANGE","READ","READS","REFERENCES","REGEXP","RELEASE","RENAME","REPEAT","REPLACE","REQUIRE","RESTRICT","RETURN","REVOKE","RIGHT","RIGHT_ANTI_JOIN","RIGHT_SEMI_JOIN","RIGHT_STRAIGHT_JOIN","RLIKE","SCHEMA","SCHEMAS","SECOND_MICROSECOND","SELECT","SEMI_JOIN","SENSITIVE","SEPARATOR","SET","SHOW","SIGNAL","SPATIAL","SPECIFIC","SQL","SQL_BIG_RESULT","SQL_BUFFER_RESULT","SQL_CACHE","SQL_CALC_FOUND_ROWS","SQLEXCEPTION","SQL_N
O_CACHE","SQL_NO_LOGGING","SQL_SMALL_RESULT","SQLSTATE","SQLWARNING","STRAIGHT_JOIN","TABLE","TERMINATED","THEN","TO","TRAILING","TRIGGER","TRUE","UNBOUNDED","UNDO","UNION","UNIQUE","UNLOCK","UPDATE","USAGE","USE","USING","UTC_DATE","UTC_TIME","UTC_TIMESTAMP","_UTF8","VALUES","WHEN","WHERE","WHILE","WINDOW","WITH","WITHIN","WRITE","XOR","YEAR_MONTH","ZEROFILL"],T.dataTypes=["BIGINT","BINARY","BIT","BLOB","CHAR","CHARACTER","DATETIME","DEC","DECIMAL","DOUBLE PRECISION","DOUBLE","ENUM","FIXED","FLOAT","FLOAT4","FLOAT8","INT","INT1","INT2","INT3","INT4","INT8","INTEGER","LONG","LONGBLOB","LONGTEXT","MEDIUMBLOB","MEDIUMINT","MEDIUMTEXT","MIDDLEINT","NATIONAL CHAR","NATIONAL VARCHAR","NUMERIC","PRECISION","REAL","SMALLINT","TEXT","TIME","TIMESTAMP","TINYBLOB","TINYINT","TINYTEXT","UNSIGNED","VARBINARY","VARCHAR","VARCHARACTER","YEAR"]},3686:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.snowflake=void 0;const A=R(7163),e=R(7105),S=R(5390),I=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT]"]),O=(0,A.expandPhrases)(["WITH [RECURSIVE]","FROM","WHERE","GROUP BY","HAVING","PARTITION BY","ORDER BY","QUALIFY","LIMIT","OFFSET","FETCH [FIRST | NEXT]","INSERT [OVERWRITE] [ALL INTO | INTO | ALL | FIRST]","{THEN | ELSE} INTO","VALUES","SET","CLUSTER BY","[WITH] {MASKING POLICY | TAG | ROW ACCESS POLICY}","COPY GRANTS","USING TEMPLATE","MERGE INTO","WHEN MATCHED [AND]","THEN {UPDATE SET | DELETE}","WHEN NOT MATCHED THEN INSERT"]),N=(0,A.expandPhrases)(["CREATE [OR REPLACE] [VOLATILE] TABLE [IF NOT EXISTS]","CREATE [OR REPLACE] [LOCAL | GLOBAL] {TEMP|TEMPORARY} TABLE [IF NOT EXISTS]"]),t=(0,A.expandPhrases)(["CREATE [OR REPLACE] [SECURE] [RECURSIVE] VIEW [IF NOT EXISTS]","UPDATE","DELETE FROM","DROP TABLE [IF EXISTS]","ALTER TABLE [IF EXISTS]","RENAME TO","SWAP WITH","[SUSPEND | RESUME] RECLUSTER","DROP CLUSTERING KEY","ADD [COLUMN]","RENAME COLUMN","{ALTER | MODIFY} [COLUMN]","DROP [COLUMN]","{ADD | ALTER | MODIFY | DROP} [CONSTRAINT]","RENAME CONSTRAINT","{ADD | DROP} SEARCH OPTIMIZATION","{SET | UNSET} TAG","{ADD | DROP} ROW ACCESS POLICY","DROP ALL ROW ACCESS POLICIES","{SET | DROP} DEFAULT","{SET | DROP} NOT NULL","SET DATA TYPE","UNSET COMMENT","{SET | UNSET} MASKING POLICY","TRUNCATE [TABLE] [IF EXISTS]","ALTER ACCOUNT","ALTER API INTEGRATION","ALTER CONNECTION","ALTER DATABASE","ALTER EXTERNAL TABLE","ALTER FAILOVER GROUP","ALTER FILE FORMAT","ALTER FUNCTION","ALTER INTEGRATION","ALTER MASKING POLICY","ALTER MATERIALIZED VIEW","ALTER NETWORK POLICY","ALTER NOTIFICATION INTEGRATION","ALTER PIPE","ALTER PROCEDURE","ALTER REPLICATION GROUP","ALTER RESOURCE MONITOR","ALTER ROLE","ALTER ROW ACCESS POLICY","ALTER SCHEMA","ALTER SECURITY INTEGRATION","ALTER SEQUENCE","ALTER SESSION","ALTER SESSION POLICY","ALTER SHARE","ALTER STAGE","ALTER STORAGE INTEGRATION","ALTER STREAM","ALTER TAG","ALTER TASK","ALTER USER","ALTER VIEW","ALTER WAREHOUSE","BEGIN","CALL","COMMIT","COPY INTO","CREATE ACCOUNT","CREATE API INTEGRATION","CREATE CONNECTION","CREATE DATABASE","CREATE EXTERNAL FUNCTION","CREATE EXTERNAL TABLE","CREATE FAILOVER GROUP","CREATE FILE FORMAT","CREATE FUNCTION","CREATE INTEGRATION","CREATE MANAGED ACCOUNT","CREATE MASKING POLICY","CREATE MATERIALIZED VIEW","CREATE NETWORK POLICY","CREATE NOTIFICATION INTEGRATION","CREATE PIPE","CREATE PROCEDURE","CREATE REPLICATION GROUP","CREATE RESOURCE MONITOR","CREATE ROLE","CREATE ROW ACCESS POLICY","CREATE SCHEMA","CREATE SECURITY INTEGRATION","CREATE SEQUENCE","CREATE SESSION POLICY","CREATE SHARE","CREATE STAGE","CREATE STORAGE 
INTEGRATION","CREATE STREAM","CREATE TAG","CREATE TASK","CREATE USER","CREATE WAREHOUSE","DELETE","DESCRIBE DATABASE","DESCRIBE EXTERNAL TABLE","DESCRIBE FILE FORMAT","DESCRIBE FUNCTION","DESCRIBE INTEGRATION","DESCRIBE MASKING POLICY","DESCRIBE MATERIALIZED VIEW","DESCRIBE NETWORK POLICY","DESCRIBE PIPE","DESCRIBE PROCEDURE","DESCRIBE RESULT","DESCRIBE ROW ACCESS POLICY","DESCRIBE SCHEMA","DESCRIBE SEQUENCE","DESCRIBE SESSION POLICY","DESCRIBE SHARE","DESCRIBE STAGE","DESCRIBE STREAM","DESCRIBE TABLE","DESCRIBE TASK","DESCRIBE TRANSACTION","DESCRIBE USER","DESCRIBE VIEW","DESCRIBE WAREHOUSE","DROP CONNECTION","DROP DATABASE","DROP EXTERNAL TABLE","DROP FAILOVER GROUP","DROP FILE FORMAT","DROP FUNCTION","DROP INTEGRATION","DROP MANAGED ACCOUNT","DROP MASKING POLICY","DROP MATERIALIZED VIEW","DROP NETWORK POLICY","DROP PIPE","DROP PROCEDURE","DROP REPLICATION GROUP","DROP RESOURCE MONITOR","DROP ROLE","DROP ROW ACCESS POLICY","DROP SCHEMA","DROP SEQUENCE","DROP SESSION POLICY","DROP SHARE","DROP STAGE","DROP STREAM","DROP TAG","DROP TASK","DROP USER","DROP VIEW","DROP WAREHOUSE","EXECUTE IMMEDIATE","EXECUTE TASK","EXPLAIN","GET","GRANT OWNERSHIP","GRANT ROLE","INSERT","LIST","MERGE","PUT","REMOVE","REVOKE ROLE","ROLLBACK","SHOW COLUMNS","SHOW CONNECTIONS","SHOW DATABASES","SHOW DATABASES IN FAILOVER GROUP","SHOW DATABASES IN REPLICATION GROUP","SHOW DELEGATED AUTHORIZATIONS","SHOW EXTERNAL FUNCTIONS","SHOW EXTERNAL TABLES","SHOW FAILOVER GROUPS","SHOW FILE FORMATS","SHOW FUNCTIONS","SHOW GLOBAL ACCOUNTS","SHOW GRANTS","SHOW INTEGRATIONS","SHOW LOCKS","SHOW MANAGED ACCOUNTS","SHOW MASKING POLICIES","SHOW MATERIALIZED VIEWS","SHOW NETWORK POLICIES","SHOW OBJECTS","SHOW ORGANIZATION ACCOUNTS","SHOW PARAMETERS","SHOW PIPES","SHOW PRIMARY KEYS","SHOW PROCEDURES","SHOW REGIONS","SHOW REPLICATION ACCOUNTS","SHOW REPLICATION DATABASES","SHOW REPLICATION GROUPS","SHOW RESOURCE MONITORS","SHOW ROLES","SHOW ROW ACCESS POLICIES","SHOW SCHEMAS","SHOW SEQUENCES","SHOW SESSION POLICIES","SHOW SHARES","SHOW SHARES IN FAILOVER GROUP","SHOW SHARES IN REPLICATION GROUP","SHOW STAGES","SHOW STREAMS","SHOW TABLES","SHOW TAGS","SHOW TASKS","SHOW TRANSACTIONS","SHOW USER FUNCTIONS","SHOW USERS","SHOW VARIABLES","SHOW VIEWS","SHOW WAREHOUSES","TRUNCATE MATERIALIZED VIEW","UNDROP DATABASE","UNDROP SCHEMA","UNDROP TABLE","UNDROP TAG","UNSET","USE DATABASE","USE ROLE","USE SCHEMA","USE SECONDARY ROLES","USE WAREHOUSE"]),L=(0,A.expandPhrases)(["UNION [ALL]","MINUS","EXCEPT","INTERSECT"]),C=(0,A.expandPhrases)(["[INNER] JOIN","[NATURAL] {LEFT | RIGHT | FULL} [OUTER] JOIN","{CROSS | NATURAL} JOIN"]),_=(0,A.expandPhrases)(["{ROWS | RANGE} BETWEEN","ON {UPDATE | DELETE} [SET NULL | SET DEFAULT]"]),s=(0,A.expandPhrases)([]);T.snowflake={name:"snowflake",tokenizerOptions:{reservedSelect:I,reservedClauses:[...O,...N,...t],reservedSetOperations:L,reservedJoins:C,reservedKeywordPhrases:_,reservedDataTypePhrases:s,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:e.functions,stringTypes:["$$","''-qq-bs"],identTypes:['""-qq'],variableTypes:[{regex:"[$][1-9]\\d*"},{regex:"[$][_a-zA-Z][_a-zA-Z0-9$]*"}],extraParens:["[]"],identChars:{rest:"$"},lineCommentTypes:["--","//"],operators:["%","::","||","=>",":=","->"],propertyAccessOperators:[":"]},formatOptions:{alwaysDenseOperators:["::"],onelineClauses:[...N,...t],tabularOnelineClauses:t}}},7105:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 
0,T.functions=["ABS","ACOS","ACOSH","ADD_MONTHS","ALL_USER_NAMES","ANY_VALUE","APPROX_COUNT_DISTINCT","APPROX_PERCENTILE","APPROX_PERCENTILE_ACCUMULATE","APPROX_PERCENTILE_COMBINE","APPROX_PERCENTILE_ESTIMATE","APPROX_TOP_K","APPROX_TOP_K_ACCUMULATE","APPROX_TOP_K_COMBINE","APPROX_TOP_K_ESTIMATE","APPROXIMATE_JACCARD_INDEX","APPROXIMATE_SIMILARITY","ARRAY_AGG","ARRAY_APPEND","ARRAY_CAT","ARRAY_COMPACT","ARRAY_CONSTRUCT","ARRAY_CONSTRUCT_COMPACT","ARRAY_CONTAINS","ARRAY_INSERT","ARRAY_INTERSECTION","ARRAY_POSITION","ARRAY_PREPEND","ARRAY_SIZE","ARRAY_SLICE","ARRAY_TO_STRING","ARRAY_UNION_AGG","ARRAY_UNIQUE_AGG","ARRAYS_OVERLAP","AS_ARRAY","AS_BINARY","AS_BOOLEAN","AS_CHAR","AS_VARCHAR","AS_DATE","AS_DECIMAL","AS_NUMBER","AS_DOUBLE","AS_REAL","AS_INTEGER","AS_OBJECT","AS_TIME","AS_TIMESTAMP_LTZ","AS_TIMESTAMP_NTZ","AS_TIMESTAMP_TZ","ASCII","ASIN","ASINH","ATAN","ATAN2","ATANH","AUTO_REFRESH_REGISTRATION_HISTORY","AUTOMATIC_CLUSTERING_HISTORY","AVG","BASE64_DECODE_BINARY","BASE64_DECODE_STRING","BASE64_ENCODE","BIT_LENGTH","BITAND","BITAND_AGG","BITMAP_BIT_POSITION","BITMAP_BUCKET_NUMBER","BITMAP_CONSTRUCT_AGG","BITMAP_COUNT","BITMAP_OR_AGG","BITNOT","BITOR","BITOR_AGG","BITSHIFTLEFT","BITSHIFTRIGHT","BITXOR","BITXOR_AGG","BOOLAND","BOOLAND_AGG","BOOLNOT","BOOLOR","BOOLOR_AGG","BOOLXOR","BOOLXOR_AGG","BUILD_SCOPED_FILE_URL","BUILD_STAGE_FILE_URL","CASE","CAST","CBRT","CEIL","CHARINDEX","CHECK_JSON","CHECK_XML","CHR","CHAR","COALESCE","COLLATE","COLLATION","COMPLETE_TASK_GRAPHS","COMPRESS","CONCAT","CONCAT_WS","CONDITIONAL_CHANGE_EVENT","CONDITIONAL_TRUE_EVENT","CONTAINS","CONVERT_TIMEZONE","COPY_HISTORY","CORR","COS","COSH","COT","COUNT","COUNT_IF","COVAR_POP","COVAR_SAMP","CUME_DIST","CURRENT_ACCOUNT","CURRENT_AVAILABLE_ROLES","CURRENT_CLIENT","CURRENT_DATABASE","CURRENT_DATE","CURRENT_IP_ADDRESS","CURRENT_REGION","CURRENT_ROLE","CURRENT_SCHEMA","CURRENT_SCHEMAS","CURRENT_SECONDARY_ROLES","CURRENT_SESSION","CURRENT_STATEMENT","CURRENT_TASK_GRAPHS","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_TRANSACTION","CURRENT_USER","CURRENT_VERSION","CURRENT_WAREHOUSE","DATA_TRANSFER_HISTORY","DATABASE_REFRESH_HISTORY","DATABASE_REFRESH_PROGRESS","DATABASE_REFRESH_PROGRESS_BY_JOB","DATABASE_STORAGE_USAGE_HISTORY","DATE_FROM_PARTS","DATE_PART","DATE_TRUNC","DATEADD","DATEDIFF","DAYNAME","DECODE","DECOMPRESS_BINARY","DECOMPRESS_STRING","DECRYPT","DECRYPT_RAW","DEGREES","DENSE_RANK","DIV0","EDITDISTANCE","ENCRYPT","ENCRYPT_RAW","ENDSWITH","EQUAL_NULL","EXP","EXPLAIN_JSON","EXTERNAL_FUNCTIONS_HISTORY","EXTERNAL_TABLE_FILES","EXTERNAL_TABLE_FILE_REGISTRATION_HISTORY","EXTRACT","EXTRACT_SEMANTIC_CATEGORIES","FACTORIAL","FILTER","FIRST_VALUE","FLATTEN","FLOOR","GENERATE_COLUMN_DESCRIPTION","GENERATOR","GET","GET_ABSOLUTE_PATH","GET_DDL","GET_IGNORE_CASE","GET_OBJECT_REFERENCES","GET_PATH","GET_PRESIGNED_URL","GET_RELATIVE_PATH","GET_STAGE_LOCATION","GETBIT","GREATEST","GREATEST_IGNORE_NULLS","GROUPING","GROUPING_ID","HASH","HASH_AGG","HAVERSINE","HEX_DECODE_BINARY","HEX_DECODE_STRING","HEX_ENCODE","HLL","HLL_ACCUMULATE","HLL_COMBINE","HLL_ESTIMATE","HLL_EXPORT","HLL_IMPORT","HOUR","MINUTE","SECOND","IDENTIFIER","IFF","IFNULL","ILIKE","ILIKE 
ANY","INFER_SCHEMA","INITCAP","INSERT","INVOKER_ROLE","INVOKER_SHARE","IS_ARRAY","IS_BINARY","IS_BOOLEAN","IS_CHAR","IS_VARCHAR","IS_DATE","IS_DATE_VALUE","IS_DECIMAL","IS_DOUBLE","IS_REAL","IS_GRANTED_TO_INVOKER_ROLE","IS_INTEGER","IS_NULL_VALUE","IS_OBJECT","IS_ROLE_IN_SESSION","IS_TIME","IS_TIMESTAMP_LTZ","IS_TIMESTAMP_NTZ","IS_TIMESTAMP_TZ","JAROWINKLER_SIMILARITY","JSON_EXTRACT_PATH_TEXT","KURTOSIS","LAG","LAST_DAY","LAST_QUERY_ID","LAST_TRANSACTION","LAST_VALUE","LEAD","LEAST","LEFT","LENGTH","LEN","LIKE","LIKE ALL","LIKE ANY","LISTAGG","LN","LOCALTIME","LOCALTIMESTAMP","LOG","LOGIN_HISTORY","LOGIN_HISTORY_BY_USER","LOWER","LPAD","LTRIM","MATERIALIZED_VIEW_REFRESH_HISTORY","MD5","MD5_HEX","MD5_BINARY","MD5_NUMBER — Obsoleted","MD5_NUMBER_LOWER64","MD5_NUMBER_UPPER64","MEDIAN","MIN","MAX","MINHASH","MINHASH_COMBINE","MOD","MODE","MONTHNAME","MONTHS_BETWEEN","NEXT_DAY","NORMAL","NTH_VALUE","NTILE","NULLIF","NULLIFZERO","NVL","NVL2","OBJECT_AGG","OBJECT_CONSTRUCT","OBJECT_CONSTRUCT_KEEP_NULL","OBJECT_DELETE","OBJECT_INSERT","OBJECT_KEYS","OBJECT_PICK","OCTET_LENGTH","PARSE_IP","PARSE_JSON","PARSE_URL","PARSE_XML","PERCENT_RANK","PERCENTILE_CONT","PERCENTILE_DISC","PI","PIPE_USAGE_HISTORY","POLICY_CONTEXT","POLICY_REFERENCES","POSITION","POW","POWER","PREVIOUS_DAY","QUERY_ACCELERATION_HISTORY","QUERY_HISTORY","QUERY_HISTORY_BY_SESSION","QUERY_HISTORY_BY_USER","QUERY_HISTORY_BY_WAREHOUSE","RADIANS","RANDOM","RANDSTR","RANK","RATIO_TO_REPORT","REGEXP","REGEXP_COUNT","REGEXP_INSTR","REGEXP_LIKE","REGEXP_REPLACE","REGEXP_SUBSTR","REGEXP_SUBSTR_ALL","REGR_AVGX","REGR_AVGY","REGR_COUNT","REGR_INTERCEPT","REGR_R2","REGR_SLOPE","REGR_SXX","REGR_SXY","REGR_SYY","REGR_VALX","REGR_VALY","REPEAT","REPLACE","REPLICATION_GROUP_REFRESH_HISTORY","REPLICATION_GROUP_REFRESH_PROGRESS","REPLICATION_GROUP_REFRESH_PROGRESS_BY_JOB","REPLICATION_GROUP_USAGE_HISTORY","REPLICATION_USAGE_HISTORY","REST_EVENT_HISTORY","RESULT_SCAN","REVERSE","RIGHT","RLIKE","ROUND","ROW_NUMBER","RPAD","RTRIM","RTRIMMED_LENGTH","SEARCH_OPTIMIZATION_HISTORY","SEQ1","SEQ2","SEQ4","SEQ8","SERVERLESS_TASK_HISTORY","SHA1","SHA1_HEX","SHA1_BINARY","SHA2","SHA2_HEX","SHA2_BINARY","SIGN","SIN","SINH","SKEW","SOUNDEX","SPACE","SPLIT","SPLIT_PART","SPLIT_TO_TABLE","SQRT","SQUARE","ST_AREA","ST_ASEWKB","ST_ASEWKT","ST_ASGEOJSON","ST_ASWKB","ST_ASBINARY","ST_ASWKT","ST_ASTEXT","ST_AZIMUTH","ST_CENTROID","ST_COLLECT","ST_CONTAINS","ST_COVEREDBY","ST_COVERS","ST_DIFFERENCE","ST_DIMENSION","ST_DISJOINT","ST_DISTANCE","ST_DWITHIN","ST_ENDPOINT","ST_ENVELOPE","ST_GEOGFROMGEOHASH","ST_GEOGPOINTFROMGEOHASH","ST_GEOGRAPHYFROMWKB","ST_GEOGRAPHYFROMWKT","ST_GEOHASH","ST_GEOMETRYFROMWKB","ST_GEOMETRYFROMWKT","ST_HAUSDORFFDISTANCE","ST_INTERSECTION","ST_INTERSECTS","ST_LENGTH","ST_MAKEGEOMPOINT","ST_GEOM_POINT","ST_MAKELINE","ST_MAKEPOINT","ST_POINT","ST_MAKEPOLYGON","ST_POLYGON","ST_NPOINTS","ST_NUMPOINTS","ST_PERIMETER","ST_POINTN","ST_SETSRID","ST_SIMPLIFY","ST_SRID","ST_STARTPOINT","ST_SYMDIFFERENCE","ST_UNION","ST_WITHIN","ST_X","ST_XMAX","ST_XMIN","ST_Y","ST_YMAX","ST_YMIN","STAGE_DIRECTORY_FILE_REGISTRATION_HISTORY","STAGE_STORAGE_USAGE_HISTORY","STARTSWITH","STDDEV","STDDEV_POP","STDDEV_SAMP","STRIP_NULL_VALUE","STRTOK","STRTOK_SPLIT_TO_TABLE","STRTOK_TO_ARRAY","SUBSTR","SUBSTRING","SUM","SYSDATE","SYSTEM$ABORT_SESSION","SYSTEM$ABORT_TRANSACTION","SYSTEM$AUTHORIZE_PRIVATELINK","SYSTEM$AUTHORIZE_STAGE_PRIVATELINK_ACCESS","SYSTEM$BEHAVIOR_CHANGE_BUNDLE_STATUS","SYSTEM$CANCEL_ALL_QUERIES","SYSTEM$CANCEL_QUERY","SYSTEM$CLUSTERING_DEPTH","SYSTEM$CLUSTERI
NG_INFORMATION","SYSTEM$CLUSTERING_RATIO ","SYSTEM$CURRENT_USER_TASK_NAME","SYSTEM$DATABASE_REFRESH_HISTORY ","SYSTEM$DATABASE_REFRESH_PROGRESS","SYSTEM$DATABASE_REFRESH_PROGRESS_BY_JOB ","SYSTEM$DISABLE_BEHAVIOR_CHANGE_BUNDLE","SYSTEM$DISABLE_DATABASE_REPLICATION","SYSTEM$ENABLE_BEHAVIOR_CHANGE_BUNDLE","SYSTEM$ESTIMATE_QUERY_ACCELERATION","SYSTEM$ESTIMATE_SEARCH_OPTIMIZATION_COSTS","SYSTEM$EXPLAIN_JSON_TO_TEXT","SYSTEM$EXPLAIN_PLAN_JSON","SYSTEM$EXTERNAL_TABLE_PIPE_STATUS","SYSTEM$GENERATE_SAML_CSR","SYSTEM$GENERATE_SCIM_ACCESS_TOKEN","SYSTEM$GET_AWS_SNS_IAM_POLICY","SYSTEM$GET_PREDECESSOR_RETURN_VALUE","SYSTEM$GET_PRIVATELINK","SYSTEM$GET_PRIVATELINK_AUTHORIZED_ENDPOINTS","SYSTEM$GET_PRIVATELINK_CONFIG","SYSTEM$GET_SNOWFLAKE_PLATFORM_INFO","SYSTEM$GET_TAG","SYSTEM$GET_TAG_ALLOWED_VALUES","SYSTEM$GET_TAG_ON_CURRENT_COLUMN","SYSTEM$GET_TAG_ON_CURRENT_TABLE","SYSTEM$GLOBAL_ACCOUNT_SET_PARAMETER","SYSTEM$LAST_CHANGE_COMMIT_TIME","SYSTEM$LINK_ACCOUNT_OBJECTS_BY_NAME","SYSTEM$MIGRATE_SAML_IDP_REGISTRATION","SYSTEM$PIPE_FORCE_RESUME","SYSTEM$PIPE_STATUS","SYSTEM$REVOKE_PRIVATELINK","SYSTEM$REVOKE_STAGE_PRIVATELINK_ACCESS","SYSTEM$SET_RETURN_VALUE","SYSTEM$SHOW_OAUTH_CLIENT_SECRETS","SYSTEM$STREAM_GET_TABLE_TIMESTAMP","SYSTEM$STREAM_HAS_DATA","SYSTEM$TASK_DEPENDENTS_ENABLE","SYSTEM$TYPEOF","SYSTEM$USER_TASK_CANCEL_ONGOING_EXECUTIONS","SYSTEM$VERIFY_EXTERNAL_OAUTH_TOKEN","SYSTEM$WAIT","SYSTEM$WHITELIST","SYSTEM$WHITELIST_PRIVATELINK","TAG_REFERENCES","TAG_REFERENCES_ALL_COLUMNS","TAG_REFERENCES_WITH_LINEAGE","TAN","TANH","TASK_DEPENDENTS","TASK_HISTORY","TIME_FROM_PARTS","TIME_SLICE","TIMEADD","TIMEDIFF","TIMESTAMP_FROM_PARTS","TIMESTAMPADD","TIMESTAMPDIFF","TO_ARRAY","TO_BINARY","TO_BOOLEAN","TO_CHAR","TO_VARCHAR","TO_DATE","DATE","TO_DECIMAL","TO_NUMBER","TO_NUMERIC","TO_DOUBLE","TO_GEOGRAPHY","TO_GEOMETRY","TO_JSON","TO_OBJECT","TO_TIME","TIME","TO_TIMESTAMP","TO_TIMESTAMP_LTZ","TO_TIMESTAMP_NTZ","TO_TIMESTAMP_TZ","TO_VARIANT","TO_XML","TRANSLATE","TRIM","TRUNCATE","TRUNC","TRUNC","TRY_BASE64_DECODE_BINARY","TRY_BASE64_DECODE_STRING","TRY_CAST","TRY_HEX_DECODE_BINARY","TRY_HEX_DECODE_STRING","TRY_PARSE_JSON","TRY_TO_BINARY","TRY_TO_BOOLEAN","TRY_TO_DATE","TRY_TO_DECIMAL","TRY_TO_NUMBER","TRY_TO_NUMERIC","TRY_TO_DOUBLE","TRY_TO_GEOGRAPHY","TRY_TO_GEOMETRY","TRY_TO_TIME","TRY_TO_TIMESTAMP","TRY_TO_TIMESTAMP_LTZ","TRY_TO_TIMESTAMP_NTZ","TRY_TO_TIMESTAMP_TZ","TYPEOF","UNICODE","UNIFORM","UPPER","UUID_STRING","VALIDATE","VALIDATE_PIPE_LOAD","VAR_POP","VAR_SAMP","VARIANCE","VARIANCE_SAMP","VARIANCE_POP","WAREHOUSE_LOAD_HISTORY","WAREHOUSE_METERING_HISTORY","WIDTH_BUCKET","XMLGET","YEAR","YEAROFWEEK","YEAROFWEEKISO","DAY","DAYOFMONTH","DAYOFWEEK","DAYOFWEEKISO","DAYOFYEAR","WEEK","WEEK","WEEKOFYEAR","WEEKISO","MONTH","QUARTER","ZEROIFNULL","ZIPF"]},5390:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 
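Assuming this bundle is the sql-formatter library (the expandPhrases helper and the tokenizerOptions/formatOptions shape match its source), the Snowflake dialect defined above would normally be consumed through the library's format entry point. A minimal sketch:

import { format } from "sql-formatter";

// "snowflake" selects the dialect assembled above (QUALIFY, CLUSTER BY,
// $$-quoted strings, dense "::" casts, and so on).
const pretty = format(
  "select id, row_number() over (partition by acct order by ts) rn from events qualify rn = 1",
  { language: "snowflake" }
);
console.log(pretty);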
0,T.keywords=["ACCOUNT","ALL","ALTER","AND","ANY","AS","BETWEEN","BY","CASE","CAST","CHECK","COLUMN","CONNECT","CONNECTION","CONSTRAINT","CREATE","CROSS","CURRENT","CURRENT_DATE","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_USER","DATABASE","DELETE","DISTINCT","DROP","ELSE","EXISTS","FALSE","FOLLOWING","FOR","FROM","FULL","GRANT","GROUP","GSCLUSTER","HAVING","ILIKE","IN","INCREMENT","INNER","INSERT","INTERSECT","INTO","IS","ISSUE","JOIN","LATERAL","LEFT","LIKE","LOCALTIME","LOCALTIMESTAMP","MINUS","NATURAL","NOT","NULL","OF","ON","OR","ORDER","ORGANIZATION","QUALIFY","REGEXP","REVOKE","RIGHT","RLIKE","ROW","ROWS","SAMPLE","SCHEMA","SELECT","SET","SOME","START","TABLE","TABLESAMPLE","THEN","TO","TRIGGER","TRUE","TRY_CAST","UNION","UNIQUE","UPDATE","USING","VALUES","VIEW","WHEN","WHENEVER","WHERE","WITH","COMMENT"],T.dataTypes=["NUMBER","DECIMAL","NUMERIC","INT","INTEGER","BIGINT","SMALLINT","TINYINT","BYTEINT","FLOAT","FLOAT4","FLOAT8","DOUBLE","DOUBLE PRECISION","REAL","VARCHAR","CHAR","CHARACTER","STRING","TEXT","BINARY","VARBINARY","BOOLEAN","DATE","DATETIME","TIME","TIMESTAMP","TIMESTAMP_LTZ","TIMESTAMP_NTZ","TIMESTAMP","TIMESTAMP_TZ","VARIANT","OBJECT","ARRAY","GEOGRAPHY","GEOMETRY"]},9774:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.spark=void 0;const A=R(7163),e=R(2437),S=R(3526),I=R(9177),O=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT]"]),N=(0,A.expandPhrases)(["WITH","FROM","WHERE","GROUP BY","HAVING","WINDOW","PARTITION BY","ORDER BY","SORT BY","CLUSTER BY","DISTRIBUTE BY","LIMIT","INSERT [INTO | OVERWRITE] [TABLE]","VALUES","INSERT OVERWRITE [LOCAL] DIRECTORY","LOAD DATA [LOCAL] INPATH","[OVERWRITE] INTO TABLE"]),t=(0,A.expandPhrases)(["CREATE [EXTERNAL] TABLE [IF NOT EXISTS]"]),L=(0,A.expandPhrases)(["CREATE [OR REPLACE] [GLOBAL TEMPORARY | TEMPORARY] VIEW [IF NOT EXISTS]","DROP TABLE [IF EXISTS]","ALTER TABLE","ADD COLUMNS","DROP {COLUMN | COLUMNS}","RENAME TO","RENAME COLUMN","ALTER COLUMN","TRUNCATE TABLE","LATERAL VIEW","ALTER DATABASE","ALTER VIEW","CREATE DATABASE","CREATE FUNCTION","DROP DATABASE","DROP FUNCTION","DROP VIEW","REPAIR TABLE","USE DATABASE","TABLESAMPLE","PIVOT","TRANSFORM","EXPLAIN","ADD FILE","ADD JAR","ANALYZE TABLE","CACHE TABLE","CLEAR CACHE","DESCRIBE DATABASE","DESCRIBE FUNCTION","DESCRIBE QUERY","DESCRIBE TABLE","LIST FILE","LIST JAR","REFRESH","REFRESH TABLE","REFRESH FUNCTION","RESET","SHOW COLUMNS","SHOW CREATE TABLE","SHOW DATABASES","SHOW FUNCTIONS","SHOW PARTITIONS","SHOW TABLE EXTENDED","SHOW TABLES","SHOW TBLPROPERTIES","SHOW VIEWS","UNCACHE TABLE"]),C=(0,A.expandPhrases)(["UNION [ALL | DISTINCT]","EXCEPT [ALL | DISTINCT]","INTERSECT [ALL | DISTINCT]"]),_=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT | FULL} [OUTER] JOIN","{INNER | CROSS} JOIN","NATURAL [INNER] JOIN","NATURAL {LEFT | RIGHT | FULL} [OUTER] JOIN","[LEFT] {ANTI | SEMI} JOIN","NATURAL [LEFT] {ANTI | SEMI} JOIN"]),s=(0,A.expandPhrases)(["ON DELETE","ON UPDATE","CURRENT ROW","{ROWS | RANGE} 
BETWEEN"]),r=(0,A.expandPhrases)([]);T.spark={name:"spark",tokenizerOptions:{reservedSelect:O,reservedClauses:[...N,...t,...L],reservedSetOperations:C,reservedJoins:_,reservedKeywordPhrases:s,reservedDataTypePhrases:r,supportsXor:!0,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:I.functions,extraParens:["[]"],stringTypes:["''-bs",'""-bs',{quote:"''-raw",prefixes:["R","X"],requirePrefix:!0},{quote:'""-raw',prefixes:["R","X"],requirePrefix:!0}],identTypes:["``"],identChars:{allowFirstCharNumber:!0},variableTypes:[{quote:"{}",prefixes:["$"],requirePrefix:!0}],operators:["%","~","^","|","&","<=>","==","!","||","->"],postProcess:function(E){return E.map(((T,R)=>{const A=E[R-1]||e.EOF_TOKEN,S=E[R+1]||e.EOF_TOKEN;return e.isToken.WINDOW(T)&&S.type===e.TokenType.OPEN_PAREN?Object.assign(Object.assign({},T),{type:e.TokenType.RESERVED_FUNCTION_NAME}):"ITEMS"!==T.text||T.type!==e.TokenType.RESERVED_KEYWORD||"COLLECTION"===A.text&&"TERMINATED"===S.text?T:Object.assign(Object.assign({},T),{type:e.TokenType.IDENTIFIER,text:T.raw})}))}},formatOptions:{onelineClauses:[...t,...L],tabularOnelineClauses:L}}},9177:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 0,T.functions=["APPROX_COUNT_DISTINCT","APPROX_PERCENTILE","AVG","BIT_AND","BIT_OR","BIT_XOR","BOOL_AND","BOOL_OR","COLLECT_LIST","COLLECT_SET","CORR","COUNT","COUNT","COUNT","COUNT_IF","COUNT_MIN_SKETCH","COVAR_POP","COVAR_SAMP","EVERY","FIRST","FIRST_VALUE","GROUPING","GROUPING_ID","KURTOSIS","LAST","LAST_VALUE","MAX","MAX_BY","MEAN","MIN","MIN_BY","PERCENTILE","PERCENTILE","PERCENTILE_APPROX","SKEWNESS","STD","STDDEV","STDDEV_POP","STDDEV_SAMP","SUM","VAR_POP","VAR_SAMP","VARIANCE","CUME_DIST","DENSE_RANK","LAG","LEAD","NTH_VALUE","NTILE","PERCENT_RANK","RANK","ROW_NUMBER","ARRAY","ARRAY_CONTAINS","ARRAY_DISTINCT","ARRAY_EXCEPT","ARRAY_INTERSECT","ARRAY_JOIN","ARRAY_MAX","ARRAY_MIN","ARRAY_POSITION","ARRAY_REMOVE","ARRAY_REPEAT","ARRAY_UNION","ARRAYS_OVERLAP","ARRAYS_ZIP","FLATTEN","SEQUENCE","SHUFFLE","SLICE","SORT_ARRAY","ELEMENT_AT","ELEMENT_AT","MAP_CONCAT","MAP_ENTRIES","MAP_FROM_ARRAYS","MAP_FROM_ENTRIES","MAP_KEYS","MAP_VALUES","STR_TO_MAP","ADD_MONTHS","CURRENT_DATE","CURRENT_DATE","CURRENT_TIMESTAMP","CURRENT_TIMESTAMP","CURRENT_TIMEZONE","DATE_ADD","DATE_FORMAT","DATE_FROM_UNIX_DATE","DATE_PART","DATE_SUB","DATE_TRUNC","DATEDIFF","DAY","DAYOFMONTH","DAYOFWEEK","DAYOFYEAR","EXTRACT","FROM_UNIXTIME","FROM_UTC_TIMESTAMP","HOUR","LAST_DAY","MAKE_DATE","MAKE_DT_INTERVAL","MAKE_INTERVAL","MAKE_TIMESTAMP","MAKE_YM_INTERVAL","MINUTE","MONTH","MONTHS_BETWEEN","NEXT_DAY","NOW","QUARTER","SECOND","SESSION_WINDOW","TIMESTAMP_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_SECONDS","TO_DATE","TO_TIMESTAMP","TO_UNIX_TIMESTAMP","TO_UTC_TIMESTAMP","TRUNC","UNIX_DATE","UNIX_MICROS","UNIX_MILLIS","UNIX_SECONDS","UNIX_TIMESTAMP","WEEKDAY","WEEKOFYEAR","WINDOW","YEAR","FROM_JSON","GET_JSON_OBJECT","JSON_ARRAY_LENGTH","JSON_OBJECT_KEYS","JSON_TUPLE","SCHEMA_OF_JSON","TO_JSON","ABS","ACOS","ACOSH","AGGREGATE","ARRAY_SORT","ASCII","ASIN","ASINH","ASSERT_TRUE","ATAN","ATAN2","ATANH","BASE64","BIN","BIT_COUNT","BIT_GET","BIT_LENGTH","BROUND","BTRIM","CARDINALITY","CBRT","CEIL","CEILING","CHAR_LENGTH","CHARACTER_LENGTH","CHR","CONCAT","CONCAT_WS","CONV","COS","COSH","COT","CRC32","CURRENT_CATALOG","CURRENT_DATABASE","CURRENT_USER","DEGREES","ELT","EXP","EXPM1","FACTORIAL","FIND_IN_SET","FLOOR","FORALL","FORMAT_NUMBER","FORMAT_STRING","FROM_CSV","GETBIT","HASH","HEX","HYPOT","INITCAP","INLINE","INLINE_OUTE
R","INPUT_FILE_BLOCK_LENGTH","INPUT_FILE_BLOCK_START","INPUT_FILE_NAME","INSTR","ISNAN","ISNOTNULL","ISNULL","JAVA_METHOD","LCASE","LEFT","LENGTH","LEVENSHTEIN","LN","LOCATE","LOG","LOG10","LOG1P","LOG2","LOWER","LPAD","LTRIM","MAP_FILTER","MAP_ZIP_WITH","MD5","MOD","MONOTONICALLY_INCREASING_ID","NAMED_STRUCT","NANVL","NEGATIVE","NVL","NVL2","OCTET_LENGTH","OVERLAY","PARSE_URL","PI","PMOD","POSEXPLODE","POSEXPLODE_OUTER","POSITION","POSITIVE","POW","POWER","PRINTF","RADIANS","RAISE_ERROR","RAND","RANDN","RANDOM","REFLECT","REGEXP_EXTRACT","REGEXP_EXTRACT_ALL","REGEXP_LIKE","REGEXP_REPLACE","REPEAT","REPLACE","REVERSE","RIGHT","RINT","ROUND","RPAD","RTRIM","SCHEMA_OF_CSV","SENTENCES","SHA","SHA1","SHA2","SHIFTLEFT","SHIFTRIGHT","SHIFTRIGHTUNSIGNED","SIGN","SIGNUM","SIN","SINH","SOUNDEX","SPACE","SPARK_PARTITION_ID","SPLIT","SQRT","STACK","SUBSTR","SUBSTRING","SUBSTRING_INDEX","TAN","TANH","TO_CSV","TRANSFORM_KEYS","TRANSFORM_VALUES","TRANSLATE","TRIM","TRY_ADD","TRY_DIVIDE","TYPEOF","UCASE","UNBASE64","UNHEX","UPPER","UUID","VERSION","WIDTH_BUCKET","XPATH","XPATH_BOOLEAN","XPATH_DOUBLE","XPATH_FLOAT","XPATH_INT","XPATH_LONG","XPATH_NUMBER","XPATH_SHORT","XPATH_STRING","XXHASH64","ZIP_WITH","CAST","COALESCE","NULLIF"]},3526:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 0,T.keywords=["ADD","AFTER","ALL","ALTER","ANALYZE","AND","ANTI","ANY","ARCHIVE","AS","ASC","AT","AUTHORIZATION","BETWEEN","BOTH","BUCKET","BUCKETS","BY","CACHE","CASCADE","CAST","CHANGE","CHECK","CLEAR","CLUSTER","CLUSTERED","CODEGEN","COLLATE","COLLECTION","COLUMN","COLUMNS","COMMENT","COMMIT","COMPACT","COMPACTIONS","COMPUTE","CONCATENATE","CONSTRAINT","COST","CREATE","CROSS","CUBE","CURRENT","CURRENT_DATE","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_USER","DATA","DATABASE","DATABASES","DAY","DBPROPERTIES","DEFINED","DELETE","DELIMITED","DESC","DESCRIBE","DFS","DIRECTORIES","DIRECTORY","DISTINCT","DISTRIBUTE","DIV","DROP","ESCAPE","ESCAPED","EXCEPT","EXCHANGE","EXISTS","EXPORT","EXTENDED","EXTERNAL","EXTRACT","FALSE","FETCH","FIELDS","FILTER","FILEFORMAT","FIRST","FIRST_VALUE","FOLLOWING","FOR","FOREIGN","FORMAT","FORMATTED","FULL","FUNCTION","FUNCTIONS","GLOBAL","GRANT","GROUP","GROUPING","HOUR","IF","IGNORE","IMPORT","IN","INDEX","INDEXES","INNER","INPATH","INPUTFORMAT","INTERSECT","INTO","IS","ITEMS","KEYS","LAST","LAST_VALUE","LATERAL","LAZY","LEADING","LEFT","LIKE","LINES","LIST","LOCAL","LOCATION","LOCK","LOCKS","LOGICAL","MACRO","MATCHED","MERGE","MINUTE","MONTH","MSCK","NAMESPACE","NAMESPACES","NATURAL","NO","NOT","NULL","NULLS","OF","ONLY","OPTION","OPTIONS","OR","ORDER","OUT","OUTER","OUTPUTFORMAT","OVER","OVERLAPS","OVERLAY","OVERWRITE","OWNER","PARTITION","PARTITIONED","PARTITIONS","PERCENT","PLACING","POSITION","PRECEDING","PRIMARY","PRINCIPALS","PROPERTIES","PURGE","QUERY","RANGE","RECORDREADER","RECORDWRITER","RECOVER","REDUCE","REFERENCES","RENAME","REPAIR","REPLACE","RESPECT","RESTRICT","REVOKE","RIGHT","RLIKE","ROLE","ROLES","ROLLBACK","ROLLUP","ROW","ROWS","SCHEMA","SECOND","SELECT","SEMI","SEPARATED","SERDE","SERDEPROPERTIES","SESSION_USER","SETS","SHOW","SKEWED","SOME","SORT","SORTED","START","STATISTICS","STORED","STRATIFY","SUBSTR","SUBSTRING","TABLE","TABLES","TBLPROPERTIES","TEMPORARY","TERMINATED","THEN","TO","TOUCH","TRAILING","TRANSACTION","TRANSACTIONS","TRIM","TRUE","TRUNCATE","UNARCHIVE","UNBOUNDED","UNCACHE","UNIQUE","UNKNOWN","UNLOCK","UNSET","USE","USER","USING","VIEW","WINDOW","YEAR","ANALYSE","ARRAY_ZIP","COALESCE","CONTAINS","CONVERT","D
AYS","DAY_HOUR","DAY_MINUTE","DAY_SECOND","DECODE","DEFAULT","DISTINCTROW","ENCODE","EXPLODE","EXPLODE_OUTER","FIXED","GREATEST","GROUP_CONCAT","HOURS","HOUR_MINUTE","HOUR_SECOND","IFNULL","LEAST","LEVEL","MINUTE_SECOND","NULLIF","OFFSET","ON","OPTIMIZE","REGEXP","SEPARATOR","SIZE","TYPE","TYPES","UNSIGNED","VARIABLES","YEAR_MONTH"],T.dataTypes=["ARRAY","BIGINT","BINARY","BOOLEAN","BYTE","CHAR","DATE","DEC","DECIMAL","DOUBLE","FLOAT","INT","INTEGER","INTERVAL","LONG","MAP","NUMERIC","REAL","SHORT","SMALLINT","STRING","STRUCT","TIMESTAMP_LTZ","TIMESTAMP_NTZ","TIMESTAMP","TINYINT","VARCHAR"]},3446:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.sql=void 0;const A=R(7163),e=R(3889),S=R(3550),I=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT]"]),O=(0,A.expandPhrases)(["WITH [RECURSIVE]","FROM","WHERE","GROUP BY [ALL | DISTINCT]","HAVING","WINDOW","PARTITION BY","ORDER BY","LIMIT","OFFSET","FETCH {FIRST | NEXT}","INSERT INTO","VALUES","SET"]),N=(0,A.expandPhrases)(["CREATE [GLOBAL TEMPORARY | LOCAL TEMPORARY] TABLE"]),t=(0,A.expandPhrases)(["CREATE [RECURSIVE] VIEW","UPDATE","WHERE CURRENT OF","DELETE FROM","DROP TABLE","ALTER TABLE","ADD COLUMN","DROP [COLUMN]","RENAME COLUMN","RENAME TO","ALTER [COLUMN]","{SET | DROP} DEFAULT","ADD SCOPE","DROP SCOPE {CASCADE | RESTRICT}","RESTART WITH","TRUNCATE TABLE","SET SCHEMA"]),L=(0,A.expandPhrases)(["UNION [ALL | DISTINCT]","EXCEPT [ALL | DISTINCT]","INTERSECT [ALL | DISTINCT]"]),C=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT | FULL} [OUTER] JOIN","{INNER | CROSS} JOIN","NATURAL [INNER] JOIN","NATURAL {LEFT | RIGHT | FULL} [OUTER] JOIN"]),_=(0,A.expandPhrases)(["ON {UPDATE | DELETE} [SET NULL | SET DEFAULT]","{ROWS | RANGE} BETWEEN"]),s=(0,A.expandPhrases)([]);T.sql={name:"sql",tokenizerOptions:{reservedSelect:I,reservedClauses:[...O,...N,...t],reservedSetOperations:L,reservedJoins:C,reservedKeywordPhrases:_,reservedDataTypePhrases:s,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:e.functions,stringTypes:[{quote:"''-qq-bs",prefixes:["N","U&"]},{quote:"''-raw",prefixes:["X"],requirePrefix:!0}],identTypes:['""-qq',"``"],paramTypes:{positional:!0},operators:["||"]},formatOptions:{onelineClauses:[...N,...t],tabularOnelineClauses:t}}},3889:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 0,T.functions=["GROUPING","RANK","DENSE_RANK","PERCENT_RANK","CUME_DIST","ROW_NUMBER","POSITION","OCCURRENCES_REGEX","POSITION_REGEX","EXTRACT","CHAR_LENGTH","CHARACTER_LENGTH","OCTET_LENGTH","CARDINALITY","ABS","MOD","LN","EXP","POWER","SQRT","FLOOR","CEIL","CEILING","WIDTH_BUCKET","SUBSTRING","SUBSTRING_REGEX","UPPER","LOWER","CONVERT","TRANSLATE","TRANSLATE_REGEX","TRIM","OVERLAY","NORMALIZE","SPECIFICTYPE","CURRENT_DATE","CURRENT_TIME","LOCALTIME","CURRENT_TIMESTAMP","LOCALTIMESTAMP","COUNT","AVG","MAX","MIN","SUM","STDDEV_POP","STDDEV_SAMP","VAR_SAMP","VAR_POP","COLLECT","FUSION","INTERSECTION","COVAR_POP","COVAR_SAMP","CORR","REGR_SLOPE","REGR_INTERCEPT","REGR_COUNT","REGR_R2","REGR_AVGX","REGR_AVGY","REGR_SXX","REGR_SYY","REGR_SXY","PERCENTILE_CONT","PERCENTILE_DISC","CAST","COALESCE","NULLIF","ROUND","SIN","COS","TAN","ASIN","ACOS","ATAN"]},3550:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 
0,T.keywords=["ALL","ALLOCATE","ALTER","ANY","ARE","AS","ASC","ASENSITIVE","ASYMMETRIC","AT","ATOMIC","AUTHORIZATION","BEGIN","BETWEEN","BOTH","BY","CALL","CALLED","CASCADED","CAST","CHECK","CLOSE","COALESCE","COLLATE","COLUMN","COMMIT","CONDITION","CONNECT","CONSTRAINT","CORRESPONDING","CREATE","CROSS","CUBE","CURRENT","CURRENT_CATALOG","CURRENT_DEFAULT_TRANSFORM_GROUP","CURRENT_PATH","CURRENT_ROLE","CURRENT_SCHEMA","CURRENT_TRANSFORM_GROUP_FOR_TYPE","CURRENT_USER","CURSOR","CYCLE","DEALLOCATE","DAY","DECLARE","DEFAULT","DELETE","DEREF","DESC","DESCRIBE","DETERMINISTIC","DISCONNECT","DISTINCT","DROP","DYNAMIC","EACH","ELEMENT","END-EXEC","ESCAPE","EVERY","EXCEPT","EXEC","EXECUTE","EXISTS","EXTERNAL","FALSE","FETCH","FILTER","FOR","FOREIGN","FREE","FROM","FULL","FUNCTION","GET","GLOBAL","GRANT","GROUP","HAVING","HOLD","HOUR","IDENTITY","IN","INDICATOR","INNER","INOUT","INSENSITIVE","INSERT","INTERSECT","INTO","IS","LANGUAGE","LARGE","LATERAL","LEADING","LEFT","LIKE","LIKE_REGEX","LOCAL","MATCH","MEMBER","MERGE","METHOD","MINUTE","MODIFIES","MODULE","MONTH","NATURAL","NEW","NO","NONE","NOT","NULL","NULLIF","OF","OLD","ON","ONLY","OPEN","ORDER","OUT","OUTER","OVER","OVERLAPS","PARAMETER","PARTITION","PRECISION","PREPARE","PRIMARY","PROCEDURE","RANGE","READS","REAL","RECURSIVE","REF","REFERENCES","REFERENCING","RELEASE","RESULT","RETURN","RETURNS","REVOKE","RIGHT","ROLLBACK","ROLLUP","ROW","ROWS","SAVEPOINT","SCOPE","SCROLL","SEARCH","SECOND","SELECT","SENSITIVE","SESSION_USER","SET","SIMILAR","SOME","SPECIFIC","SQL","SQLEXCEPTION","SQLSTATE","SQLWARNING","START","STATIC","SUBMULTISET","SYMMETRIC","SYSTEM","SYSTEM_USER","TABLE","TABLESAMPLE","THEN","TIMEZONE_HOUR","TIMEZONE_MINUTE","TO","TRAILING","TRANSLATION","TREAT","TRIGGER","TRUE","UESCAPE","UNION","UNIQUE","UNKNOWN","UNNEST","UPDATE","USER","USING","VALUE","VALUES","WHENEVER","WINDOW","WITHIN","WITHOUT","YEAR"],T.dataTypes=["ARRAY","BIGINT","BINARY LARGE OBJECT","BINARY VARYING","BINARY","BLOB","BOOLEAN","CHAR LARGE OBJECT","CHAR VARYING","CHAR","CHARACTER LARGE OBJECT","CHARACTER VARYING","CHARACTER","CLOB","DATE","DEC","DECIMAL","DOUBLE","FLOAT","INT","INTEGER","INTERVAL","MULTISET","NATIONAL CHAR VARYING","NATIONAL CHAR","NATIONAL CHARACTER LARGE OBJECT","NATIONAL CHARACTER VARYING","NATIONAL CHARACTER","NCHAR LARGE OBJECT","NCHAR VARYING","NCHAR","NCLOB","NUMERIC","SMALLINT","TIME","TIMESTAMP","VARBINARY","VARCHAR"]},5784:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.sqlite=void 0;const A=R(7163),e=R(3171),S=R(4976),I=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT]"]),O=(0,A.expandPhrases)(["WITH [RECURSIVE]","FROM","WHERE","GROUP BY","HAVING","WINDOW","PARTITION BY","ORDER BY","LIMIT","OFFSET","INSERT [OR ABORT | OR FAIL | OR IGNORE | OR REPLACE | OR ROLLBACK] INTO","REPLACE INTO","VALUES","SET","RETURNING"]),N=(0,A.expandPhrases)(["CREATE [TEMPORARY | TEMP] TABLE [IF NOT EXISTS]"]),t=(0,A.expandPhrases)(["CREATE [TEMPORARY | TEMP] VIEW [IF NOT EXISTS]","UPDATE [OR ABORT | OR FAIL | OR IGNORE | OR REPLACE | OR ROLLBACK]","ON CONFLICT","DELETE FROM","DROP TABLE [IF EXISTS]","ALTER TABLE","ADD [COLUMN]","DROP [COLUMN]","RENAME [COLUMN]","RENAME TO","SET SCHEMA"]),L=(0,A.expandPhrases)(["UNION [ALL]","EXCEPT","INTERSECT"]),C=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT | FULL} [OUTER] JOIN","{INNER | CROSS} JOIN","NATURAL [INNER] JOIN","NATURAL {LEFT | RIGHT | FULL} [OUTER] JOIN"]),_=(0,A.expandPhrases)(["ON {UPDATE | DELETE} [SET NULL | SET DEFAULT]","{ROWS | RANGE | GROUPS} BETWEEN","DO 
UPDATE"]),s=(0,A.expandPhrases)([]);T.sqlite={name:"sqlite",tokenizerOptions:{reservedSelect:I,reservedClauses:[...O,...N,...t],reservedSetOperations:L,reservedJoins:C,reservedKeywordPhrases:_,reservedDataTypePhrases:s,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:e.functions,stringTypes:["''-qq",{quote:"''-raw",prefixes:["X"],requirePrefix:!0}],identTypes:['""-qq',"``","[]"],paramTypes:{positional:!0,numbered:["?"],named:[":","@","$"]},operators:["%","~","&","|","<<",">>","==","->","->>","||"]},formatOptions:{onelineClauses:[...N,...t],tabularOnelineClauses:t}}},3171:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 0,T.functions=["ABS","CHANGES","CHAR","COALESCE","FORMAT","GLOB","HEX","IFNULL","IIF","INSTR","LAST_INSERT_ROWID","LENGTH","LIKE","LIKELIHOOD","LIKELY","LOAD_EXTENSION","LOWER","LTRIM","NULLIF","PRINTF","QUOTE","RANDOM","RANDOMBLOB","REPLACE","ROUND","RTRIM","SIGN","SOUNDEX","SQLITE_COMPILEOPTION_GET","SQLITE_COMPILEOPTION_USED","SQLITE_OFFSET","SQLITE_SOURCE_ID","SQLITE_VERSION","SUBSTR","SUBSTRING","TOTAL_CHANGES","TRIM","TYPEOF","UNICODE","UNLIKELY","UPPER","ZEROBLOB","AVG","COUNT","GROUP_CONCAT","MAX","MIN","SUM","TOTAL","DATE","TIME","DATETIME","JULIANDAY","UNIXEPOCH","STRFTIME","row_number","rank","dense_rank","percent_rank","cume_dist","ntile","lag","lead","first_value","last_value","nth_value","ACOS","ACOSH","ASIN","ASINH","ATAN","ATAN2","ATANH","CEIL","CEILING","COS","COSH","DEGREES","EXP","FLOOR","LN","LOG","LOG","LOG10","LOG2","MOD","PI","POW","POWER","RADIANS","SIN","SINH","SQRT","TAN","TANH","TRUNC","JSON","JSON_ARRAY","JSON_ARRAY_LENGTH","JSON_ARRAY_LENGTH","JSON_EXTRACT","JSON_INSERT","JSON_OBJECT","JSON_PATCH","JSON_REMOVE","JSON_REPLACE","JSON_SET","JSON_TYPE","JSON_TYPE","JSON_VALID","JSON_QUOTE","JSON_GROUP_ARRAY","JSON_GROUP_OBJECT","JSON_EACH","JSON_TREE","CAST"]},4976:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 0,T.keywords=["ABORT","ACTION","ADD","AFTER","ALL","ALTER","AND","ARE","ALWAYS","ANALYZE","AS","ASC","ATTACH","AUTOINCREMENT","BEFORE","BEGIN","BETWEEN","BY","CASCADE","CASE","CAST","CHECK","COLLATE","COLUMN","COMMIT","CONFLICT","CONSTRAINT","CREATE","CROSS","CURRENT","CURRENT_DATE","CURRENT_TIME","CURRENT_TIMESTAMP","DATABASE","DEFAULT","DEFERRABLE","DEFERRED","DELETE","DESC","DETACH","DISTINCT","DO","DROP","EACH","ELSE","END","ESCAPE","EXCEPT","EXCLUDE","EXCLUSIVE","EXISTS","EXPLAIN","FAIL","FILTER","FIRST","FOLLOWING","FOR","FOREIGN","FROM","FULL","GENERATED","GLOB","GROUP","HAVING","IF","IGNORE","IMMEDIATE","IN","INDEX","INDEXED","INITIALLY","INNER","INSERT","INSTEAD","INTERSECT","INTO","IS","ISNULL","JOIN","KEY","LAST","LEFT","LIKE","LIMIT","MATCH","MATERIALIZED","NATURAL","NO","NOT","NOTHING","NOTNULL","NULL","NULLS","OF","OFFSET","ON","ONLY","OPEN","OR","ORDER","OTHERS","OUTER","OVER","PARTITION","PLAN","PRAGMA","PRECEDING","PRIMARY","QUERY","RAISE","RANGE","RECURSIVE","REFERENCES","REGEXP","REINDEX","RELEASE","RENAME","REPLACE","RESTRICT","RETURNING","RIGHT","ROLLBACK","ROW","ROWS","SAVEPOINT","SELECT","SET","TABLE","TEMP","TEMPORARY","THEN","TIES","TO","TRANSACTION","TRIGGER","UNBOUNDED","UNION","UNIQUE","UPDATE","USING","VACUUM","VALUES","VIEW","VIRTUAL","WHEN","WHERE","WINDOW","WITH","WITHOUT"],T.dataTypes=["ANY","ARRAY","BLOB","CHARACTER","DECIMAL","INT","INTEGER","NATIVE CHARACTER","NCHAR","NUMERIC","NVARCHAR","REAL","TEXT","VARCHAR","VARYING CHARACTER"]},2066:(E,T,R)=>{"use 
strict";Object.defineProperty(T,"__esModule",{value:!0}),T.tidb=void 0;const A=R(7163),e=R(6831),S=R(930),I=R(3069),O=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT | DISTINCTROW]"]),N=(0,A.expandPhrases)(["WITH [RECURSIVE]","FROM","WHERE","GROUP BY","HAVING","WINDOW","PARTITION BY","ORDER BY","LIMIT","OFFSET","INSERT [LOW_PRIORITY | DELAYED | HIGH_PRIORITY] [IGNORE] [INTO]","REPLACE [LOW_PRIORITY | DELAYED] [INTO]","VALUES","ON DUPLICATE KEY UPDATE","SET"]),t=(0,A.expandPhrases)(["CREATE [TEMPORARY] TABLE [IF NOT EXISTS]"]),L=(0,A.expandPhrases)(["CREATE [OR REPLACE] [SQL SECURITY DEFINER | SQL SECURITY INVOKER] VIEW [IF NOT EXISTS]","UPDATE [LOW_PRIORITY] [IGNORE]","DELETE [LOW_PRIORITY] [QUICK] [IGNORE] FROM","DROP [TEMPORARY] TABLE [IF EXISTS]","ALTER TABLE","ADD [COLUMN]","{CHANGE | MODIFY} [COLUMN]","DROP [COLUMN]","RENAME [TO | AS]","RENAME COLUMN","ALTER [COLUMN]","{SET | DROP} DEFAULT","TRUNCATE [TABLE]","ALTER DATABASE","ALTER INSTANCE","ALTER RESOURCE GROUP","ALTER SEQUENCE","ALTER USER","ALTER VIEW","ANALYZE TABLE","CHECK TABLE","CHECKSUM TABLE","COMMIT","CREATE DATABASE","CREATE INDEX","CREATE RESOURCE GROUP","CREATE ROLE","CREATE SEQUENCE","CREATE USER","DEALLOCATE PREPARE","DESCRIBE","DROP DATABASE","DROP INDEX","DROP RESOURCE GROUP","DROP ROLE","DROP TABLESPACE","DROP USER","DROP VIEW","EXPLAIN","FLUSH","GRANT","IMPORT TABLE","INSTALL COMPONENT","INSTALL PLUGIN","KILL","LOAD DATA","LOCK INSTANCE FOR BACKUP","LOCK TABLES","OPTIMIZE TABLE","PREPARE","RELEASE SAVEPOINT","RENAME TABLE","RENAME USER","REPAIR TABLE","RESET","REVOKE","ROLLBACK","ROLLBACK TO SAVEPOINT","SAVEPOINT","SET CHARACTER SET","SET DEFAULT ROLE","SET NAMES","SET PASSWORD","SET RESOURCE GROUP","SET ROLE","SET TRANSACTION","SHOW","SHOW BINARY LOGS","SHOW BINLOG EVENTS","SHOW CHARACTER SET","SHOW COLLATION","SHOW COLUMNS","SHOW CREATE DATABASE","SHOW CREATE TABLE","SHOW CREATE USER","SHOW CREATE VIEW","SHOW DATABASES","SHOW ENGINE","SHOW ENGINES","SHOW ERRORS","SHOW EVENTS","SHOW GRANTS","SHOW INDEX","SHOW MASTER STATUS","SHOW OPEN TABLES","SHOW PLUGINS","SHOW PRIVILEGES","SHOW PROCESSLIST","SHOW PROFILE","SHOW PROFILES","SHOW STATUS","SHOW TABLE STATUS","SHOW TABLES","SHOW TRIGGERS","SHOW VARIABLES","SHOW WARNINGS","TABLE","UNINSTALL COMPONENT","UNINSTALL PLUGIN","UNLOCK INSTANCE","UNLOCK TABLES","USE"]),C=(0,A.expandPhrases)(["UNION [ALL | DISTINCT]"]),_=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT} [OUTER] JOIN","{INNER | CROSS} JOIN","NATURAL [INNER] JOIN","NATURAL {LEFT | RIGHT} [OUTER] JOIN","STRAIGHT_JOIN"]),s=(0,A.expandPhrases)(["ON {UPDATE | DELETE} [SET NULL]","CHARACTER SET","{ROWS | RANGE} BETWEEN","IDENTIFIED 
BY"]),r=(0,A.expandPhrases)([]);T.tidb={name:"tidb",tokenizerOptions:{reservedSelect:O,reservedClauses:[...N,...t,...L],reservedSetOperations:C,reservedJoins:_,reservedKeywordPhrases:s,reservedDataTypePhrases:r,supportsXor:!0,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:I.functions,stringTypes:['""-qq-bs',{quote:"''-qq-bs",prefixes:["N"]},{quote:"''-raw",prefixes:["B","X"],requirePrefix:!0}],identTypes:["``"],identChars:{first:"$",rest:"$",allowFirstCharNumber:!0},variableTypes:[{regex:"@@?[A-Za-z0-9_.$]+"},{quote:'""-qq-bs',prefixes:["@"],requirePrefix:!0},{quote:"''-qq-bs",prefixes:["@"],requirePrefix:!0},{quote:"``",prefixes:["@"],requirePrefix:!0}],paramTypes:{positional:!0},lineCommentTypes:["--","#"],operators:["%",":=","&","|","^","~","<<",">>","<=>","->","->>","&&","||","!","*.*"],postProcess:e.postProcess},formatOptions:{onelineClauses:[...t,...L],tabularOnelineClauses:L}}},3069:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 0,T.functions=["ABS","ACOS","ADDDATE","ADDTIME","AES_DECRYPT","AES_ENCRYPT","ANY_VALUE","ASCII","ASIN","ATAN","ATAN2","AVG","BENCHMARK","BIN","BIN_TO_UUID","BIT_AND","BIT_COUNT","BIT_LENGTH","BIT_OR","BIT_XOR","BITAND","BITNEG","BITOR","BITXOR","CASE","CAST","CEIL","CEILING","CHAR_FUNC","CHAR_LENGTH","CHARACTER_LENGTH","CHARSET","COALESCE","COERCIBILITY","COLLATION","COMPRESS","CONCAT","CONCAT_WS","CONNECTION_ID","CONV","CONVERT","CONVERT_TZ","COS","COT","COUNT","CRC32","CUME_DIST","CURDATE","CURRENT_DATE","CURRENT_RESOURCE_GROUP","CURRENT_ROLE","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_USER","CURTIME","DATABASE","DATE","DATE_ADD","DATE_FORMAT","DATE_SUB","DATEDIFF","DAY","DAYNAME","DAYOFMONTH","DAYOFWEEK","DAYOFYEAR","DECODE","DEFAULT_FUNC","DEGREES","DENSE_RANK","DES_DECRYPT","DES_ENCRYPT","DIV","ELT","ENCODE","ENCRYPT","EQ","EXP","EXPORT_SET","EXTRACT","FIELD","FIND_IN_SET","FIRST_VALUE","FLOOR","FORMAT","FORMAT_BYTES","FORMAT_NANO_TIME","FOUND_ROWS","FROM_BASE64","FROM_DAYS","FROM_UNIXTIME","GE","GET_FORMAT","GET_LOCK","GETPARAM","GREATEST","GROUP_CONCAT","GROUPING","GT","HEX","HOUR","IF","IFNULL","ILIKE","INET6_ATON","INET6_NTOA","INET_ATON","INET_NTOA","INSERT_FUNC","INSTR","INTDIV","INTERVAL","IS_FREE_LOCK","IS_IPV4","IS_IPV4_COMPAT","IS_IPV4_MAPPED","IS_IPV6","IS_USED_LOCK","IS_UUID","ISFALSE","ISNULL","ISTRUE","JSON_ARRAY","JSON_ARRAYAGG","JSON_ARRAY_APPEND","JSON_ARRAY_INSERT","JSON_CONTAINS","JSON_CONTAINS_PATH","JSON_DEPTH","JSON_EXTRACT","JSON_INSERT","JSON_KEYS","JSON_LENGTH","JSON_MEMBEROF","JSON_MERGE","JSON_MERGE_PATCH","JSON_MERGE_PRESERVE","JSON_OBJECT","JSON_OBJECTAGG","JSON_OVERLAPS","JSON_PRETTY","JSON_QUOTE","JSON_REMOVE","JSON_REPLACE","JSON_SEARCH","JSON_SET","JSON_STORAGE_FREE","JSON_STORAGE_SIZE","JSON_TYPE","JSON_UNQUOTE","JSON_VALID","LAG","LAST_DAY","LAST_INSERT_ID","LAST_VALUE","LASTVAL","LCASE","LE","LEAD","LEAST","LEFT","LEFTSHIFT","LENGTH","LIKE","LN","LOAD_FILE","LOCALTIME","LOCALTIMESTAMP","LOCATE","LOG","LOG10","LOG2","LOWER","LPAD","LT","LTRIM","MAKE_SET","MAKEDATE","MAKETIME","MASTER_POS_WAIT","MAX","MD5","MICROSECOND","MID","MIN","MINUS","MINUTE","MOD","MONTH","MONTHNAME","MUL","NAME_CONST","NE","NEXTVAL","NOT","NOW","NTH_VALUE","NTILE","NULLEQ","OCT","OCTET_LENGTH","OLD_PASSWORD","ORD","PASSWORD_FUNC","PERCENT_RANK","PERIOD_ADD","PERIOD_DIFF","PI","PLUS","POSITION","POW","POWER","QUARTER","QUOTE","RADIANS","RAND","RANDOM_BYTES","RANK","REGEXP","REGEXP_INSTR","REGEXP_LIKE","REGEXP_REPLACE","REGEXP_SUBSTR","RELEASE_ALL_LOCKS","RELEASE_LOCK","REPEA
T","REPLACE","REVERSE","RIGHT","RIGHTSHIFT","ROUND","ROW_COUNT","ROW_NUMBER","RPAD","RTRIM","SCHEMA","SEC_TO_TIME","SECOND","SESSION_USER","SETVAL","SETVAR","SHA","SHA1","SHA2","SIGN","SIN","SLEEP","SM3","SPACE","SQRT","STD","STDDEV","STDDEV_POP","STDDEV_SAMP","STR_TO_DATE","STRCMP","SUBDATE","SUBSTR","SUBSTRING","SUBSTRING_INDEX","SUBTIME","SUM","SYSDATE","SYSTEM_USER","TAN","TIDB_BOUNDED_STALENESS","TIDB_CURRENT_TSO","TIDB_DECODE_BINARY_PLAN","TIDB_DECODE_KEY","TIDB_DECODE_PLAN","TIDB_DECODE_SQL_DIGESTS","TIDB_ENCODE_SQL_DIGEST","TIDB_IS_DDL_OWNER","TIDB_PARSE_TSO","TIDB_PARSE_TSO_LOGICAL","TIDB_ROW_CHECKSUM","TIDB_SHARD","TIDB_VERSION","TIME","TIME_FORMAT","TIME_TO_SEC","TIMEDIFF","TIMESTAMP","TIMESTAMPADD","TIMESTAMPDIFF","TO_BASE64","TO_DAYS","TO_SECONDS","TRANSLATE","TRIM","TRUNCATE","UCASE","UNARYMINUS","UNCOMPRESS","UNCOMPRESSED_LENGTH","UNHEX","UNIX_TIMESTAMP","UPPER","UTC_DATE","UTC_TIME","UTC_TIMESTAMP","UUID","UUID_SHORT","UUID_TO_BIN","VALIDATE_PASSWORD_STRENGTH","VAR_POP","VAR_SAMP","VARIANCE","VERSION","VITESS_HASH","WEEK","WEEKDAY","WEEKOFYEAR","WEIGHT_STRING","YEAR","YEARWEEK"]},930:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 0,T.keywords=["ADD","ALL","ALTER","ANALYZE","AND","ARRAY","AS","ASC","BETWEEN","BOTH","BY","CALL","CASCADE","CASE","CHANGE","CHECK","COLLATE","COLUMN","CONSTRAINT","CONTINUE","CONVERT","CREATE","CROSS","CURRENT_DATE","CURRENT_ROLE","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_USER","CURSOR","DATABASE","DATABASES","DAY_HOUR","DAY_MICROSECOND","DAY_MINUTE","DAY_SECOND","DEFAULT","DELAYED","DELETE","DESC","DESCRIBE","DISTINCT","DISTINCTROW","DIV","DOUBLE","DROP","DUAL","ELSE","ELSEIF","ENCLOSED","ESCAPED","EXCEPT","EXISTS","EXIT","EXPLAIN","FALSE","FETCH","FOR","FORCE","FOREIGN","FROM","FULLTEXT","GENERATED","GRANT","GROUP","GROUPS","HAVING","HIGH_PRIORITY","HOUR_MICROSECOND","HOUR_MINUTE","HOUR_SECOND","IF","IGNORE","ILIKE","IN","INDEX","INFILE","INNER","INOUT","INSERT","INTERSECT","INTERVAL","INTO","IS","ITERATE","JOIN","KEY","KEYS","KILL","LEADING","LEAVE","LEFT","LIKE","LIMIT","LINEAR","LINES","LOAD","LOCALTIME","LOCALTIMESTAMP","LOCK","LONG","LOW_PRIORITY","MATCH","MAXVALUE","MINUTE_MICROSECOND","MINUTE_SECOND","MOD","NATURAL","NOT","NO_WRITE_TO_BINLOG","NULL","OF","ON","OPTIMIZE","OPTION","OPTIONALLY","OR","ORDER","OUT","OUTER","OUTFILE","OVER","PARTITION","PRIMARY","PROCEDURE","RANGE","READ","RECURSIVE","REFERENCES","REGEXP","RELEASE","RENAME","REPEAT","REPLACE","REQUIRE","RESTRICT","REVOKE","RIGHT","RLIKE","ROW","ROWS","SECOND_MICROSECOND","SELECT","SET","SHOW","SPATIAL","SQL","SQLEXCEPTION","SQLSTATE","SQLWARNING","SQL_BIG_RESULT","SQL_CALC_FOUND_ROWS","SQL_SMALL_RESULT","SSL","STARTING","STATS_EXTENDED","STORED","STRAIGHT_JOIN","TABLE","TABLESAMPLE","TERMINATED","THEN","TO","TRAILING","TRIGGER","TRUE","TiDB_CURRENT_TSO","UNION","UNIQUE","UNLOCK","UNSIGNED","UNTIL","UPDATE","USAGE","USE","USING","UTC_DATE","UTC_TIME","UTC_TIMESTAMP","VALUES","VIRTUAL","WHEN","WHERE","WHILE","WINDOW","WITH","WRITE","XOR","YEAR_MONTH","ZEROFILL"],T.dataTypes=["BIGINT","BINARY","BIT","BLOB","BOOL","BOOLEAN","CHAR","CHARACTER","DATE","DATETIME","DEC","DECIMAL","DOUBLE PRECISION","DOUBLE","ENUM","FIXED","INT","INT1","INT2","INT3","INT4","INT8","INTEGER","LONGBLOB","LONGTEXT","MEDIUMBLOB","MEDIUMINT","MIDDLEINT","NATIONAL CHAR","NATIONAL VARCHAR","NUMERIC","PRECISION","SMALLINT","TEXT","TIME","TIMESTAMP","TINYBLOB","TINYINT","TINYTEXT","VARBINARY","VARCHAR","VARCHARACTER","VARYING","YEAR"]},918:(E,T,R)=>{"use 
strict";Object.defineProperty(T,"__esModule",{value:!0}),T.transactsql=void 0;const A=R(7163),e=R(6673),S=R(846),I=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT]"]),O=(0,A.expandPhrases)(["WITH","INTO","FROM","WHERE","GROUP BY","HAVING","WINDOW","PARTITION BY","ORDER BY","OFFSET","FETCH {FIRST | NEXT}","FOR {BROWSE | XML | JSON}","OPTION","INSERT [INTO]","VALUES","SET","MERGE [INTO]","WHEN [NOT] MATCHED [BY TARGET | BY SOURCE] [THEN]","UPDATE SET"]),N=(0,A.expandPhrases)(["CREATE TABLE"]),t=(0,A.expandPhrases)(["CREATE [OR ALTER] [MATERIALIZED] VIEW","UPDATE","WHERE CURRENT OF","DELETE [FROM]","DROP TABLE [IF EXISTS]","ALTER TABLE","ADD","DROP COLUMN [IF EXISTS]","ALTER COLUMN","TRUNCATE TABLE","CREATE [UNIQUE] [CLUSTERED] INDEX","CREATE DATABASE","ALTER DATABASE","DROP DATABASE [IF EXISTS]","CREATE [OR ALTER] [PARTITION] {FUNCTION | PROCEDURE | PROC}","ALTER [PARTITION] {FUNCTION | PROCEDURE | PROC}","DROP [PARTITION] {FUNCTION | PROCEDURE | PROC} [IF EXISTS]","GO","USE","ADD SENSITIVITY CLASSIFICATION","ADD SIGNATURE","AGGREGATE","ANSI_DEFAULTS","ANSI_NULLS","ANSI_NULL_DFLT_OFF","ANSI_NULL_DFLT_ON","ANSI_PADDING","ANSI_WARNINGS","APPLICATION ROLE","ARITHABORT","ARITHIGNORE","ASSEMBLY","ASYMMETRIC KEY","AUTHORIZATION","AVAILABILITY GROUP","BACKUP","BACKUP CERTIFICATE","BACKUP MASTER KEY","BACKUP SERVICE MASTER KEY","BEGIN CONVERSATION TIMER","BEGIN DIALOG CONVERSATION","BROKER PRIORITY","BULK INSERT","CERTIFICATE","CLOSE MASTER KEY","CLOSE SYMMETRIC KEY","COLUMN ENCRYPTION KEY","COLUMN MASTER KEY","COLUMNSTORE INDEX","CONCAT_NULL_YIELDS_NULL","CONTEXT_INFO","CONTRACT","CREDENTIAL","CRYPTOGRAPHIC PROVIDER","CURSOR_CLOSE_ON_COMMIT","DATABASE","DATABASE AUDIT SPECIFICATION","DATABASE ENCRYPTION KEY","DATABASE HADR","DATABASE SCOPED CONFIGURATION","DATABASE SCOPED CREDENTIAL","DATABASE SET","DATEFIRST","DATEFORMAT","DEADLOCK_PRIORITY","DENY","DENY XML","DISABLE TRIGGER","ENABLE TRIGGER","END CONVERSATION","ENDPOINT","EVENT NOTIFICATION","EVENT SESSION","EXECUTE AS","EXTERNAL DATA SOURCE","EXTERNAL FILE FORMAT","EXTERNAL LANGUAGE","EXTERNAL LIBRARY","EXTERNAL RESOURCE POOL","EXTERNAL TABLE","FIPS_FLAGGER","FMTONLY","FORCEPLAN","FULLTEXT CATALOG","FULLTEXT INDEX","FULLTEXT STOPLIST","GET CONVERSATION GROUP","GET_TRANSMISSION_STATUS","GRANT","GRANT XML","IDENTITY_INSERT","IMPLICIT_TRANSACTIONS","INDEX","LANGUAGE","LOCK_TIMEOUT","LOGIN","MASTER KEY","MESSAGE TYPE","MOVE CONVERSATION","NOCOUNT","NOEXEC","NUMERIC_ROUNDABORT","OFFSETS","OPEN MASTER KEY","OPEN SYMMETRIC KEY","PARSEONLY","PARTITION SCHEME","QUERY_GOVERNOR_COST_LIMIT","QUEUE","QUOTED_IDENTIFIER","RECEIVE","REMOTE SERVICE BINDING","REMOTE_PROC_TRANSACTIONS","RESOURCE GOVERNOR","RESOURCE POOL","RESTORE","RESTORE FILELISTONLY","RESTORE HEADERONLY","RESTORE LABELONLY","RESTORE MASTER KEY","RESTORE REWINDONLY","RESTORE SERVICE MASTER KEY","RESTORE VERIFYONLY","REVERT","REVOKE","REVOKE XML","ROLE","ROUTE","ROWCOUNT","RULE","SCHEMA","SEARCH PROPERTY LIST","SECURITY POLICY","SELECTIVE XML INDEX","SEND","SENSITIVITY CLASSIFICATION","SEQUENCE","SERVER AUDIT","SERVER AUDIT SPECIFICATION","SERVER CONFIGURATION","SERVER ROLE","SERVICE","SERVICE MASTER KEY","SETUSER","SHOWPLAN_ALL","SHOWPLAN_TEXT","SHOWPLAN_XML","SIGNATURE","SPATIAL INDEX","STATISTICS","STATISTICS IO","STATISTICS PROFILE","STATISTICS TIME","STATISTICS XML","SYMMETRIC KEY","SYNONYM","TABLE","TABLE IDENTITY","TEXTSIZE","TRANSACTION ISOLATION LEVEL","TRIGGER","TYPE","UPDATE STATISTICS","USER","WORKLOAD GROUP","XACT_ABORT","XML INDEX","XML SCHEMA 
COLLECTION"]),L=(0,A.expandPhrases)(["UNION [ALL]","EXCEPT","INTERSECT"]),C=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT | FULL} [OUTER] JOIN","{INNER | CROSS} JOIN","{CROSS | OUTER} APPLY"]),_=(0,A.expandPhrases)(["ON {UPDATE | DELETE} [SET NULL | SET DEFAULT]","{ROWS | RANGE} BETWEEN"]),s=(0,A.expandPhrases)([]);T.transactsql={name:"transactsql",tokenizerOptions:{reservedSelect:I,reservedClauses:[...O,...N,...t],reservedSetOperations:L,reservedJoins:C,reservedKeywordPhrases:_,reservedDataTypePhrases:s,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:e.functions,nestedBlockComments:!0,stringTypes:[{quote:"''-qq",prefixes:["N"]},"{}"],identTypes:['""-qq',"[]"],identChars:{first:"#@",rest:"#@$"},paramTypes:{named:["@"],quoted:["@"]},operators:["%","&","|","^","~","!<","!>","+=","-=","*=","/=","%=","|=","&=","^=","::",":"],propertyAccessOperators:[".."]},formatOptions:{alwaysDenseOperators:["::"],onelineClauses:[...N,...t],tabularOnelineClauses:t}}},6673:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 0,T.functions=["APPROX_COUNT_DISTINCT","AVG","CHECKSUM_AGG","COUNT","COUNT_BIG","GROUPING","GROUPING_ID","MAX","MIN","STDEV","STDEVP","SUM","VAR","VARP","CUME_DIST","FIRST_VALUE","LAG","LAST_VALUE","LEAD","PERCENTILE_CONT","PERCENTILE_DISC","PERCENT_RANK","Collation - COLLATIONPROPERTY","Collation - TERTIARY_WEIGHTS","@@DBTS","@@LANGID","@@LANGUAGE","@@LOCK_TIMEOUT","@@MAX_CONNECTIONS","@@MAX_PRECISION","@@NESTLEVEL","@@OPTIONS","@@REMSERVER","@@SERVERNAME","@@SERVICENAME","@@SPID","@@TEXTSIZE","@@VERSION","CAST","CONVERT","PARSE","TRY_CAST","TRY_CONVERT","TRY_PARSE","ASYMKEY_ID","ASYMKEYPROPERTY","CERTPROPERTY","CERT_ID","CRYPT_GEN_RANDOM","DECRYPTBYASYMKEY","DECRYPTBYCERT","DECRYPTBYKEY","DECRYPTBYKEYAUTOASYMKEY","DECRYPTBYKEYAUTOCERT","DECRYPTBYPASSPHRASE","ENCRYPTBYASYMKEY","ENCRYPTBYCERT","ENCRYPTBYKEY","ENCRYPTBYPASSPHRASE","HASHBYTES","IS_OBJECTSIGNED","KEY_GUID","KEY_ID","KEY_NAME","SIGNBYASYMKEY","SIGNBYCERT","SYMKEYPROPERTY","VERIFYSIGNEDBYCERT","VERIFYSIGNEDBYASYMKEY","@@CURSOR_ROWS","@@FETCH_STATUS","CURSOR_STATUS","DATALENGTH","IDENT_CURRENT","IDENT_INCR","IDENT_SEED","IDENTITY","SQL_VARIANT_PROPERTY","@@DATEFIRST","CURRENT_TIMESTAMP","CURRENT_TIMEZONE","CURRENT_TIMEZONE_ID","DATEADD","DATEDIFF","DATEDIFF_BIG","DATEFROMPARTS","DATENAME","DATEPART","DATETIME2FROMPARTS","DATETIMEFROMPARTS","DATETIMEOFFSETFROMPARTS","DAY","EOMONTH","GETDATE","GETUTCDATE","ISDATE","MONTH","SMALLDATETIMEFROMPARTS","SWITCHOFFSET","SYSDATETIME","SYSDATETIMEOFFSET","SYSUTCDATETIME","TIMEFROMPARTS","TODATETIMEOFFSET","YEAR","JSON","ISJSON","JSON_VALUE","JSON_QUERY","JSON_MODIFY","ABS","ACOS","ASIN","ATAN","ATN2","CEILING","COS","COT","DEGREES","EXP","FLOOR","LOG","LOG10","PI","POWER","RADIANS","RAND","ROUND","SIGN","SIN","SQRT","SQUARE","TAN","CHOOSE","GREATEST","IIF","LEAST","@@PROCID","APP_NAME","APPLOCK_MODE","APPLOCK_TEST","ASSEMBLYPROPERTY","COL_LENGTH","COL_NAME","COLUMNPROPERTY","DATABASEPROPERTYEX","DB_ID","DB_NAME","FILE_ID","FILE_IDEX","FILE_NAME","FILEGROUP_ID","FILEGROUP_NAME","FILEGROUPPROPERTY","FILEPROPERTY","FILEPROPERTYEX","FULLTEXTCATALOGPROPERTY","FULLTEXTSERVICEPROPERTY","INDEX_COL","INDEXKEY_PROPERTY","INDEXPROPERTY","NEXT VALUE 
FOR","OBJECT_DEFINITION","OBJECT_ID","OBJECT_NAME","OBJECT_SCHEMA_NAME","OBJECTPROPERTY","OBJECTPROPERTYEX","ORIGINAL_DB_NAME","PARSENAME","SCHEMA_ID","SCHEMA_NAME","SCOPE_IDENTITY","SERVERPROPERTY","STATS_DATE","TYPE_ID","TYPE_NAME","TYPEPROPERTY","DENSE_RANK","NTILE","RANK","ROW_NUMBER","PUBLISHINGSERVERNAME","CERTENCODED","CERTPRIVATEKEY","CURRENT_USER","DATABASE_PRINCIPAL_ID","HAS_DBACCESS","HAS_PERMS_BY_NAME","IS_MEMBER","IS_ROLEMEMBER","IS_SRVROLEMEMBER","LOGINPROPERTY","ORIGINAL_LOGIN","PERMISSIONS","PWDENCRYPT","PWDCOMPARE","SESSION_USER","SESSIONPROPERTY","SUSER_ID","SUSER_NAME","SUSER_SID","SUSER_SNAME","SYSTEM_USER","USER","USER_ID","USER_NAME","ASCII","CHARINDEX","CONCAT","CONCAT_WS","DIFFERENCE","FORMAT","LEFT","LEN","LOWER","LTRIM","PATINDEX","QUOTENAME","REPLACE","REPLICATE","REVERSE","RIGHT","RTRIM","SOUNDEX","SPACE","STR","STRING_AGG","STRING_ESCAPE","STUFF","SUBSTRING","TRANSLATE","TRIM","UNICODE","UPPER","$PARTITION","@@ERROR","@@IDENTITY","@@PACK_RECEIVED","@@ROWCOUNT","@@TRANCOUNT","BINARY_CHECKSUM","CHECKSUM","COMPRESS","CONNECTIONPROPERTY","CONTEXT_INFO","CURRENT_REQUEST_ID","CURRENT_TRANSACTION_ID","DECOMPRESS","ERROR_LINE","ERROR_MESSAGE","ERROR_NUMBER","ERROR_PROCEDURE","ERROR_SEVERITY","ERROR_STATE","FORMATMESSAGE","GET_FILESTREAM_TRANSACTION_CONTEXT","GETANSINULL","HOST_ID","HOST_NAME","ISNULL","ISNUMERIC","MIN_ACTIVE_ROWVERSION","NEWID","NEWSEQUENTIALID","ROWCOUNT_BIG","SESSION_CONTEXT","XACT_STATE","@@CONNECTIONS","@@CPU_BUSY","@@IDLE","@@IO_BUSY","@@PACK_SENT","@@PACKET_ERRORS","@@TIMETICKS","@@TOTAL_ERRORS","@@TOTAL_READ","@@TOTAL_WRITE","TEXTPTR","TEXTVALID","COLUMNS_UPDATED","EVENTDATA","TRIGGER_NESTLEVEL","UPDATE","COALESCE","NULLIF"]},846:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 0,T.keywords=["ADD","ALL","ALTER","AND","ANY","AS","ASC","AUTHORIZATION","BACKUP","BEGIN","BETWEEN","BREAK","BROWSE","BULK","BY","CASCADE","CHECK","CHECKPOINT","CLOSE","CLUSTERED","COALESCE","COLLATE","COLUMN","COMMIT","COMPUTE","CONSTRAINT","CONTAINS","CONTAINSTABLE","CONTINUE","CONVERT","CREATE","CROSS","CURRENT","CURRENT_DATE","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_USER","CURSOR","DATABASE","DBCC","DEALLOCATE","DECLARE","DEFAULT","DELETE","DENY","DESC","DISK","DISTINCT","DISTRIBUTED","DROP","DUMP","ERRLVL","ESCAPE","EXEC","EXECUTE","EXISTS","EXIT","EXTERNAL","FETCH","FILE","FILLFACTOR","FOR","FOREIGN","FREETEXT","FREETEXTTABLE","FROM","FULL","FUNCTION","GOTO","GRANT","GROUP","HAVING","HOLDLOCK","IDENTITY","IDENTITYCOL","IDENTITY_INSERT","IF","IN","INDEX","INNER","INSERT","INTERSECT","INTO","IS","JOIN","KEY","KILL","LEFT","LIKE","LINENO","LOAD","MERGE","NOCHECK","NONCLUSTERED","NOT","NULL","NULLIF","OF","OFF","OFFSETS","ON","OPEN","OPENDATASOURCE","OPENQUERY","OPENROWSET","OPENXML","OPTION","OR","ORDER","OUTER","OVER","PERCENT","PIVOT","PLAN","PRIMARY","PRINT","PROC","PROCEDURE","PUBLIC","RAISERROR","READ","READTEXT","RECONFIGURE","REFERENCES","REPLICATION","RESTORE","RESTRICT","RETURN","REVERT","REVOKE","RIGHT","ROLLBACK","ROWCOUNT","ROWGUIDCOL","RULE","SAVE","SCHEMA","SECURITYAUDIT","SELECT","SEMANTICKEYPHRASETABLE","SEMANTICSIMILARITYDETAILSTABLE","SEMANTICSIMILARITYTABLE","SESSION_USER","SET","SETUSER","SHUTDOWN","SOME","STATISTICS","SYSTEM_USER","TABLE","TABLESAMPLE","TEXTSIZE","THEN","TO","TOP","TRAN","TRANSACTION","TRIGGER","TRUNCATE","TRY_CONVERT","TSEQUAL","UNION","UNIQUE","UNPIVOT","UPDATE","UPDATETEXT","USE","USER","VALUES","VIEW","WAITFOR","WHERE","WHILE","WITH","WITHIN 
GROUP","WRITETEXT","$ACTION"],T.dataTypes=["BINARY","BIT","CHAR","CHAR","CHARACTER","DATE","DATETIME2","DATETIMEOFFSET","DEC","DECIMAL","DOUBLE","FLOAT","INT","INTEGER","NATIONAL","NCHAR","NUMERIC","NVARCHAR","PRECISION","REAL","SMALLINT","TIME","TIMESTAMP","VARBINARY","VARCHAR"]},198:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.trino=void 0;const A=R(7163),e=R(2305),S=R(9390),I=(0,A.expandPhrases)(["SELECT [ALL | DISTINCT]"]),O=(0,A.expandPhrases)(["WITH [RECURSIVE]","FROM","WHERE","GROUP BY [ALL | DISTINCT]","HAVING","WINDOW","PARTITION BY","ORDER BY","LIMIT","OFFSET","FETCH {FIRST | NEXT}","INSERT INTO","VALUES","SET","MATCH_RECOGNIZE","MEASURES","ONE ROW PER MATCH","ALL ROWS PER MATCH","AFTER MATCH","PATTERN","SUBSET","DEFINE"]),N=(0,A.expandPhrases)(["CREATE TABLE [IF NOT EXISTS]"]),t=(0,A.expandPhrases)(["CREATE [OR REPLACE] [MATERIALIZED] VIEW","UPDATE","DELETE FROM","DROP TABLE [IF EXISTS]","ALTER TABLE [IF EXISTS]","ADD COLUMN [IF NOT EXISTS]","DROP COLUMN [IF EXISTS]","RENAME COLUMN [IF EXISTS]","RENAME TO","SET AUTHORIZATION [USER | ROLE]","SET PROPERTIES","EXECUTE","TRUNCATE TABLE","ALTER SCHEMA","ALTER MATERIALIZED VIEW","ALTER VIEW","CREATE SCHEMA","CREATE ROLE","DROP SCHEMA","DROP MATERIALIZED VIEW","DROP VIEW","DROP ROLE","EXPLAIN","ANALYZE","EXPLAIN ANALYZE","EXPLAIN ANALYZE VERBOSE","USE","DESCRIBE INPUT","DESCRIBE OUTPUT","REFRESH MATERIALIZED VIEW","RESET SESSION","SET SESSION","SET PATH","SET TIME ZONE","SHOW GRANTS","SHOW CREATE TABLE","SHOW CREATE SCHEMA","SHOW CREATE VIEW","SHOW CREATE MATERIALIZED VIEW","SHOW TABLES","SHOW SCHEMAS","SHOW CATALOGS","SHOW COLUMNS","SHOW STATS FOR","SHOW ROLES","SHOW CURRENT ROLES","SHOW ROLE GRANTS","SHOW FUNCTIONS","SHOW SESSION"]),L=(0,A.expandPhrases)(["UNION [ALL | DISTINCT]","EXCEPT [ALL | DISTINCT]","INTERSECT [ALL | DISTINCT]"]),C=(0,A.expandPhrases)(["JOIN","{LEFT | RIGHT | FULL} [OUTER] JOIN","{INNER | CROSS} JOIN","NATURAL [INNER] JOIN","NATURAL {LEFT | RIGHT | FULL} [OUTER] JOIN"]),_=(0,A.expandPhrases)(["{ROWS | RANGE | GROUPS} BETWEEN","IS [NOT] DISTINCT FROM"]),s=(0,A.expandPhrases)([]);T.trino={name:"trino",tokenizerOptions:{reservedSelect:I,reservedClauses:[...O,...N,...t],reservedSetOperations:L,reservedJoins:C,reservedKeywordPhrases:_,reservedDataTypePhrases:s,reservedKeywords:S.keywords,reservedDataTypes:S.dataTypes,reservedFunctionNames:e.functions,extraParens:["[]","{}"],stringTypes:[{quote:"''-qq",prefixes:["U&"]},{quote:"''-raw",prefixes:["X"],requirePrefix:!0}],identTypes:['""-qq'],paramTypes:{positional:!0},operators:["%","->","=>",":","||","|","^","$"]},formatOptions:{onelineClauses:[...N,...t],tabularOnelineClauses:t}}},2305:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.functions=void 
0,T.functions=["ABS","ACOS","ALL_MATCH","ANY_MATCH","APPROX_DISTINCT","APPROX_MOST_FREQUENT","APPROX_PERCENTILE","APPROX_SET","ARBITRARY","ARRAYS_OVERLAP","ARRAY_AGG","ARRAY_DISTINCT","ARRAY_EXCEPT","ARRAY_INTERSECT","ARRAY_JOIN","ARRAY_MAX","ARRAY_MIN","ARRAY_POSITION","ARRAY_REMOVE","ARRAY_SORT","ARRAY_UNION","ASIN","ATAN","ATAN2","AT_TIMEZONE","AVG","BAR","BETA_CDF","BING_TILE","BING_TILES_AROUND","BING_TILE_AT","BING_TILE_COORDINATES","BING_TILE_POLYGON","BING_TILE_QUADKEY","BING_TILE_ZOOM_LEVEL","BITWISE_AND","BITWISE_AND_AGG","BITWISE_LEFT_SHIFT","BITWISE_NOT","BITWISE_OR","BITWISE_OR_AGG","BITWISE_RIGHT_SHIFT","BITWISE_RIGHT_SHIFT_ARITHMETIC","BITWISE_XOR","BIT_COUNT","BOOL_AND","BOOL_OR","CARDINALITY","CAST","CBRT","CEIL","CEILING","CHAR2HEXINT","CHECKSUM","CHR","CLASSIFY","COALESCE","CODEPOINT","COLOR","COMBINATIONS","CONCAT","CONCAT_WS","CONTAINS","CONTAINS_SEQUENCE","CONVEX_HULL_AGG","CORR","COS","COSH","COSINE_SIMILARITY","COUNT","COUNT_IF","COVAR_POP","COVAR_SAMP","CRC32","CUME_DIST","CURRENT_CATALOG","CURRENT_DATE","CURRENT_GROUPS","CURRENT_SCHEMA","CURRENT_TIME","CURRENT_TIMESTAMP","CURRENT_TIMEZONE","CURRENT_USER","DATE","DATE_ADD","DATE_DIFF","DATE_FORMAT","DATE_PARSE","DATE_TRUNC","DAY","DAY_OF_MONTH","DAY_OF_WEEK","DAY_OF_YEAR","DEGREES","DENSE_RANK","DOW","DOY","E","ELEMENT_AT","EMPTY_APPROX_SET","EVALUATE_CLASSIFIER_PREDICTIONS","EVERY","EXP","EXTRACT","FEATURES","FILTER","FIRST_VALUE","FLATTEN","FLOOR","FORMAT","FORMAT_DATETIME","FORMAT_NUMBER","FROM_BASE","FROM_BASE32","FROM_BASE64","FROM_BASE64URL","FROM_BIG_ENDIAN_32","FROM_BIG_ENDIAN_64","FROM_ENCODED_POLYLINE","FROM_GEOJSON_GEOMETRY","FROM_HEX","FROM_IEEE754_32","FROM_IEEE754_64","FROM_ISO8601_DATE","FROM_ISO8601_TIMESTAMP","FROM_ISO8601_TIMESTAMP_NANOS","FROM_UNIXTIME","FROM_UNIXTIME_NANOS","FROM_UTF8","GEOMETRIC_MEAN","GEOMETRY_FROM_HADOOP_SHAPE","GEOMETRY_INVALID_REASON","GEOMETRY_NEAREST_POINTS","GEOMETRY_TO_BING_TILES","GEOMETRY_UNION","GEOMETRY_UNION_AGG","GREATEST","GREAT_CIRCLE_DISTANCE","HAMMING_DISTANCE","HASH_COUNTS","HISTOGRAM","HMAC_MD5","HMAC_SHA1","HMAC_SHA256","HMAC_SHA512","HOUR","HUMAN_READABLE_SECONDS","IF","INDEX","INFINITY","INTERSECTION_CARDINALITY","INVERSE_BETA_CDF","INVERSE_NORMAL_CDF","IS_FINITE","IS_INFINITE","IS_JSON_SCALAR","IS_NAN","JACCARD_INDEX","JSON_ARRAY_CONTAINS","JSON_ARRAY_GET","JSON_ARRAY_LENGTH","JSON_EXISTS","JSON_EXTRACT","JSON_EXTRACT_SCALAR","JSON_FORMAT","JSON_PARSE","JSON_QUERY","JSON_SIZE","JSON_VALUE","KURTOSIS","LAG","LAST_DAY_OF_MONTH","LAST_VALUE","LEAD","LEARN_CLASSIFIER","LEARN_LIBSVM_CLASSIFIER","LEARN_LIBSVM_REGRESSOR","LEARN_REGRESSOR","LEAST","LENGTH","LEVENSHTEIN_DISTANCE","LINE_INTERPOLATE_POINT","LINE_INTERPOLATE_POINTS","LINE_LOCATE_POINT","LISTAGG","LN","LOCALTIME","LOCALTIMESTAMP","LOG","LOG10","LOG2","LOWER","LPAD","LTRIM","LUHN_CHECK","MAKE_SET_DIGEST","MAP","MAP_AGG","MAP_CONCAT","MAP_ENTRIES","MAP_FILTER","MAP_FROM_ENTRIES","MAP_KEYS","MAP_UNION","MAP_VALUES","MAP_ZIP_WITH","MAX","MAX_BY","MD5","MERGE","MERGE_SET_DIGEST","MILLISECOND","MIN","MINUTE","MIN_BY","MOD","MONTH","MULTIMAP_AGG","MULTIMAP_FROM_ENTRIES","MURMUR3","NAN","NGRAMS","NONE_MATCH","NORMALIZE","NORMAL_CDF","NOW","NTH_VALUE","NTILE","NULLIF","NUMERIC_HISTOGRAM","OBJECTID","OBJECTID_TIMESTAMP","PARSE_DATA_SIZE","PARSE_DATETIME","PARSE_DURATION","PERCENT_RANK","PI","POSITION","POW","POWER","QDIGEST_AGG","QUARTER","RADIANS","RAND","RANDOM","RANK","REDUCE","REDUCE_AGG","REGEXP_COUNT","REGEXP_EXTRACT","REGEXP_EXTRACT_ALL","REGEXP_LIKE","REGEXP_POSITION","REGEXP_REPLACE","REGEXP_SPLIT","RE
GRESS","REGR_INTERCEPT","REGR_SLOPE","RENDER","REPEAT","REPLACE","REVERSE","RGB","ROUND","ROW_NUMBER","RPAD","RTRIM","SECOND","SEQUENCE","SHA1","SHA256","SHA512","SHUFFLE","SIGN","SIMPLIFY_GEOMETRY","SIN","SKEWNESS","SLICE","SOUNDEX","SPATIAL_PARTITIONING","SPATIAL_PARTITIONS","SPLIT","SPLIT_PART","SPLIT_TO_MAP","SPLIT_TO_MULTIMAP","SPOOKY_HASH_V2_32","SPOOKY_HASH_V2_64","SQRT","STARTS_WITH","STDDEV","STDDEV_POP","STDDEV_SAMP","STRPOS","ST_AREA","ST_ASBINARY","ST_ASTEXT","ST_BOUNDARY","ST_BUFFER","ST_CENTROID","ST_CONTAINS","ST_CONVEXHULL","ST_COORDDIM","ST_CROSSES","ST_DIFFERENCE","ST_DIMENSION","ST_DISJOINT","ST_DISTANCE","ST_ENDPOINT","ST_ENVELOPE","ST_ENVELOPEASPTS","ST_EQUALS","ST_EXTERIORRING","ST_GEOMETRIES","ST_GEOMETRYFROMTEXT","ST_GEOMETRYN","ST_GEOMETRYTYPE","ST_GEOMFROMBINARY","ST_INTERIORRINGN","ST_INTERIORRINGS","ST_INTERSECTION","ST_INTERSECTS","ST_ISCLOSED","ST_ISEMPTY","ST_ISRING","ST_ISSIMPLE","ST_ISVALID","ST_LENGTH","ST_LINEFROMTEXT","ST_LINESTRING","ST_MULTIPOINT","ST_NUMGEOMETRIES","ST_NUMINTERIORRING","ST_NUMPOINTS","ST_OVERLAPS","ST_POINT","ST_POINTN","ST_POINTS","ST_POLYGON","ST_RELATE","ST_STARTPOINT","ST_SYMDIFFERENCE","ST_TOUCHES","ST_UNION","ST_WITHIN","ST_X","ST_XMAX","ST_XMIN","ST_Y","ST_YMAX","ST_YMIN","SUBSTR","SUBSTRING","SUM","TAN","TANH","TDIGEST_AGG","TIMESTAMP_OBJECTID","TIMEZONE_HOUR","TIMEZONE_MINUTE","TO_BASE","TO_BASE32","TO_BASE64","TO_BASE64URL","TO_BIG_ENDIAN_32","TO_BIG_ENDIAN_64","TO_CHAR","TO_DATE","TO_ENCODED_POLYLINE","TO_GEOJSON_GEOMETRY","TO_GEOMETRY","TO_HEX","TO_IEEE754_32","TO_IEEE754_64","TO_ISO8601","TO_MILLISECONDS","TO_SPHERICAL_GEOGRAPHY","TO_TIMESTAMP","TO_UNIXTIME","TO_UTF8","TRANSFORM","TRANSFORM_KEYS","TRANSFORM_VALUES","TRANSLATE","TRIM","TRIM_ARRAY","TRUNCATE","TRY","TRY_CAST","TYPEOF","UPPER","URL_DECODE","URL_ENCODE","URL_EXTRACT_FRAGMENT","URL_EXTRACT_HOST","URL_EXTRACT_PARAMETER","URL_EXTRACT_PATH","URL_EXTRACT_PORT","URL_EXTRACT_PROTOCOL","URL_EXTRACT_QUERY","UUID","VALUES_AT_QUANTILES","VALUE_AT_QUANTILE","VARIANCE","VAR_POP","VAR_SAMP","VERSION","WEEK","WEEK_OF_YEAR","WIDTH_BUCKET","WILSON_INTERVAL_LOWER","WILSON_INTERVAL_UPPER","WITH_TIMEZONE","WORD_STEM","XXHASH64","YEAR","YEAR_OF_WEEK","YOW","ZIP","ZIP_WITH","CLASSIFIER","FIRST","LAST","MATCH_NUMBER","NEXT","PERMUTE","PREV"]},9390:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.dataTypes=T.keywords=void 
0,T.keywords=["ABSENT","ADD","ADMIN","AFTER","ALL","ALTER","ANALYZE","AND","ANY","AS","ASC","AT","AUTHORIZATION","BERNOULLI","BETWEEN","BOTH","BY","CALL","CASCADE","CASE","CATALOGS","COLUMN","COLUMNS","COMMENT","COMMIT","COMMITTED","CONDITIONAL","CONSTRAINT","COPARTITION","CREATE","CROSS","CUBE","CURRENT","CURRENT_PATH","CURRENT_ROLE","DATA","DEALLOCATE","DEFAULT","DEFINE","DEFINER","DELETE","DENY","DESC","DESCRIBE","DESCRIPTOR","DISTINCT","DISTRIBUTED","DOUBLE","DROP","ELSE","EMPTY","ENCODING","END","ERROR","ESCAPE","EXCEPT","EXCLUDING","EXECUTE","EXISTS","EXPLAIN","FALSE","FETCH","FINAL","FIRST","FOLLOWING","FOR","FROM","FULL","FUNCTIONS","GRANT","GRANTED","GRANTS","GRAPHVIZ","GROUP","GROUPING","GROUPS","HAVING","IGNORE","IN","INCLUDING","INITIAL","INNER","INPUT","INSERT","INTERSECT","INTERVAL","INTO","INVOKER","IO","IS","ISOLATION","JOIN","JSON","JSON_ARRAY","JSON_OBJECT","KEEP","KEY","KEYS","LAST","LATERAL","LEADING","LEFT","LEVEL","LIKE","LIMIT","LOCAL","LOGICAL","MATCH","MATCHED","MATCHES","MATCH_RECOGNIZE","MATERIALIZED","MEASURES","NATURAL","NEXT","NFC","NFD","NFKC","NFKD","NO","NONE","NOT","NULL","NULLS","OBJECT","OF","OFFSET","OMIT","ON","ONE","ONLY","OPTION","OR","ORDER","ORDINALITY","OUTER","OUTPUT","OVER","OVERFLOW","PARTITION","PARTITIONS","PASSING","PAST","PATH","PATTERN","PER","PERMUTE","PRECEDING","PRECISION","PREPARE","PRIVILEGES","PROPERTIES","PRUNE","QUOTES","RANGE","READ","RECURSIVE","REFRESH","RENAME","REPEATABLE","RESET","RESPECT","RESTRICT","RETURNING","REVOKE","RIGHT","ROLE","ROLES","ROLLBACK","ROLLUP","ROW","ROWS","RUNNING","SCALAR","SCHEMA","SCHEMAS","SECURITY","SEEK","SELECT","SERIALIZABLE","SESSION","SET","SETS","SHOW","SKIP","SOME","START","STATS","STRING","SUBSET","SYSTEM","TABLE","TABLES","TABLESAMPLE","TEXT","THEN","TIES","TIME","TIMESTAMP","TO","TRAILING","TRANSACTION","TRUE","TYPE","UESCAPE","UNBOUNDED","UNCOMMITTED","UNCONDITIONAL","UNION","UNIQUE","UNKNOWN","UNMATCHED","UNNEST","UPDATE","USE","USER","USING","UTF16","UTF32","UTF8","VALIDATE","VALUE","VALUES","VERBOSE","VIEW","WHEN","WHERE","WINDOW","WITH","WITHIN","WITHOUT","WORK","WRAPPER","WRITE","ZONE"],T.dataTypes=["BIGINT","INT","INTEGER","SMALLINT","TINYINT","BOOLEAN","DATE","DECIMAL","REAL","DOUBLE","HYPERLOGLOG","QDIGEST","TDIGEST","P4HYPERLOGLOG","INTERVAL","TIMESTAMP","TIME","VARBINARY","VARCHAR","CHAR","ROW","ARRAY","MAP","JSON","JSON2016","IPADDRESS","GEOMETRY","UUID","SETDIGEST","JONIREGEXP","RE2JREGEXP","LIKEPATTERN","COLOR","CODEPOINTS","FUNCTION","JSONPATH"]},1328:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.NestedComment=void 0;const R=/\/\*/uy,A=/[\s\S]/uy,e=/\*\//uy;T.NestedComment=class{constructor(){this.lastIndex=0}exec(E){let T,S="",I=0;if(!(T=this.matchSection(R,E)))return null;for(S+=T,I++;I>0;)if(T=this.matchSection(R,E))S+=T,I++;else if(T=this.matchSection(e,E))S+=T,I--;else{if(!(T=this.matchSection(A,E)))return null;S+=T}return[S]}matchSection(E,T){E.lastIndex=this.lastIndex;const R=E.exec(T);return R&&(this.lastIndex+=R[0].length),R?R[0]:null}}},7165:function(E,T,R){"use strict";var A=this&&this.__createBinding||(Object.create?function(E,T,R,A){void 0===A&&(A=R);var e=Object.getOwnPropertyDescriptor(T,R);e&&!("get"in e?!T.__esModule:e.writable||e.configurable)||(e={enumerable:!0,get:function(){return T[R]}}),Object.defineProperty(E,A,e)}:function(E,T,R,A){void 
0===A&&(A=R),E[A]=T[R]}),e=this&&this.__setModuleDefault||(Object.create?function(E,T){Object.defineProperty(E,"default",{enumerable:!0,value:T})}:function(E,T){E.default=T}),S=this&&this.__importStar||function(E){if(E&&E.__esModule)return E;var T={};if(null!=E)for(var R in E)"default"!==R&&Object.prototype.hasOwnProperty.call(E,R)&&A(T,E,R);return e(T,E),T},I=this&&this.__importDefault||function(E){return E&&E.__esModule?E:{default:E}};Object.defineProperty(T,"__esModule",{value:!0});const O=R(2437),N=S(R(8817)),t=I(R(1983)),L=R(3443),C=R(1044),_=R(1328);T.default=class{constructor(E,T){this.cfg=E,this.dialectName=T,this.rulesBeforeParams=this.buildRulesBeforeParams(E),this.rulesAfterParams=this.buildRulesAfterParams(E)}tokenize(E,T){const R=[...this.rulesBeforeParams,...this.buildParamRules(this.cfg,T),...this.rulesAfterParams],A=new t.default(R,this.dialectName).tokenize(E);return this.cfg.postProcess?this.cfg.postProcess(A):A}buildRulesBeforeParams(E){var T,R,A;return this.validRules([{type:O.TokenType.DISABLE_COMMENT,regex:/(\/\* *sql-formatter-disable *\*\/[\s\S]*?(?:\/\* *sql-formatter-enable *\*\/|$))/uy},{type:O.TokenType.BLOCK_COMMENT,regex:E.nestedBlockComments?new _.NestedComment:/(\/\*[^]*?\*\/)/uy},{type:O.TokenType.LINE_COMMENT,regex:N.lineComment(null!==(T=E.lineCommentTypes)&&void 0!==T?T:["--"])},{type:O.TokenType.QUOTED_IDENTIFIER,regex:N.string(E.identTypes)},{type:O.TokenType.NUMBER,regex:E.underscoresInNumbers?/(?:0x[0-9a-fA-F_]+|0b[01_]+|(?:-\s*)?(?:[0-9_]*\.[0-9_]+|[0-9_]+(?:\.[0-9_]*)?)(?:[eE][-+]?[0-9_]+(?:\.[0-9_]+)?)?)(?![\w\p{Alphabetic}])/uy:/(?:0x[0-9a-fA-F]+|0b[01]+|(?:-\s*)?(?:[0-9]*\.[0-9]+|[0-9]+(?:\.[0-9]*)?)(?:[eE][-+]?[0-9]+(?:\.[0-9]+)?)?)(?![\w\p{Alphabetic}])/uy},{type:O.TokenType.RESERVED_KEYWORD_PHRASE,regex:N.reservedWord(null!==(R=E.reservedKeywordPhrases)&&void 0!==R?R:[],E.identChars),text:s},{type:O.TokenType.RESERVED_DATA_TYPE_PHRASE,regex:N.reservedWord(null!==(A=E.reservedDataTypePhrases)&&void 0!==A?A:[],E.identChars),text:s},{type:O.TokenType.CASE,regex:/CASE\b/iuy,text:s},{type:O.TokenType.END,regex:/END\b/iuy,text:s},{type:O.TokenType.BETWEEN,regex:/BETWEEN\b/iuy,text:s},{type:O.TokenType.LIMIT,regex:E.reservedClauses.includes("LIMIT")?/LIMIT\b/iuy:void 0,text:s},{type:O.TokenType.RESERVED_CLAUSE,regex:N.reservedWord(E.reservedClauses,E.identChars),text:s},{type:O.TokenType.RESERVED_SELECT,regex:N.reservedWord(E.reservedSelect,E.identChars),text:s},{type:O.TokenType.RESERVED_SET_OPERATION,regex:N.reservedWord(E.reservedSetOperations,E.identChars),text:s},{type:O.TokenType.WHEN,regex:/WHEN\b/iuy,text:s},{type:O.TokenType.ELSE,regex:/ELSE\b/iuy,text:s},{type:O.TokenType.THEN,regex:/THEN\b/iuy,text:s},{type:O.TokenType.RESERVED_JOIN,regex:N.reservedWord(E.reservedJoins,E.identChars),text:s},{type:O.TokenType.AND,regex:/AND\b/iuy,text:s},{type:O.TokenType.OR,regex:/OR\b/iuy,text:s},{type:O.TokenType.XOR,regex:E.supportsXor?/XOR\b/iuy:void 0,text:s},...E.operatorKeyword?[{type:O.TokenType.OPERATOR,regex:/OPERATOR *\([^)]+\)/iuy}]:[],{type:O.TokenType.RESERVED_FUNCTION_NAME,regex:N.reservedWord(E.reservedFunctionNames,E.identChars),text:s},{type:O.TokenType.RESERVED_DATA_TYPE,regex:N.reservedWord(E.reservedDataTypes,E.identChars),text:s},{type:O.TokenType.RESERVED_KEYWORD,regex:N.reservedWord(E.reservedKeywords,E.identChars),text:s}])}buildRulesAfterParams(E){var T,R;return this.validRules([{type:O.TokenType.VARIABLE,regex:E.variableTypes?N.variable(E.variableTypes):void 
0},{type:O.TokenType.STRING,regex:N.string(E.stringTypes)},{type:O.TokenType.IDENTIFIER,regex:N.identifier(E.identChars)},{type:O.TokenType.DELIMITER,regex:/[;]/uy},{type:O.TokenType.COMMA,regex:/[,]/y},{type:O.TokenType.OPEN_PAREN,regex:N.parenthesis("open",E.extraParens)},{type:O.TokenType.CLOSE_PAREN,regex:N.parenthesis("close",E.extraParens)},{type:O.TokenType.OPERATOR,regex:N.operator(["+","-","/",">","<","=","<>","<=",">=","!=",...null!==(T=E.operators)&&void 0!==T?T:[]])},{type:O.TokenType.ASTERISK,regex:/[*]/uy},{type:O.TokenType.PROPERTY_ACCESS_OPERATOR,regex:N.operator([".",...null!==(R=E.propertyAccessOperators)&&void 0!==R?R:[]])}])}buildParamRules(E,T){var R,A,e,S,I;const t={named:(null==T?void 0:T.named)||(null===(R=E.paramTypes)||void 0===R?void 0:R.named)||[],quoted:(null==T?void 0:T.quoted)||(null===(A=E.paramTypes)||void 0===A?void 0:A.quoted)||[],numbered:(null==T?void 0:T.numbered)||(null===(e=E.paramTypes)||void 0===e?void 0:e.numbered)||[],positional:"boolean"==typeof(null==T?void 0:T.positional)?T.positional:null===(S=E.paramTypes)||void 0===S?void 0:S.positional,custom:(null==T?void 0:T.custom)||(null===(I=E.paramTypes)||void 0===I?void 0:I.custom)||[]};return this.validRules([{type:O.TokenType.NAMED_PARAMETER,regex:N.parameter(t.named,N.identifierPattern(E.paramChars||E.identChars)),key:E=>E.slice(1)},{type:O.TokenType.QUOTED_PARAMETER,regex:N.parameter(t.quoted,N.stringPattern(E.identTypes)),key:E=>(({tokenKey:E,quoteChar:T})=>E.replace(new RegExp((0,L.escapeRegExp)("\\"+T),"gu"),T))({tokenKey:E.slice(2,-1),quoteChar:E.slice(-1)})},{type:O.TokenType.NUMBERED_PARAMETER,regex:N.parameter(t.numbered,"[0-9]+"),key:E=>E.slice(1)},{type:O.TokenType.POSITIONAL_PARAMETER,regex:t.positional?/[?]/y:void 0},...t.custom.map((E=>{var T;return{type:O.TokenType.CUSTOM_PARAMETER,regex:(0,L.patternToRegex)(E.regex),key:null!==(T=E.key)&&void 0!==T?T:E=>E}}))])}validRules(E){return E.filter((E=>Boolean(E.regex)))}};const s=E=>(0,C.equalizeWhitespace)(E.toUpperCase());E.exports=T.default},1983:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0});const A=R(4494),e=R(3443);T.default=class{constructor(E,T){this.rules=E,this.dialectName=T,this.input="",this.index=0}tokenize(E){this.input=E,this.index=0;const T=[];let R;for(;this.index{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.disambiguateTokens=void 0;const A=R(2437);T.disambiguateTokens=function(E){return E.map(e).map(S).map(I).map(O).map(N)};const e=(E,T,R)=>{if((0,A.isReserved)(E.type)){const e=t(R,T);if(e&&e.type===A.TokenType.PROPERTY_ACCESS_OPERATOR)return Object.assign(Object.assign({},E),{type:A.TokenType.IDENTIFIER,text:E.raw});const S=L(R,T);if(S&&S.type===A.TokenType.PROPERTY_ACCESS_OPERATOR)return Object.assign(Object.assign({},E),{type:A.TokenType.IDENTIFIER,text:E.raw})}return E},S=(E,T,R)=>{if(E.type===A.TokenType.RESERVED_FUNCTION_NAME){const e=L(R,T);if(!e||!C(e))return Object.assign(Object.assign({},E),{type:A.TokenType.IDENTIFIER,text:E.raw})}return E},I=(E,T,R)=>{if(E.type===A.TokenType.RESERVED_DATA_TYPE){const e=L(R,T);if(e&&C(e))return Object.assign(Object.assign({},E),{type:A.TokenType.RESERVED_PARAMETERIZED_DATA_TYPE})}return E},O=(E,T,R)=>{if(E.type===A.TokenType.IDENTIFIER){const e=L(R,T);if(e&&_(e))return Object.assign(Object.assign({},E),{type:A.TokenType.ARRAY_IDENTIFIER})}return E},N=(E,T,R)=>{if(E.type===A.TokenType.RESERVED_DATA_TYPE){const e=L(R,T);if(e&&_(e))return Object.assign(Object.assign({},E),{type:A.TokenType.ARRAY_KEYWORD})}return 
E},t=(E,T)=>L(E,T,-1),L=(E,T,R=1)=>{let A=1;for(;E[T+A*R]&&s(E[T+A*R]);)A++;return E[T+A*R]},C=E=>E.type===A.TokenType.OPEN_PAREN&&"("===E.text,_=E=>E.type===A.TokenType.OPEN_PAREN&&"["===E.text,s=E=>E.type===A.TokenType.BLOCK_COMMENT||E.type===A.TokenType.LINE_COMMENT},4494:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.lineColFromIndex=void 0,T.lineColFromIndex=function(E,T){const R=E.slice(0,T).split(/\n/);return{line:R.length,col:R[R.length-1].length+1}}},8817:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.identifierPattern=T.identifier=T.string=T.stringPattern=T.variable=T.quotePatterns=T.parameter=T.reservedWord=T.operator=T.parenthesis=T.lineComment=void 0;const A=R(1044),e=R(3443);T.lineComment=E=>new RegExp(`(?:${E.map(e.escapeRegExp).join("|")}).*?(?=\r\n|\r|\n|$)`,"uy"),T.parenthesis=(E,T=[])=>{const R="open"===E?0:1,A=["()",...T].map((E=>E[R]));return(0,e.patternToRegex)(A.map(e.escapeRegExp).join("|"))},T.operator=E=>(0,e.patternToRegex)(`${(0,A.sortByLengthDesc)(E).map(e.escapeRegExp).join("|")}`),T.reservedWord=(E,T={})=>{if(0===E.length)return/^\b$/u;const R=(({rest:E,dashes:T})=>E||T?`(?![${E||""}${T?"-":""}])`:"")(T),S=(0,A.sortByLengthDesc)(E).map(e.escapeRegExp).join("|").replace(/ /gu,"\\s+");return new RegExp(`(?:${S})${R}\\b`,"iuy")},T.parameter=(E,T)=>{if(!E.length)return;const R=E.map(e.escapeRegExp).join("|");return(0,e.patternToRegex)(`(?:${R})(?:${T})`)},T.quotePatterns={"``":"(?:`[^`]*`)+","[]":String.raw`(?:\[[^\]]*\])(?:\][^\]]*\])*`,'""-qq':String.raw`(?:"[^"]*")+`,'""-bs':String.raw`(?:"[^"\\]*(?:\\.[^"\\]*)*")`,'""-qq-bs':String.raw`(?:"[^"\\]*(?:\\.[^"\\]*)*")+`,'""-raw':String.raw`(?:"[^"]*")`,"''-qq":String.raw`(?:'[^']*')+`,"''-bs":String.raw`(?:'[^'\\]*(?:\\.[^'\\]*)*')`,"''-qq-bs":String.raw`(?:'[^'\\]*(?:\\.[^'\\]*)*')+`,"''-raw":String.raw`(?:'[^']*')`,$$:String.raw`(?\$\w*\$)[\s\S]*?\k`,"'''..'''":String.raw`'''[^\\]*?(?:\\.[^\\]*?)*?'''`,'""".."""':String.raw`"""[^\\]*?(?:\\.[^\\]*?)*?"""`,"{}":String.raw`(?:\{[^\}]*\})`,"q''":(()=>{const E={"<":">","[":"]","(":")","{":"}"},T=Object.entries(E).map((([E,T])=>"{left}(?:(?!{right}').)*?{right}".replace(/{left}/g,(0,e.escapeRegExp)(E)).replace(/{right}/g,(0,e.escapeRegExp)(T)))),R=(0,e.escapeRegExp)(Object.keys(E).join(""));return`[Qq]'(?:${String.raw`(?[^\s${R}])(?:(?!\k').)*?\k`}|${T.join("|")})'`})()};const S=E=>"string"==typeof E?T.quotePatterns[E]:"regex"in E?E.regex:(0,e.prefixesPattern)(E)+T.quotePatterns[E.quote];T.variable=E=>(0,e.patternToRegex)(E.map((E=>"regex"in E?E.regex:S(E))).join("|")),T.stringPattern=E=>E.map(S).join("|"),T.string=E=>(0,e.patternToRegex)((0,T.stringPattern)(E)),T.identifier=(E={})=>(0,e.patternToRegex)((0,T.identifierPattern)(E)),T.identifierPattern=({first:E,rest:T,dashes:R,allowFirstCharNumber:A}={})=>{const S="\\p{Alphabetic}\\p{Mark}_",I="\\p{Decimal_Number}",O=(0,e.escapeRegExp)(null!=E?E:""),N=(0,e.escapeRegExp)(null!=T?T:""),t=A?`[${S}${I}${O}][${S}${I}${N}]*`:`[${S}${O}][${S}${I}${N}]*`;return R?(0,e.withDashes)(t):t}},3443:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.prefixesPattern=T.withDashes=T.toCaseInsensitivePattern=T.patternToRegex=T.WHITESPACE_REGEX=T.escapeRegExp=void 0,T.escapeRegExp=E=>E.replace(/[.*+?^${}()|[\]\\]/gu,"\\$&"),T.WHITESPACE_REGEX=/\s+/uy,T.patternToRegex=E=>new RegExp(`(?:${E})`,"uy"),T.toCaseInsensitivePattern=E=>E.split("").map((E=>/ 
/gu.test(E)?"\\s+":`[${E.toUpperCase()}${E.toLowerCase()}]`)).join(""),T.withDashes=E=>E+"(?:-"+E+")*",T.prefixesPattern=({prefixes:E,requirePrefix:R})=>`(?:${E.map(T.toCaseInsensitivePattern).join("|")}${R?"":"|"})`},2437:(E,T)=>{"use strict";var R;Object.defineProperty(T,"__esModule",{value:!0}),T.isLogicalOperator=T.isReserved=T.isToken=T.testToken=T.EOF_TOKEN=T.createEofToken=T.TokenType=void 0,function(E){E.QUOTED_IDENTIFIER="QUOTED_IDENTIFIER",E.IDENTIFIER="IDENTIFIER",E.STRING="STRING",E.VARIABLE="VARIABLE",E.RESERVED_DATA_TYPE="RESERVED_DATA_TYPE",E.RESERVED_PARAMETERIZED_DATA_TYPE="RESERVED_PARAMETERIZED_DATA_TYPE",E.RESERVED_KEYWORD="RESERVED_KEYWORD",E.RESERVED_FUNCTION_NAME="RESERVED_FUNCTION_NAME",E.RESERVED_KEYWORD_PHRASE="RESERVED_KEYWORD_PHRASE",E.RESERVED_DATA_TYPE_PHRASE="RESERVED_DATA_TYPE_PHRASE",E.RESERVED_SET_OPERATION="RESERVED_SET_OPERATION",E.RESERVED_CLAUSE="RESERVED_CLAUSE",E.RESERVED_SELECT="RESERVED_SELECT",E.RESERVED_JOIN="RESERVED_JOIN",E.ARRAY_IDENTIFIER="ARRAY_IDENTIFIER",E.ARRAY_KEYWORD="ARRAY_KEYWORD",E.CASE="CASE",E.END="END",E.WHEN="WHEN",E.ELSE="ELSE",E.THEN="THEN",E.LIMIT="LIMIT",E.BETWEEN="BETWEEN",E.AND="AND",E.OR="OR",E.XOR="XOR",E.OPERATOR="OPERATOR",E.COMMA="COMMA",E.ASTERISK="ASTERISK",E.PROPERTY_ACCESS_OPERATOR="PROPERTY_ACCESS_OPERATOR",E.OPEN_PAREN="OPEN_PAREN",E.CLOSE_PAREN="CLOSE_PAREN",E.LINE_COMMENT="LINE_COMMENT",E.BLOCK_COMMENT="BLOCK_COMMENT",E.DISABLE_COMMENT="DISABLE_COMMENT",E.NUMBER="NUMBER",E.NAMED_PARAMETER="NAMED_PARAMETER",E.QUOTED_PARAMETER="QUOTED_PARAMETER",E.NUMBERED_PARAMETER="NUMBERED_PARAMETER",E.POSITIONAL_PARAMETER="POSITIONAL_PARAMETER",E.CUSTOM_PARAMETER="CUSTOM_PARAMETER",E.DELIMITER="DELIMITER",E.EOF="EOF"}(R=T.TokenType||(T.TokenType={})),T.createEofToken=E=>({type:R.EOF,raw:"«EOF»",text:"«EOF»",start:E}),T.EOF_TOKEN=(0,T.createEofToken)(1/0),T.testToken=E=>T=>T.type===E.type&&T.text===E.text,T.isToken={ARRAY:(0,T.testToken)({text:"ARRAY",type:R.RESERVED_DATA_TYPE}),BY:(0,T.testToken)({text:"BY",type:R.RESERVED_KEYWORD}),SET:(0,T.testToken)({text:"SET",type:R.RESERVED_CLAUSE}),STRUCT:(0,T.testToken)({text:"STRUCT",type:R.RESERVED_DATA_TYPE}),WINDOW:(0,T.testToken)({text:"WINDOW",type:R.RESERVED_CLAUSE}),VALUES:(0,T.testToken)({text:"VALUES",type:R.RESERVED_CLAUSE})},T.isReserved=E=>E===R.RESERVED_DATA_TYPE||E===R.RESERVED_KEYWORD||E===R.RESERVED_FUNCTION_NAME||E===R.RESERVED_KEYWORD_PHRASE||E===R.RESERVED_DATA_TYPE_PHRASE||E===R.RESERVED_CLAUSE||E===R.RESERVED_SELECT||E===R.RESERVED_SET_OPERATION||E===R.RESERVED_JOIN||E===R.ARRAY_KEYWORD||E===R.CASE||E===R.END||E===R.WHEN||E===R.ELSE||E===R.THEN||E===R.LIMIT||E===R.BETWEEN||E===R.AND||E===R.OR||E===R.XOR,T.isLogicalOperator=E=>E===R.AND||E===R.OR||E===R.XOR},7688:(E,T,R)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0});const A=R(4494),e=R(2437);T.default=class{constructor(E){this.tokenize=E,this.index=0,this.tokens=[],this.input=""}reset(E,T){this.input=E,this.index=0,this.tokens=this.tokenize(E)}next(){return this.tokens[this.index++]}save(){}formatError(E){const{line:T,col:R}=(0,A.lineColFromIndex)(this.input,E.start);return`Parse error at token: ${E.text} at line ${T} column ${R}`}has(E){return E in e.TokenType}},E.exports=T.default},1301:(E,T)=>{"use strict";var R;Object.defineProperty(T,"__esModule",{value:!0}),T.NodeType=void 
0,(R=T.NodeType||(T.NodeType={})).statement="statement",R.clause="clause",R.set_operation="set_operation",R.function_call="function_call",R.parameterized_data_type="parameterized_data_type",R.array_subscript="array_subscript",R.property_access="property_access",R.parenthesis="parenthesis",R.between_predicate="between_predicate",R.case_expression="case_expression",R.case_when="case_when",R.case_else="case_else",R.limit_clause="limit_clause",R.all_columns_asterisk="all_columns_asterisk",R.literal="literal",R.identifier="identifier",R.keyword="keyword",R.data_type="data_type",R.parameter="parameter",R.operator="operator",R.comma="comma",R.line_comment="line_comment",R.block_comment="block_comment",R.disable_comment="disable_comment"},1858:function(E,T,R){"use strict";var A=this&&this.__importDefault||function(E){return E&&E.__esModule?E:{default:E}};Object.defineProperty(T,"__esModule",{value:!0}),T.createParser=void 0;const e=A(R(8515)),S=R(5781),I=A(R(3624)),O=A(R(7688)),N=R(2437),{Parser:t,Grammar:L}=e.default;T.createParser=function(E){let T={};const R=new O.default((R=>[...(0,S.disambiguateTokens)(E.tokenize(R,T)),(0,N.createEofToken)(R.length)])),A=new t(L.fromCompiled(I.default),{lexer:R});return{parse:(E,R)=>{T=R;const{results:e}=A.feed(E);if(1===e.length)return e[0];throw 0===e.length?new Error("Parse error: Invalid SQL"):new Error(`Parse error: Ambiguous grammar\n${JSON.stringify(e,void 0,2)}`)}}}},3624:function(E,T,R){"use strict";var A=this&&this.__importDefault||function(E){return E&&E.__esModule?E:{default:E}};function e(E){return E[0]}Object.defineProperty(T,"__esModule",{value:!0});const S=A(R(7688)),I=R(1301),O=R(2437),N=new S.default((E=>[])),t=([[E]])=>E,L=E=>({type:I.NodeType.keyword,tokenType:E.type,text:E.text,raw:E.raw}),C=E=>({type:I.NodeType.data_type,text:E.text,raw:E.raw}),_=(E,{leading:T,trailing:R})=>((null==T?void 0:T.length)&&(E=Object.assign(Object.assign({},E),{leadingComments:T})),(null==R?void 0:R.length)&&(E=Object.assign(Object.assign({},E),{trailingComments:R})),E),s=(E,{leading:T,trailing:R})=>{if(null==T?void 0:T.length){const[R,...A]=E;E=[_(R,{leading:T}),...A]}if(null==R?void 0:R.length){const T=E.slice(0,-1),A=E[E.length-1];E=[...T,_(A,{trailing:R})]}return E},r={Lexer:N,ParserRules:[{name:"main$ebnf$1",symbols:[]},{name:"main$ebnf$1",symbols:["main$ebnf$1","statement"],postprocess:E=>E[0].concat([E[1]])},{name:"main",symbols:["main$ebnf$1"],postprocess:([E])=>{const T=E[E.length-1];return 
T&&!T.hasSemicolon?T.children.length>0?E:E.slice(0,-1):E}},{name:"statement$subexpression$1",symbols:[N.has("DELIMITER")?{type:"DELIMITER"}:DELIMITER]},{name:"statement$subexpression$1",symbols:[N.has("EOF")?{type:"EOF"}:EOF]},{name:"statement",symbols:["expressions_or_clauses","statement$subexpression$1"],postprocess:([E,[T]])=>({type:I.NodeType.statement,children:E,hasSemicolon:T.type===O.TokenType.DELIMITER})},{name:"expressions_or_clauses$ebnf$1",symbols:[]},{name:"expressions_or_clauses$ebnf$1",symbols:["expressions_or_clauses$ebnf$1","free_form_sql"],postprocess:E=>E[0].concat([E[1]])},{name:"expressions_or_clauses$ebnf$2",symbols:[]},{name:"expressions_or_clauses$ebnf$2",symbols:["expressions_or_clauses$ebnf$2","clause"],postprocess:E=>E[0].concat([E[1]])},{name:"expressions_or_clauses",symbols:["expressions_or_clauses$ebnf$1","expressions_or_clauses$ebnf$2"],postprocess:([E,T])=>[...E,...T]},{name:"clause$subexpression$1",symbols:["limit_clause"]},{name:"clause$subexpression$1",symbols:["select_clause"]},{name:"clause$subexpression$1",symbols:["other_clause"]},{name:"clause$subexpression$1",symbols:["set_operation"]},{name:"clause",symbols:["clause$subexpression$1"],postprocess:t},{name:"limit_clause$ebnf$1$subexpression$1$ebnf$1",symbols:["free_form_sql"]},{name:"limit_clause$ebnf$1$subexpression$1$ebnf$1",symbols:["limit_clause$ebnf$1$subexpression$1$ebnf$1","free_form_sql"],postprocess:E=>E[0].concat([E[1]])},{name:"limit_clause$ebnf$1$subexpression$1",symbols:[N.has("COMMA")?{type:"COMMA"}:COMMA,"limit_clause$ebnf$1$subexpression$1$ebnf$1"]},{name:"limit_clause$ebnf$1",symbols:["limit_clause$ebnf$1$subexpression$1"],postprocess:e},{name:"limit_clause$ebnf$1",symbols:[],postprocess:()=>null},{name:"limit_clause",symbols:[N.has("LIMIT")?{type:"LIMIT"}:LIMIT,"_","expression_chain_","limit_clause$ebnf$1"],postprocess:([E,T,R,A])=>{if(A){const[e,S]=A;return{type:I.NodeType.limit_clause,limitKw:_(L(E),{trailing:T}),offset:R,count:S}}return{type:I.NodeType.limit_clause,limitKw:_(L(E),{trailing:T}),count:R}}},{name:"select_clause$subexpression$1$ebnf$1",symbols:[]},{name:"select_clause$subexpression$1$ebnf$1",symbols:["select_clause$subexpression$1$ebnf$1","free_form_sql"],postprocess:E=>E[0].concat([E[1]])},{name:"select_clause$subexpression$1",symbols:["all_columns_asterisk","select_clause$subexpression$1$ebnf$1"]},{name:"select_clause$subexpression$1$ebnf$2",symbols:[]},{name:"select_clause$subexpression$1$ebnf$2",symbols:["select_clause$subexpression$1$ebnf$2","free_form_sql"],postprocess:E=>E[0].concat([E[1]])},{name:"select_clause$subexpression$1",symbols:["asteriskless_free_form_sql","select_clause$subexpression$1$ebnf$2"]},{name:"select_clause",symbols:[N.has("RESERVED_SELECT")?{type:"RESERVED_SELECT"}:RESERVED_SELECT,"select_clause$subexpression$1"],postprocess:([E,[T,R]])=>({type:I.NodeType.clause,nameKw:L(E),children:[T,...R]})},{name:"select_clause",symbols:[N.has("RESERVED_SELECT")?{type:"RESERVED_SELECT"}:RESERVED_SELECT],postprocess:([E])=>({type:I.NodeType.clause,nameKw:L(E),children:[]})},{name:"all_columns_asterisk",symbols:[N.has("ASTERISK")?{type:"ASTERISK"}:ASTERISK],postprocess:()=>({type:I.NodeType.all_columns_asterisk})},{name:"other_clause$ebnf$1",symbols:[]},{name:"other_clause$ebnf$1",symbols:["other_clause$ebnf$1","free_form_sql"],postprocess:E=>E[0].concat([E[1]])},{name:"other_clause",symbols:[N.has("RESERVED_CLAUSE")?{type:"RESERVED_CLAUSE"}:RESERVED_CLAUSE,"other_clause$ebnf$1"],postprocess:([E,T])=>({type:I.NodeType.clause,nameKw:L(E),children:T})},{name
:"set_operation$ebnf$1",symbols:[]},{name:"set_operation$ebnf$1",symbols:["set_operation$ebnf$1","free_form_sql"],postprocess:E=>E[0].concat([E[1]])},{name:"set_operation",symbols:[N.has("RESERVED_SET_OPERATION")?{type:"RESERVED_SET_OPERATION"}:RESERVED_SET_OPERATION,"set_operation$ebnf$1"],postprocess:([E,T])=>({type:I.NodeType.set_operation,nameKw:L(E),children:T})},{name:"expression_chain_$ebnf$1",symbols:["expression_with_comments_"]},{name:"expression_chain_$ebnf$1",symbols:["expression_chain_$ebnf$1","expression_with_comments_"],postprocess:E=>E[0].concat([E[1]])},{name:"expression_chain_",symbols:["expression_chain_$ebnf$1"],postprocess:e},{name:"expression_chain$ebnf$1",symbols:[]},{name:"expression_chain$ebnf$1",symbols:["expression_chain$ebnf$1","_expression_with_comments"],postprocess:E=>E[0].concat([E[1]])},{name:"expression_chain",symbols:["expression","expression_chain$ebnf$1"],postprocess:([E,T])=>[E,...T]},{name:"andless_expression_chain$ebnf$1",symbols:[]},{name:"andless_expression_chain$ebnf$1",symbols:["andless_expression_chain$ebnf$1","_andless_expression_with_comments"],postprocess:E=>E[0].concat([E[1]])},{name:"andless_expression_chain",symbols:["andless_expression","andless_expression_chain$ebnf$1"],postprocess:([E,T])=>[E,...T]},{name:"expression_with_comments_",symbols:["expression","_"],postprocess:([E,T])=>_(E,{trailing:T})},{name:"_expression_with_comments",symbols:["_","expression"],postprocess:([E,T])=>_(T,{leading:E})},{name:"_andless_expression_with_comments",symbols:["_","andless_expression"],postprocess:([E,T])=>_(T,{leading:E})},{name:"free_form_sql$subexpression$1",symbols:["asteriskless_free_form_sql"]},{name:"free_form_sql$subexpression$1",symbols:["asterisk"]},{name:"free_form_sql",symbols:["free_form_sql$subexpression$1"],postprocess:t},{name:"asteriskless_free_form_sql$subexpression$1",symbols:["asteriskless_andless_expression"]},{name:"asteriskless_free_form_sql$subexpression$1",symbols:["logic_operator"]},{name:"asteriskless_free_form_sql$subexpression$1",symbols:["comma"]},{name:"asteriskless_free_form_sql$subexpression$1",symbols:["comment"]},{name:"asteriskless_free_form_sql$subexpression$1",symbols:["other_keyword"]},{name:"asteriskless_free_form_sql",symbols:["asteriskless_free_form_sql$subexpression$1"],postprocess:t},{name:"expression$subexpression$1",symbols:["andless_expression"]},{name:"expression$subexpression$1",symbols:["logic_operator"]},{name:"expression",symbols:["expression$subexpression$1"],postprocess:t},{name:"andless_expression$subexpression$1",symbols:["asteriskless_andless_expression"]},{name:"andless_expression$subexpression$1",symbols:["asterisk"]},{name:"andless_expression",symbols:["andless_expression$subexpression$1"],postprocess:t},{name:"asteriskless_andless_expression$subexpression$1",symbols:["atomic_expression"]},{name:"asteriskless_andless_expression$subexpression$1",symbols:["between_predicate"]},{name:"asteriskless_andless_expression$subexpression$1",symbols:["case_expression"]},{name:"asteriskless_andless_expression",symbols:["asteriskless_andless_expression$subexpression$1"],postprocess:t},{name:"atomic_expression$subexpression$1",symbols:["array_subscript"]},{name:"atomic_expression$subexpression$1",symbols:["function_call"]},{name:"atomic_expression$subexpression$1",symbols:["property_access"]},{name:"atomic_expression$subexpression$1",symbols:["parenthesis"]},{name:"atomic_expression$subexpression$1",symbols:["curly_braces"]},{name:"atomic_expression$subexpression$1",symbols:["square_brackets"]},{name:"atomi
c_expression$subexpression$1",symbols:["operator"]},{name:"atomic_expression$subexpression$1",symbols:["identifier"]},{name:"atomic_expression$subexpression$1",symbols:["parameter"]},{name:"atomic_expression$subexpression$1",symbols:["literal"]},{name:"atomic_expression$subexpression$1",symbols:["data_type"]},{name:"atomic_expression$subexpression$1",symbols:["keyword"]},{name:"atomic_expression",symbols:["atomic_expression$subexpression$1"],postprocess:t},{name:"array_subscript",symbols:[N.has("ARRAY_IDENTIFIER")?{type:"ARRAY_IDENTIFIER"}:ARRAY_IDENTIFIER,"_","square_brackets"],postprocess:([E,T,R])=>({type:I.NodeType.array_subscript,array:_({type:I.NodeType.identifier,quoted:!1,text:E.text},{trailing:T}),parenthesis:R})},{name:"array_subscript",symbols:[N.has("ARRAY_KEYWORD")?{type:"ARRAY_KEYWORD"}:ARRAY_KEYWORD,"_","square_brackets"],postprocess:([E,T,R])=>({type:I.NodeType.array_subscript,array:_(L(E),{trailing:T}),parenthesis:R})},{name:"function_call",symbols:[N.has("RESERVED_FUNCTION_NAME")?{type:"RESERVED_FUNCTION_NAME"}:RESERVED_FUNCTION_NAME,"_","parenthesis"],postprocess:([E,T,R])=>({type:I.NodeType.function_call,nameKw:_(L(E),{trailing:T}),parenthesis:R})},{name:"parenthesis",symbols:[{literal:"("},"expressions_or_clauses",{literal:")"}],postprocess:([E,T,R])=>({type:I.NodeType.parenthesis,children:T,openParen:"(",closeParen:")"})},{name:"curly_braces$ebnf$1",symbols:[]},{name:"curly_braces$ebnf$1",symbols:["curly_braces$ebnf$1","free_form_sql"],postprocess:E=>E[0].concat([E[1]])},{name:"curly_braces",symbols:[{literal:"{"},"curly_braces$ebnf$1",{literal:"}"}],postprocess:([E,T,R])=>({type:I.NodeType.parenthesis,children:T,openParen:"{",closeParen:"}"})},{name:"square_brackets$ebnf$1",symbols:[]},{name:"square_brackets$ebnf$1",symbols:["square_brackets$ebnf$1","free_form_sql"],postprocess:E=>E[0].concat([E[1]])},{name:"square_brackets",symbols:[{literal:"["},"square_brackets$ebnf$1",{literal:"]"}],postprocess:([E,T,R])=>({type:I.NodeType.parenthesis,children:T,openParen:"[",closeParen:"]"})},{name:"property_access$subexpression$1",symbols:["identifier"]},{name:"property_access$subexpression$1",symbols:["array_subscript"]},{name:"property_access$subexpression$1",symbols:["all_columns_asterisk"]},{name:"property_access$subexpression$1",symbols:["parameter"]},{name:"property_access",symbols:["atomic_expression","_",N.has("PROPERTY_ACCESS_OPERATOR")?{type:"PROPERTY_ACCESS_OPERATOR"}:PROPERTY_ACCESS_OPERATOR,"_","property_access$subexpression$1"],postprocess:([E,T,R,A,[e]])=>({type:I.NodeType.property_access,object:_(E,{trailing:T}),operator:R.text,property:_(e,{leading:A})})},{name:"between_predicate",symbols:[N.has("BETWEEN")?{type:"BETWEEN"}:BETWEEN,"_","andless_expression_chain","_",N.has("AND")?{type:"AND"}:AND,"_","andless_expression"],postprocess:([E,T,R,A,e,S,O])=>({type:I.NodeType.between_predicate,betweenKw:L(E),expr1:s(R,{leading:T,trailing:A}),andKw:L(e),expr2:[_(O,{leading:S})]})},{name:"case_expression$ebnf$1",symbols:["expression_chain_"],postprocess:e},{name:"case_expression$ebnf$1",symbols:[],postprocess:()=>null},{name:"case_expression$ebnf$2",symbols:[]},{name:"case_expression$ebnf$2",symbols:["case_expression$ebnf$2","case_clause"],postprocess:E=>E[0].concat([E[1]])},{name:"case_expression",symbols:[N.has("CASE")?{type:"CASE"}:CASE,"_","case_expression$ebnf$1","case_expression$ebnf$2",N.has("END")?{type:"END"}:END],postprocess:([E,T,R,A,e])=>({type:I.NodeType.case_expression,caseKw:_(L(E),{trailing:T}),endKw:L(e),expr:R||[],clauses:A})},{name:"case_clause",symbols
:[N.has("WHEN")?{type:"WHEN"}:WHEN,"_","expression_chain_",N.has("THEN")?{type:"THEN"}:THEN,"_","expression_chain_"],postprocess:([E,T,R,A,e,S])=>({type:I.NodeType.case_when,whenKw:_(L(E),{trailing:T}),thenKw:_(L(A),{trailing:e}),condition:R,result:S})},{name:"case_clause",symbols:[N.has("ELSE")?{type:"ELSE"}:ELSE,"_","expression_chain_"],postprocess:([E,T,R])=>({type:I.NodeType.case_else,elseKw:_(L(E),{trailing:T}),result:R})},{name:"comma$subexpression$1",symbols:[N.has("COMMA")?{type:"COMMA"}:COMMA]},{name:"comma",symbols:["comma$subexpression$1"],postprocess:([[E]])=>({type:I.NodeType.comma})},{name:"asterisk$subexpression$1",symbols:[N.has("ASTERISK")?{type:"ASTERISK"}:ASTERISK]},{name:"asterisk",symbols:["asterisk$subexpression$1"],postprocess:([[E]])=>({type:I.NodeType.operator,text:E.text})},{name:"operator$subexpression$1",symbols:[N.has("OPERATOR")?{type:"OPERATOR"}:OPERATOR]},{name:"operator",symbols:["operator$subexpression$1"],postprocess:([[E]])=>({type:I.NodeType.operator,text:E.text})},{name:"identifier$subexpression$1",symbols:[N.has("IDENTIFIER")?{type:"IDENTIFIER"}:IDENTIFIER]},{name:"identifier$subexpression$1",symbols:[N.has("QUOTED_IDENTIFIER")?{type:"QUOTED_IDENTIFIER"}:QUOTED_IDENTIFIER]},{name:"identifier$subexpression$1",symbols:[N.has("VARIABLE")?{type:"VARIABLE"}:VARIABLE]},{name:"identifier",symbols:["identifier$subexpression$1"],postprocess:([[E]])=>({type:I.NodeType.identifier,quoted:"IDENTIFIER"!==E.type,text:E.text})},{name:"parameter$subexpression$1",symbols:[N.has("NAMED_PARAMETER")?{type:"NAMED_PARAMETER"}:NAMED_PARAMETER]},{name:"parameter$subexpression$1",symbols:[N.has("QUOTED_PARAMETER")?{type:"QUOTED_PARAMETER"}:QUOTED_PARAMETER]},{name:"parameter$subexpression$1",symbols:[N.has("NUMBERED_PARAMETER")?{type:"NUMBERED_PARAMETER"}:NUMBERED_PARAMETER]},{name:"parameter$subexpression$1",symbols:[N.has("POSITIONAL_PARAMETER")?{type:"POSITIONAL_PARAMETER"}:POSITIONAL_PARAMETER]},{name:"parameter$subexpression$1",symbols:[N.has("CUSTOM_PARAMETER")?{type:"CUSTOM_PARAMETER"}:CUSTOM_PARAMETER]},{name:"parameter",symbols:["parameter$subexpression$1"],postprocess:([[E]])=>({type:I.NodeType.parameter,key:E.key,text:E.text})},{name:"literal$subexpression$1",symbols:[N.has("NUMBER")?{type:"NUMBER"}:NUMBER]},{name:"literal$subexpression$1",symbols:[N.has("STRING")?{type:"STRING"}:STRING]},{name:"literal",symbols:["literal$subexpression$1"],postprocess:([[E]])=>({type:I.NodeType.literal,text:E.text})},{name:"keyword$subexpression$1",symbols:[N.has("RESERVED_KEYWORD")?{type:"RESERVED_KEYWORD"}:RESERVED_KEYWORD]},{name:"keyword$subexpression$1",symbols:[N.has("RESERVED_KEYWORD_PHRASE")?{type:"RESERVED_KEYWORD_PHRASE"}:RESERVED_KEYWORD_PHRASE]},{name:"keyword$subexpression$1",symbols:[N.has("RESERVED_JOIN")?{type:"RESERVED_JOIN"}:RESERVED_JOIN]},{name:"keyword",symbols:["keyword$subexpression$1"],postprocess:([[E]])=>L(E)},{name:"data_type$subexpression$1",symbols:[N.has("RESERVED_DATA_TYPE")?{type:"RESERVED_DATA_TYPE"}:RESERVED_DATA_TYPE]},{name:"data_type$subexpression$1",symbols:[N.has("RESERVED_DATA_TYPE_PHRASE")?{type:"RESERVED_DATA_TYPE_PHRASE"}:RESERVED_DATA_TYPE_PHRASE]},{name:"data_type",symbols:["data_type$subexpression$1"],postprocess:([[E]])=>C(E)},{name:"data_type",symbols:[N.has("RESERVED_PARAMETERIZED_DATA_TYPE")?{type:"RESERVED_PARAMETERIZED_DATA_TYPE"}:RESERVED_PARAMETERIZED_DATA_TYPE,"_","parenthesis"],postprocess:([E,T,R])=>({type:I.NodeType.parameterized_data_type,dataType:_(C(E),{trailing:T}),parenthesis:R})},{name:"logic_operator$subexpression$1",sy
mbols:[N.has("AND")?{type:"AND"}:AND]},{name:"logic_operator$subexpression$1",symbols:[N.has("OR")?{type:"OR"}:OR]},{name:"logic_operator$subexpression$1",symbols:[N.has("XOR")?{type:"XOR"}:XOR]},{name:"logic_operator",symbols:["logic_operator$subexpression$1"],postprocess:([[E]])=>L(E)},{name:"other_keyword$subexpression$1",symbols:[N.has("WHEN")?{type:"WHEN"}:WHEN]},{name:"other_keyword$subexpression$1",symbols:[N.has("THEN")?{type:"THEN"}:THEN]},{name:"other_keyword$subexpression$1",symbols:[N.has("ELSE")?{type:"ELSE"}:ELSE]},{name:"other_keyword$subexpression$1",symbols:[N.has("END")?{type:"END"}:END]},{name:"other_keyword",symbols:["other_keyword$subexpression$1"],postprocess:([[E]])=>L(E)},{name:"_$ebnf$1",symbols:[]},{name:"_$ebnf$1",symbols:["_$ebnf$1","comment"],postprocess:E=>E[0].concat([E[1]])},{name:"_",symbols:["_$ebnf$1"],postprocess:([E])=>E},{name:"comment",symbols:[N.has("LINE_COMMENT")?{type:"LINE_COMMENT"}:LINE_COMMENT],postprocess:([E])=>({type:I.NodeType.line_comment,text:E.text,precedingWhitespace:E.precedingWhitespace})},{name:"comment",symbols:[N.has("BLOCK_COMMENT")?{type:"BLOCK_COMMENT"}:BLOCK_COMMENT],postprocess:([E])=>({type:I.NodeType.block_comment,text:E.text,precedingWhitespace:E.precedingWhitespace})},{name:"comment",symbols:[N.has("DISABLE_COMMENT")?{type:"DISABLE_COMMENT"}:DISABLE_COMMENT],postprocess:([E])=>({type:I.NodeType.disable_comment,text:E.text,precedingWhitespace:E.precedingWhitespace})}],ParserStart:"main"};T.default=r,E.exports=T.default},2707:function(E,T,R){"use strict";var A=this&&this.__createBinding||(Object.create?function(E,T,R,A){void 0===A&&(A=R);var e=Object.getOwnPropertyDescriptor(T,R);e&&!("get"in e?!T.__esModule:e.writable||e.configurable)||(e={enumerable:!0,get:function(){return T[R]}}),Object.defineProperty(E,A,e)}:function(E,T,R,A){void 0===A&&(A=R),E[A]=T[R]}),e=this&&this.__setModuleDefault||(Object.create?function(E,T){Object.defineProperty(E,"default",{enumerable:!0,value:T})}:function(E,T){E.default=T}),S=this&&this.__importStar||function(E){if(E&&E.__esModule)return E;var T={};if(null!=E)for(var R in E)"default"!==R&&Object.prototype.hasOwnProperty.call(E,R)&&A(T,E,R);return e(T,E),T},I=this&&this.__rest||function(E,T){var R={};for(var A in E)Object.prototype.hasOwnProperty.call(E,A)&&T.indexOf(A)<0&&(R[A]=E[A]);if(null!=E&&"function"==typeof Object.getOwnPropertySymbols){var e=0;for(A=Object.getOwnPropertySymbols(E);e{if("string"==typeof R.language&&!T.supportedDialects.includes(R.language))throw new C.ConfigError(`Unsupported SQL dialect: ${R.language}`);const A=_[R.language||"sql"];return(0,T.formatDialect)(E,Object.assign(Object.assign({},R),{dialect:N[A]}))},T.formatDialect=(E,T)=>{var{dialect:R}=T,A=I(T,["dialect"]);if("string"!=typeof E)throw new Error("Invalid query argument. 
Expected string, instead got "+typeof E);const e=(0,C.validateConfig)(Object.assign(Object.assign({},s),A));return new L.default((0,t.createDialect)(R),e).format(E)}},1044:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.isMultiline=T.equalizeWhitespace=T.maxLength=T.sortByLengthDesc=T.last=T.dedupe=void 0,T.dedupe=E=>[...new Set(E)],T.last=E=>E[E.length-1],T.sortByLengthDesc=E=>E.sort(((E,T)=>T.length-E.length||E.localeCompare(T))),T.maxLength=E=>E.reduce(((E,T)=>Math.max(E,T.length)),0),T.equalizeWhitespace=E=>E.replace(/\s+/gu," "),T.isMultiline=E=>/\n/.test(E)},1217:(E,T)=>{"use strict";Object.defineProperty(T,"__esModule",{value:!0}),T.validateConfig=T.ConfigError=void 0;class R extends Error{}T.ConfigError=R,T.validateConfig=function(E){const T=["multilineLists","newlineBeforeOpenParen","newlineBeforeCloseParen","aliasAs","commaPosition","tabulateAlias"];for(const A of T)if(A in E)throw new R(`${A} config is no more supported.`);if(E.expressionWidth<=0)throw new R(`expressionWidth config must be positive number. Received ${E.expressionWidth} instead.`);var A,e;if(E.params&&!((A=E.params)instanceof Array?A:Object.values(A)).every((E=>"string"==typeof E))&&console.warn('WARNING: All "params" option values should be strings.'),E.paramTypes&&((e=E.paramTypes).custom&&Array.isArray(e.custom)&&!e.custom.every((E=>""!==E.regex))))throw new R("Empty regex given in custom paramTypes. That would result in matching infinite amount of parameters.");return E}},8515:function(E){var T;T=function(){function E(T,R,A){return this.id=++E.highestId,this.name=T,this.symbols=R,this.postprocess=A,this}function T(E,T,R,A){this.rule=E,this.dot=T,this.reference=R,this.data=[],this.wantedBy=A,this.isComplete=this.dot===E.symbols.length}function R(E,T){this.grammar=E,this.index=T,this.states=[],this.wants={},this.scannable=[],this.completed={}}function A(E,T){this.rules=E,this.start=T||this.rules[0].name;var R=this.byName={};this.rules.forEach((function(E){R.hasOwnProperty(E.name)||(R[E.name]=[]),R[E.name].push(E)}))}function e(){this.reset("")}function S(E,T,S){if(E instanceof A){var I=E;S=T}else I=A.fromCompiled(E,T);for(var O in this.grammar=I,this.options={keepHistory:!1,lexer:I.lexer||new e},S||{})this.options[O]=S[O];this.lexer=this.options.lexer,this.lexerState=void 0;var N=new R(I,0);this.table=[N],N.wants[I.start]=[],N.predict(I.start),N.process(),this.current=0}function I(E){var T=typeof E;if("string"===T)return E;if("object"===T){if(E.literal)return JSON.stringify(E.literal);if(E instanceof RegExp)return E.toString();if(E.type)return"%"+E.type;if(E.test)return"<"+String(E.test)+">";throw new Error("Unknown symbol type: "+E)}}return E.highestId=0,E.prototype.toString=function(E){var T=void 0===E?this.symbols.map(I).join(" "):this.symbols.slice(0,E).map(I).join(" ")+" ● "+this.symbols.slice(E).map(I).join(" ");return this.name+" → "+T},T.prototype.toString=function(){return"{"+this.rule.toString(this.dot)+"}, from: "+(this.reference||0)},T.prototype.nextState=function(E){var R=new T(this.rule,this.dot+1,this.reference,this.wantedBy);return R.left=this,R.right=E,R.isComplete&&(R.data=R.build(),R.right=void 0),R},T.prototype.build=function(){var E=[],T=this;do{E.push(T.right.data),T=T.left}while(T.left);return E.reverse(),E},T.prototype.finish=function(){this.rule.postprocess&&(this.data=this.rule.postprocess(this.data,this.reference,S.fail))},R.prototype.process=function(E){for(var T=this.states,R=this.wants,A=this.completed,e=0;e0&&T.push(" ^ "+A+" more lines identical to 
this"),A=0,T.push(" "+I)),R=I}},S.prototype.getSymbolDisplay=function(E){return function(E){var T=typeof E;if("string"===T)return E;if("object"===T){if(E.literal)return JSON.stringify(E.literal);if(E instanceof RegExp)return"character matching "+E;if(E.type)return E.type+" token";if(E.test)return"token matching "+String(E.test);throw new Error("Unknown symbol type: "+E)}}(E)},S.prototype.buildFirstStateStack=function(E,T){if(-1!==T.indexOf(E))return null;if(0===E.wantedBy.length)return[E];var R=E.wantedBy[0],A=[E].concat(T),e=this.buildFirstStateStack(R,A);return null===e?null:[E].concat(e)},S.prototype.save=function(){var E=this.table[this.current];return E.lexerState=this.lexerState,E},S.prototype.restore=function(E){var T=E.index;this.current=T,this.table[T]=E,this.table.splice(T+1),this.lexerState=E.lexerState,this.results=this.finish()},S.prototype.rewind=function(E){if(!this.options.keepHistory)throw new Error("set option `keepHistory` to enable rewinding");this.restore(this.table[E])},S.prototype.finish=function(){var E=[],T=this.grammar.start;return this.table[this.table.length-1].states.forEach((function(R){R.rule.name===T&&R.dot===R.rule.symbols.length&&0===R.reference&&R.data!==S.fail&&E.push(R)})),E.map((function(E){return E.data}))},{Parser:S,Grammar:A,Rule:E}},E.exports?E.exports=T():this.nearley=T()}},T={};function R(A){var e=T[A];if(void 0!==e)return e.exports;var S=T[A]={exports:{}};return E[A].call(S.exports,S,S.exports,R),S.exports}var A={};return(()=>{"use strict";var E=A;Object.defineProperty(E,"__esModule",{value:!0}),E.snowflake=E.singlestoredb=E.transactsql=E.trino=E.sql=E.sqlite=E.spark=E.redshift=E.postgresql=E.plsql=E.n1ql=E.tidb=E.mysql=E.mariadb=E.hive=E.duckdb=E.db2i=E.db2=E.clickhouse=E.bigquery=E.ConfigError=E.expandPhrases=E.formatDialect=E.format=E.supportedDialects=void 0;var T=R(2707);Object.defineProperty(E,"supportedDialects",{enumerable:!0,get:function(){return T.supportedDialects}}),Object.defineProperty(E,"format",{enumerable:!0,get:function(){return T.format}}),Object.defineProperty(E,"formatDialect",{enumerable:!0,get:function(){return T.formatDialect}});var e=R(7163);Object.defineProperty(E,"expandPhrases",{enumerable:!0,get:function(){return e.expandPhrases}});var S=R(1217);Object.defineProperty(E,"ConfigError",{enumerable:!0,get:function(){return S.ConfigError}});var I=R(5028);Object.defineProperty(E,"bigquery",{enumerable:!0,get:function(){return I.bigquery}});var O=R(5084);Object.defineProperty(E,"clickhouse",{enumerable:!0,get:function(){return O.clickhouse}});var N=R(1718);Object.defineProperty(E,"db2",{enumerable:!0,get:function(){return N.db2}});var t=R(9274);Object.defineProperty(E,"db2i",{enumerable:!0,get:function(){return t.db2i}});var L=R(3018);Object.defineProperty(E,"duckdb",{enumerable:!0,get:function(){return L.duckdb}});var C=R(7340);Object.defineProperty(E,"hive",{enumerable:!0,get:function(){return C.hive}});var _=R(1378);Object.defineProperty(E,"mariadb",{enumerable:!0,get:function(){return _.mariadb}});var s=R(3358);Object.defineProperty(E,"mysql",{enumerable:!0,get:function(){return s.mysql}});var r=R(2066);Object.defineProperty(E,"tidb",{enumerable:!0,get:function(){return r.tidb}});var a=R(7328);Object.defineProperty(E,"n1ql",{enumerable:!0,get:function(){return a.n1ql}});var D=R(6910);Object.defineProperty(E,"plsql",{enumerable:!0,get:function(){return D.plsql}});var P=R(2912);Object.defineProperty(E,"postgresql",{enumerable:!0,get:function(){return P.postgresql}});var 
n=R(1642);Object.defineProperty(E,"redshift",{enumerable:!0,get:function(){return n.redshift}});var i=R(9774);Object.defineProperty(E,"spark",{enumerable:!0,get:function(){return i.spark}});var o=R(5784);Object.defineProperty(E,"sqlite",{enumerable:!0,get:function(){return o.sqlite}});var M=R(3446);Object.defineProperty(E,"sql",{enumerable:!0,get:function(){return M.sql}});var U=R(198);Object.defineProperty(E,"trino",{enumerable:!0,get:function(){return U.trino}});var G=R(918);Object.defineProperty(E,"transactsql",{enumerable:!0,get:function(){return G.transactsql}});var l=R(7146);Object.defineProperty(E,"singlestoredb",{enumerable:!0,get:function(){return l.singlestoredb}});var p=R(3686);Object.defineProperty(E,"snowflake",{enumerable:!0,get:function(){return p.snowflake}})})(),A})())); +//# sourceMappingURL=sql-formatter.min.cjs.map \ No newline at end of file diff --git a/static/public/assets/deps/highlightjs/sql.min.js b/static/public/assets/deps/highlightjs/sql.min.js new file mode 100644 index 000000000..bac943a7f --- /dev/null +++ b/static/public/assets/deps/highlightjs/sql.min.js @@ -0,0 +1,21 @@ +/*! `sql` grammar compiled for Highlight.js 11.11.1 */ +(()=>{var e=(()=>{"use strict";return e=>{ +const r=e.regex,t=e.COMMENT("--","$"),a=["abs","acos","array_agg","asin","atan","avg","cast","ceil","ceiling","coalesce","corr","cos","cosh","count","covar_pop","covar_samp","cume_dist","dense_rank","deref","element","exp","extract","first_value","floor","json_array","json_arrayagg","json_exists","json_object","json_objectagg","json_query","json_table","json_table_primitive","json_value","lag","last_value","lead","listagg","ln","log","log10","lower","max","min","mod","nth_value","ntile","nullif","percent_rank","percentile_cont","percentile_disc","position","position_regex","power","rank","regr_avgx","regr_avgy","regr_count","regr_intercept","regr_r2","regr_slope","regr_sxx","regr_sxy","regr_syy","row_number","sin","sinh","sqrt","stddev_pop","stddev_samp","substring","substring_regex","sum","tan","tanh","translate","translate_regex","treat","trim","trim_array","unnest","upper","value_of","var_pop","var_samp","width_bucket"],n=a,s=["abs","acos","all","allocate","alter","and","any","are","array","array_agg","array_max_cardinality","as","asensitive","asin","asymmetric","at","atan","atomic","authorization","avg","begin","begin_frame","begin_partition","between","bigint","binary","blob","boolean","both","by","call","called","cardinality","cascaded","case","cast","ceil","ceiling","char","char_length","character","character_length","check","classifier","clob","close","coalesce","collate","collect","column","commit","condition","connect","constraint","contains","convert","copy","corr","corresponding","cos","cosh","count","covar_pop","covar_samp","create","cross","cube","cume_dist","current","current_catalog","current_date","current_default_transform_group","current_path","current_role","current_row","current_schema","current_time","current_timestamp","current_path","current_role","current_transform_group_for_type","current_user","cursor","cycle","date","day","deallocate","dec","decimal","decfloat","declare","default","define","delete","dense_rank","deref","describe","deterministic","disconnect","distinct","double","drop","dynamic","each","element","else","empty","end","end_frame","end_partition","end-exec","equals","escape","every","except","exec","execute","exists","exp","external","extract","false","fetch","filter","first_value","float","floor","for","foreign","frame_row","free","from","full","func
tion","fusion","get","global","grant","group","grouping","groups","having","hold","hour","identity","in","indicator","initial","inner","inout","insensitive","insert","int","integer","intersect","intersection","interval","into","is","join","json_array","json_arrayagg","json_exists","json_object","json_objectagg","json_query","json_table","json_table_primitive","json_value","lag","language","large","last_value","lateral","lead","leading","left","like","like_regex","listagg","ln","local","localtime","localtimestamp","log","log10","lower","match","match_number","match_recognize","matches","max","member","merge","method","min","minute","mod","modifies","module","month","multiset","national","natural","nchar","nclob","new","no","none","normalize","not","nth_value","ntile","null","nullif","numeric","octet_length","occurrences_regex","of","offset","old","omit","on","one","only","open","or","order","out","outer","over","overlaps","overlay","parameter","partition","pattern","per","percent","percent_rank","percentile_cont","percentile_disc","period","portion","position","position_regex","power","precedes","precision","prepare","primary","procedure","ptf","range","rank","reads","real","recursive","ref","references","referencing","regr_avgx","regr_avgy","regr_count","regr_intercept","regr_r2","regr_slope","regr_sxx","regr_sxy","regr_syy","release","result","return","returns","revoke","right","rollback","rollup","row","row_number","rows","running","savepoint","scope","scroll","search","second","seek","select","sensitive","session_user","set","show","similar","sin","sinh","skip","smallint","some","specific","specifictype","sql","sqlexception","sqlstate","sqlwarning","sqrt","start","static","stddev_pop","stddev_samp","submultiset","subset","substring","substring_regex","succeeds","sum","symmetric","system","system_time","system_user","table","tablesample","tan","tanh","then","time","timestamp","timezone_hour","timezone_minute","to","trailing","translate","translate_regex","translation","treat","trigger","trim","trim_array","true","truncate","uescape","union","unique","unknown","unnest","update","upper","user","using","value","values","value_of","var_pop","var_samp","varbinary","varchar","varying","versioning","when","whenever","where","width_bucket","window","with","within","without","year","add","asc","collation","desc","final","first","last","view"].filter((e=>!a.includes(e))),i={ +match:r.concat(/\b/,r.either(...n),/\s*\(/),relevance:0,keywords:{built_in:n}} +;function o(e){ +return r.concat(/\b/,r.either(...e.map((e=>e.replace(/\s+/,"\\s+")))),/\b/)} +const c={scope:"keyword", +match:o(["create table","insert into","primary key","foreign key","not null","alter table","add constraint","grouping sets","on overflow","character set","respect nulls","ignore nulls","nulls first","nulls last","depth first","breadth first"]), +relevance:0};return{name:"SQL",case_insensitive:!0,illegal:/[{}]|<\//,keywords:{ +$pattern:/\b[\w\.]+/,keyword:((e,{exceptions:r,when:t}={})=>{const a=t +;return r=r||[],e.map((e=>e.match(/\|\d+$/)||r.includes(e)?e:a(e)?e+"|0":e)) +})(s,{when:e=>e.length<3}),literal:["true","false","unknown"], +type:["bigint","binary","blob","boolean","char","character","clob","date","dec","decfloat","decimal","float","int","integer","interval","nchar","nclob","national","numeric","real","row","smallint","time","timestamp","varchar","varying","varbinary"], 
+built_in:["current_catalog","current_date","current_default_transform_group","current_path","current_role","current_schema","current_transform_group_for_type","current_user","session_user","system_time","system_user","current_time","localtime","current_timestamp","localtimestamp"] +},contains:[{scope:"type", +match:o(["double precision","large object","with timezone","without timezone"]) +},c,i,{scope:"variable",match:/@[a-z0-9][a-z0-9_]*/},{scope:"string",variants:[{ +begin:/'/,end:/'/,contains:[{match:/''/}]}]},{begin:/"/,end:/"/,contains:[{ +match:/""/}]},e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE,t,{scope:"operator", +match:/[-+*/=%^~]|&&?|\|\|?|!=?|<(?:=>?|<|>)?|>[>=]?/,relevance:0}]}}})() +;hljs.registerLanguage("sql",e)})(); \ No newline at end of file diff --git a/static/public/dashboards/_overview.yaml b/static/public/dashboards/_overview.yaml index b3c2d8137..30215a94d 100644 --- a/static/public/dashboards/_overview.yaml +++ b/static/public/dashboards/_overview.yaml @@ -73,33 +73,63 @@ tabs: - name: Overview icon: grid widgets: - # System Metrics + # Golden Signals - The 4 key metrics for any APM - type: group - title: System Metrics + title: Golden Signals + description: 'The four key metrics from Google SRE for monitoring distributed systems: Traffic, Latency, Errors, and Saturation.' layout: { w: 12, h: 4 } children: - type: 'timeseries_stat' - title: 'Total Requests' - icon: list-tree + title: 'Traffic' + description: 'Incoming request rate. Measures system load and usage patterns.' + icon: activity query: | (kind == "server" or name == "apitoolkit-http-span" or name == "monoscope.http") | summarize count() by bin_auto(timestamp) | order by timestamp desc | limit 200 - unit: reqs - eager: true + unit: req/min layout: { w: 3, h: 2 } - - type: 'stat' - title: 'Global Error Rate' - icon: bug + - type: 'timeseries_stat' + title: 'P95 Latency' + description: '95th percentile response time. 95% of requests complete faster than this.' + icon: clock + query: | + (kind == "server" or name == "apitoolkit-http-span" or name == "monoscope.http") and duration != null + | summarize p95(duration) / 1000000 by bin_auto(timestamp) + | order by timestamp desc + | limit 200 + unit: ms + layout: { w: 3, h: 2 } + + - type: 'timeseries_stat' + title: 'Error Rate' + description: 'Percentage of failed requests (5xx errors). Lower is better.' + icon: alert-triangle query: | (kind == "server" or name == "apitoolkit-http-span") - | summarize round(countif(status_code == "ERROR") * 100.0 / count(), 2) + | summarize round(countif(status_code == "ERROR" or coalesce(attributes.http.response.status_code, 0) >= 500) * 100.0 / count(), 2) by bin_auto(timestamp) + | order by timestamp desc + | limit 200 unit: '%' - eager: true + alert_threshold: 5 + warning_threshold: 2 layout: { w: 3, h: 2 } + - type: 'stat' + title: 'Apdex Score' + description: 'Application Performance Index (0-1). Measures user satisfaction based on response times. >0.9 is excellent, <0.5 is poor.' 
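+          # A sketch of the standard Apdex formula that the SQL below implements:
+          # Apdex = (satisfied + 0.5 * tolerating) / total, where "satisfied" means
+          # duration <= 500 ms and "tolerating" means duration <= 2 s. Durations are
+          # stored in nanoseconds, hence the 500000000 and 2000000000 thresholds in
+          # the CASE expression.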
+ icon: smile + sql: "SELECT ROUND((SUM(CASE WHEN duration <= 500000000 THEN 1.0 WHEN duration <= 2000000000 THEN 0.5 ELSE 0 END)) / GREATEST(1, COUNT(*))::numeric, 2)::float FROM otel_logs_and_spans WHERE project_id='{{project_id}}' AND (kind = 'server' OR name = 'apitoolkit-http-span' OR name = 'monoscope.http') AND duration IS NOT NULL {{time_filter}}" + unit: '/1.0' + layout: { w: 3, h: 2 } + + # Secondary metrics row + - type: group + title: System Overview + layout: { w: 12, h: 2 } + children: - type: 'stat' title: 'Active Services' icon: server @@ -107,7 +137,15 @@ tabs: resource.service.name != null | summarize dcount(resource.service.name) unit: services - eager: true + layout: { w: 3, h: 2 } + + - type: 'stat' + title: 'Total Requests' + icon: list-tree + query: | + (kind == "server" or name == "apitoolkit-http-span" or name == "monoscope.http") + | summarize count() + unit: reqs layout: { w: 3, h: 2 } - type: 'stat' @@ -117,12 +155,21 @@ tabs: attributes.db.system.name != null | summarize dcount(attributes.db.system.name) unit: systems - eager: true + layout: { w: 3, h: 2 } + + - type: 'stat' + title: 'Unique Endpoints' + icon: route + query: | + (kind == "server" or name == "apitoolkit-http-span") and name != null + | summarize dcount(name) + unit: endpoints layout: { w: 3, h: 2 } # Services Table and Issues side by side - type: table - title: Services Overview + title: Services Health + description: 'Click a service row to view its detailed metrics in the Service tab.' layout: { w: 6, h: 6 } on_row_click: set_variable: service @@ -131,36 +178,25 @@ tabs: columns: - field: service_name title: Service + - field: apdex + title: Apdex + columnType: number - field: throughput title: Throughput columnType: number unit: req/min progress: column_percent - field: error_rate - title: Error Rate + title: Errors unit: '%' progress: value_percent progress_variant: error - field: p95_latency - title: P95 Latency + title: P95 columnType: duration unit: ms progress: column_percent - sql: | - SELECT - resource___service___name as service_name, - ROUND((COUNT(*)::numeric / GREATEST(1, EXTRACT(EPOCH FROM (MAX(timestamp) - MIN(timestamp))) / 60)), 2)::text as throughput, - ROUND((COUNT(*) FILTER (WHERE status_code = 'ERROR') * 100.0 / GREATEST(1, COUNT(*))::numeric), 2)::text as error_rate, - ROUND((approx_percentile(0.95, percentile_agg(duration))::numeric / 1e6), 2)::text as p95_latency - FROM otel_logs_and_spans - WHERE project_id='{{project_id}}' - AND resource___service___name IS NOT NULL - AND kind = 'server' - AND duration IS NOT NULL - {{time_filter}} - GROUP BY resource___service___name - ORDER BY COUNT(*)::numeric / GREATEST(1, EXTRACT(EPOCH FROM (MAX(timestamp) - MIN(timestamp))) / 60) DESC - LIMIT 50 + sql: "SELECT resource___service___name as service_name, ROUND((SUM(CASE WHEN duration <= 500000000 THEN 1.0 WHEN duration <= 2000000000 THEN 0.5 ELSE 0 END)) / GREATEST(1, COUNT(*))::numeric, 2)::text as apdex, ROUND((COUNT(*)::numeric / GREATEST(1, EXTRACT(EPOCH FROM (MAX(timestamp) - MIN(timestamp))) / 60)), 2)::text as throughput, ROUND((COUNT(*) FILTER (WHERE status_code = 'ERROR' OR COALESCE(attributes___http___response___status_code, 0) >= 500) * 100.0 / GREATEST(1, COUNT(*))::numeric), 2)::text as error_rate, ROUND((approx_percentile(0.95, percentile_agg(duration))::numeric / 1000000), 2)::text as p95_latency FROM otel_logs_and_spans WHERE project_id='{{project_id}}' AND resource___service___name IS NOT NULL AND kind = 'server' AND duration IS NOT NULL {{time_filter}} GROUP BY 
resource___service___name ORDER BY COUNT(*)::numeric / GREATEST(1, EXTRACT(EPOCH FROM (MAX(timestamp) - MIN(timestamp))) / 60) DESC LIMIT 50" # Anomalies/Issues widget - type: anomalies @@ -228,6 +264,7 @@ tabs: # Request Latency Percentiles - type: 'timeseries_line' title: 'Request Latency Percentiles' + description: 'Response time distribution. P50 is median, P99 shows worst-case latency for 1% of requests.' hide_subtitle: true summarize_by: max query: | @@ -269,7 +306,7 @@ tabs: unit: reqs layout: { w: 12, h: 4 } - - name: Service Summary + - name: Service icon: server requires: service widgets: @@ -303,7 +340,7 @@ tabs: icon: activity query: | resource.service.name == "{{var-service}}" and kind == "server" and duration != null - | summarize round(avg(duration) / 1e6, 2) + | summarize round(avg(duration) / 1000000, 2) unit: ms layout: { w: 3, h: 2 } @@ -475,6 +512,7 @@ tabs: # Full width resource table - type: table title: 'Resources Overview' + description: 'Click a resource to filter charts above. Total Time shows cumulative time spent on each resource.' layout: { w: 12, h: 8 } on_row_click: set_variable: resource @@ -552,10 +590,11 @@ tabs: - type: 'stat' title: 'Avg Trace Duration' + description: 'Average end-to-end time for complete request traces, from entry to final response.' icon: activity query: | parent_id == "" and duration != null - | summarize round(avg(duration) / 1e6, 2) + | summarize round(avg(duration) / 1000000, 2) unit: ms layout: { w: 3, h: 2 } @@ -651,45 +690,51 @@ tabs: - name: Logs icon: file-text widgets: - # Log Stats + # Log Stats - Enhanced with timeseries - type: group - title: Log Overview + title: Log Metrics layout: { w: 12, h: 4 } children: - type: 'timeseries_stat' title: 'Total Logs' icon: file-text query: | - body != null + body != null and ("{{var-service}}" == "" or resource.service.name == "{{var-service}}") | summarize count() by bin_auto(timestamp) | order by timestamp desc | limit 200 unit: logs layout: { w: 3, h: 2 } - - type: 'stat' + - type: 'timeseries_stat' title: 'Error Logs' icon: alert-triangle query: | - level in ("ERROR", "FATAL") - | summarize count() - unit: logs + level in ("ERROR", "FATAL") and ("{{var-service}}" == "" or resource.service.name == "{{var-service}}") + | summarize count() by bin_auto(timestamp) + | order by timestamp desc + | limit 200 + unit: errors + alert_threshold: 100 + warning_threshold: 50 layout: { w: 3, h: 2 } - - type: 'stat' - title: 'Warn Logs' + - type: 'timeseries_stat' + title: 'Warning Logs' icon: alert-circle query: | - level == "WARN" - | summarize count() - unit: logs + level == "WARN" and ("{{var-service}}" == "" or resource.service.name == "{{var-service}}") + | summarize count() by bin_auto(timestamp) + | order by timestamp desc + | limit 200 + unit: warns layout: { w: 3, h: 2 } - type: 'stat' title: 'Log Sources' icon: server query: | - resource.service.name != null + body != null and resource.service.name != null | summarize dcount(resource.service.name) unit: services layout: { w: 3, h: 2 } @@ -704,38 +749,114 @@ tabs: | limit 10000 layout: { w: 6, h: 4 } - # Error Pattern Analysis + # Error Log Trends by Service - type: 'timeseries' - title: 'Error Log Trends' + title: 'Error Logs by Service' theme: 'roma' sql: | WITH service_stats AS ( - SELECT - resource___service___name, - COUNT(*) as error_count + SELECT resource___service___name, COUNT(*) as error_count FROM otel_logs_and_spans - WHERE project_id='{{project_id}}' - AND level IN ('ERROR', 'FATAL') + WHERE project_id='{{project_id}}' AND 
level IN ('ERROR', 'FATAL') AND ('{{var-service}}' = '' OR resource___service___name = '{{var-service}}') {{time_filter}} GROUP BY resource___service___name ORDER BY error_count DESC LIMIT 10 ) - SELECT + SELECT extract(epoch from time_bucket('{{rollup_interval}}', timestamp))::integer AS time, o.resource___service___name as service_name, COUNT(*)::float as count FROM otel_logs_and_spans o INNER JOIN service_stats s ON o.resource___service___name = s.resource___service___name - WHERE o.project_id='{{project_id}}' - AND o.level IN ('ERROR', 'FATAL') + WHERE o.project_id='{{project_id}}' AND o.level IN ('ERROR', 'FATAL') AND ('{{var-service}}' = '' OR o.resource___service___name = '{{var-service}}') {{time_filter}} GROUP BY time, service_name ORDER BY time DESC layout: { w: 6, h: 4 } + # Top Error Patterns - NEW: Grouped error analysis + - type: table + title: 'Top Error Patterns' + description: 'Most frequent error messages grouped by pattern. Focus on high-count errors first.' + layout: { w: 6, h: 6 } + columns: + - field: error_pattern + title: Error Pattern + - field: count + title: Count + columnType: number + progress: column_percent + - field: service + title: Service + - field: last_seen + title: Last Seen + sql: | + SELECT + LEFT(COALESCE( + attributes___exception___message, + attributes___exception___type, + body::text, + status_message + ), 80) as error_pattern, + COUNT(*)::text as count, + resource___service___name as service, + MAX(timestamp)::text as last_seen + FROM otel_logs_and_spans + WHERE project_id='{{project_id}}' + AND (level IN ('ERROR', 'FATAL') OR status_code = 'ERROR') + AND ('{{var-service}}' = '' OR resource___service___name = '{{var-service}}') + {{time_filter}} + GROUP BY error_pattern, service + ORDER BY COUNT(*) DESC + LIMIT 20 + + # Logs by Service Distribution + - type: 'tree_map' + title: 'Logs by Service' + layout: { w: 6, h: 6 } + sql: | + SELECT + COALESCE(resource___service___name, 'Unknown') as service, + COUNT(*)::float as count + FROM otel_logs_and_spans + WHERE project_id='{{project_id}}' + AND body IS NOT NULL + AND ('{{var-service}}' = '' OR resource___service___name = '{{var-service}}') + {{time_filter}} + GROUP BY service + ORDER BY count DESC + LIMIT 20 + + # Recent Error Logs with Trace Links + - type: table + title: 'Recent Errors' + layout: { w: 12, h: 5 } + columns: + - field: timestamp + title: Time + - field: service + title: Service + - field: error_message + title: Error + - field: trace_id + title: Trace ID + sql: | + SELECT + timestamp::text, + resource___service___name as service, + LEFT(COALESCE(attributes___exception___message, body::text, status_message), 100) as error_message, + context___trace_id as trace_id + FROM otel_logs_and_spans + WHERE project_id='{{project_id}}' + AND (level IN ('ERROR', 'FATAL') OR status_code = 'ERROR') + AND ('{{var-service}}' = '' OR resource___service___name = '{{var-service}}') + {{time_filter}} + ORDER BY timestamp DESC + LIMIT 50 + # Log Stream - type: logs title: 'Log Stream' @@ -754,42 +875,138 @@ tabs: - name: Databases icon: database widgets: - # Database Overview Table + # Database Performance Metrics - Enhanced + - type: group + title: Database Performance + layout: { w: 12, h: 4 } + children: + - type: 'timeseries_stat' + title: 'Total Queries' + icon: database + query: | + attributes.db.system.name != null and ("{{var-database}}" == "" or attributes.db.system.name == "{{var-database}}") + | summarize count() by bin_auto(timestamp) + | order by timestamp desc + | limit 200 + unit: queries 
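+          # Note: the ("{{var-database}}" == "" or attributes.db.system.name == "{{var-database}}")
+          # guard used throughout this tab keeps each filter inert until the dashboard
+          # variable is set. Duration literals in this tab are nanoseconds
+          # (e.g. 100000000 = 100 ms in the slow-query stat below).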
+ layout: { w: 3, h: 2 } + + - type: 'timeseries_stat' + title: 'Query Error Rate' + icon: alert-triangle + query: | + attributes.db.system.name != null and ("{{var-database}}" == "" or attributes.db.system.name == "{{var-database}}") + | summarize round(countif(status_code == "ERROR") * 100.0 / count(), 2) by bin_auto(timestamp) + | order by timestamp desc + | limit 200 + unit: '%' + alert_threshold: 5 + warning_threshold: 1 + layout: { w: 3, h: 2 } + + - type: 'timeseries_stat' + title: 'P95 Query Time' + icon: clock + query: | + attributes.db.system.name != null and duration != null and ("{{var-database}}" == "" or attributes.db.system.name == "{{var-database}}") + | summarize p95(duration) / 1000000 by bin_auto(timestamp) + | order by timestamp desc + | limit 200 + unit: ms + layout: { w: 3, h: 2 } + + - type: 'stat' + title: 'Slow Queries (>100ms)' + description: 'Database queries taking over 100ms. High counts may indicate missing indexes or inefficient queries.' + icon: turtle + sql: | + SELECT COUNT(*)::text + FROM otel_logs_and_spans + WHERE project_id='{{project_id}}' + AND attributes___db___system___name IS NOT NULL + AND duration > 100000000 + AND ('{{var-database}}' = '' OR attributes___db___system___name = '{{var-database}}') + {{time_filter}} + unit: queries + layout: { w: 3, h: 2 } + + # TOP: Query Optimization Targets - Sorted by TOTAL TIME spent - type: table - title: Database Systems - layout: { w: 12, h: 5 } + title: 'Query Optimization Targets (by Total Time)' + description: 'Queries sorted by total time spent. Optimizing these yields the biggest performance gains.' + layout: { w: 12, h: 7 } + columns: + - field: query_pattern + title: Query Pattern + - field: count + title: Count + columnType: number + progress: column_percent + - field: avg_ms + title: Avg + columnType: duration + unit: ms + - field: p95_ms + title: P95 + columnType: duration + unit: ms + - field: total_time + title: Total Time + columnType: duration + unit: s + progress: column_percent + - field: error_rate + title: Errors + unit: '%' + progress: value_percent + progress_variant: error + sql: | + SELECT + LEFT(COALESCE(attributes___db___query___summary, attributes___db___query___text, name), 80) as query_pattern, + COUNT(*)::text as count, + ROUND((AVG(duration) / 1e6)::numeric, 2)::text as avg_ms, + ROUND((approx_percentile(0.95, percentile_agg(duration)) / 1e6)::numeric, 2)::text as p95_ms, + ROUND((SUM(duration) / 1e9)::numeric, 2)::text as total_time, + ROUND((COUNT(*) FILTER (WHERE status_code = 'ERROR') * 100.0 / GREATEST(1, COUNT(*)))::numeric, 2)::text as error_rate + FROM otel_logs_and_spans + WHERE project_id='{{project_id}}' + AND attributes___db___system___name IS NOT NULL + AND duration IS NOT NULL + AND ('{{var-database}}' = '' OR attributes___db___system___name = '{{var-database}}') + {{time_filter}} + GROUP BY query_pattern + ORDER BY SUM(duration) DESC + LIMIT 30 + + # Database Systems Overview + - type: table + title: 'Database Systems' + layout: { w: 6, h: 5 } on_row_click: set_variable: database value: '{{row.db_system}}' columns: - field: db_system - title: Database Type + title: Database - field: operations - title: Operations + title: Count columnType: number progress: column_percent - field: avg_duration - title: Avg Duration + title: Avg columnType: duration unit: ms - progress: column_percent - field: error_rate - title: Error Rate + title: Errors unit: '%' progress: value_percent progress_variant: error - - field: throughput - title: Throughput - columnType: number - unit: 
ops/min - progress: column_percent sql: | SELECT attributes___db___system___name as db_system, COUNT(*)::text as operations, ROUND((AVG(duration) / 1e6)::numeric, 2)::text as avg_duration, - ROUND((COUNT(*) FILTER (WHERE status_code = 'ERROR') * 100.0 / GREATEST(1, COUNT(*)))::numeric, 2)::text as error_rate, - ROUND((COUNT(*)::float / GREATEST(1, EXTRACT(EPOCH FROM (MAX(timestamp) - MIN(timestamp))) / 60))::numeric, 2)::text as throughput + ROUND((COUNT(*) FILTER (WHERE status_code = 'ERROR') * 100.0 / GREATEST(1, COUNT(*)))::numeric, 2)::text as error_rate FROM otel_logs_and_spans WHERE project_id='{{project_id}}' AND attributes___db___system___name IS NOT NULL @@ -798,62 +1015,53 @@ tabs: GROUP BY db_system ORDER BY COUNT(*) DESC - # Database Performance Metrics - - type: group - title: Database Metrics - layout: { w: 12, h: 4 } - children: - - type: 'timeseries_stat' - title: 'Total Queries' - icon: database - query: | - attributes.db.system.name != null and ("{{var-database}}" == "" or attributes.db.system.name == "{{var-database}}") - | summarize count() by bin_auto(timestamp) - | order by timestamp desc - | limit 200 - unit: queries - layout: { w: 3, h: 2 } - - - type: 'stat' - title: 'Query Error Rate' - icon: bug - query: | - attributes.db.system.name != null and ("{{var-database}}" == "" or attributes.db.system.name == "{{var-database}}") - | summarize round(countif(status_code == "ERROR") * 100.0 / count(), 2) - unit: '%' - layout: { w: 3, h: 2 } - - - type: 'stat' - title: 'Avg Query Time' - icon: activity - query: | - attributes.db.system.name != null and duration != null and ("{{var-database}}" == "" or attributes.db.system.name == "{{var-database}}") - | summarize round(avg(duration) / 1e6, 2) + # Queries by Operation Type + - type: table + title: 'Queries by Operation Type' + layout: { w: 6, h: 5 } + columns: + - field: operation + title: Operation + - field: count + title: Count + columnType: number + progress: column_percent + - field: avg_ms + title: Avg + columnType: duration unit: ms - layout: { w: 3, h: 2 } - - - type: 'stat' - title: 'DB Operations' - icon: layers - query: | - attributes.db.system.name != null and coalesce(attributes.db.operation.name, name) != null and ("{{var-database}}" == "" or attributes.db.system.name == "{{var-database}}") - | summarize dcount(coalesce(attributes.db.operation.name, name)) - unit: types - layout: { w: 3, h: 2 } + - field: error_rate + title: Errors + unit: '%' + progress: value_percent + progress_variant: error + sql: | + SELECT + UPPER(COALESCE(attributes___db___operation___name, SPLIT_PART(name, ' ', 1), 'UNKNOWN')) as operation, + COUNT(*)::text as count, + ROUND((AVG(duration) / 1e6)::numeric, 2)::text as avg_ms, + ROUND((COUNT(*) FILTER (WHERE status_code = 'ERROR') * 100.0 / GREATEST(1, COUNT(*)))::numeric, 2)::text as error_rate + FROM otel_logs_and_spans + WHERE project_id='{{project_id}}' + AND attributes___db___system___name IS NOT NULL + AND duration IS NOT NULL + AND ('{{var-database}}' = '' OR attributes___db___system___name = '{{var-database}}') + {{time_filter}} + GROUP BY operation + ORDER BY COUNT(*) DESC + LIMIT 10 - # Query Performance by Operation - - type: 'timeseries' - title: 'Query Performance by Operation' + # Query Latency Over Time + - type: 'timeseries_line' + title: 'Query Latency Percentiles' query: | - attributes.db.system.name != null and duration != null and coalesce(attributes.db.operation.name, name) != null and ("{{var-database}}" == "" or attributes.db.system.name == "{{var-database}}") - | 
extend operation = coalesce(attributes.db.operation.name, name) - | summarize avg(duration) by bin_auto(timestamp), operation + attributes.db.system.name != null and duration != null and ("{{var-database}}" == "" or attributes.db.system.name == "{{var-database}}") + | summarize percentiles(duration, 50, 95, 99) by bin_auto(timestamp) | order by timestamp desc - | limit 10000 unit: ns layout: { w: 6, h: 4 } - # Query Volume + # Query Volume Over Time - type: 'timeseries' title: 'Query Volume by Database' query: | @@ -864,17 +1072,15 @@ tabs: unit: queries layout: { w: 6, h: 4 } - # Slow Queries + # Recent Slow Queries (individual) - type: table - title: 'Slowest Queries' - layout: { w: 12, h: 6 } + title: 'Recent Slow Queries (>50ms)' + layout: { w: 12, h: 5 } columns: + - field: timestamp + title: Time - field: query title: Query - - field: operation - title: Operation - - field: database - title: Database - field: duration_ms title: Duration columnType: duration @@ -882,20 +1088,23 @@ tabs: progress: column_percent - field: service title: Service + - field: database + title: DB sql: | SELECT - COALESCE(attributes___db___query___text, attributes___db___query___summary, name) as query, - COALESCE(attributes___db___operation___name, name) as operation, - attributes___db___system___name as database, + timestamp::text, + LEFT(COALESCE(attributes___db___query___text, attributes___db___query___summary, name), 80) as query, ROUND((duration / 1e6)::numeric, 2) as duration_ms, - resource___service___name as service + resource___service___name as service, + attributes___db___system___name as database FROM otel_logs_and_spans WHERE project_id='{{project_id}}' - AND (attributes___db___query___text IS NOT NULL OR attributes___db___query___summary IS NOT NULL OR name IS NOT NULL) + AND attributes___db___system___name IS NOT NULL + AND duration > 50000000 AND ('{{var-database}}' = '' OR attributes___db___system___name = '{{var-database}}') {{time_filter}} - ORDER BY duration DESC - LIMIT 50 + ORDER BY timestamp DESC + LIMIT 30 # Database Errors - type: 'timeseries' @@ -917,3 +1126,233 @@ tabs: | order by timestamp desc | limit 10000 layout: { w: 6, h: 4 } + + - name: Infra + icon: server + widgets: + # Infrastructure Overview Stats + - type: group + title: Resource Utilization + layout: { w: 12, h: 4 } + children: + - type: 'stat' + title: 'Avg CPU Utilization' + icon: cpu + sql: | + SELECT ROUND(AVG((metric_value->'contents'->>'value')::numeric * 100), 1)::text || '%' + FROM telemetry.metrics + WHERE project_id='{{project_id}}' + AND metric_name IN ('container.cpu.utilization', 'process.cpu.utilization', 'jvm.cpu.recent_utilization') + {{time_filter}} + layout: { w: 3, h: 2 } + + - type: 'stat' + title: 'Avg Memory Usage' + icon: hard-drive + sql: | + SELECT ROUND(AVG((metric_value->'contents'->>'value')::numeric), 1)::text || '%' + FROM telemetry.metrics + WHERE project_id='{{project_id}}' + AND metric_name = 'container.memory.percent' + {{time_filter}} + layout: { w: 3, h: 2 } + + - type: 'stat' + title: 'Container Count' + icon: box + sql: | + SELECT COUNT(DISTINCT resource->'container'->>'name')::text + FROM telemetry.metrics + WHERE project_id='{{project_id}}' + AND resource->'container'->>'name' IS NOT NULL + {{time_filter}} + layout: { w: 3, h: 2 } + + - type: 'stat' + title: 'Services with Metrics' + icon: layers + sql: | + SELECT COUNT(DISTINCT resource->'service'->>'name')::text + FROM telemetry.metrics + WHERE project_id='{{project_id}}' + AND resource->'service'->>'name' IS NOT NULL + 
{{time_filter}} + layout: { w: 3, h: 2 } + + # CPU Utilization Over Time + - type: 'timeseries' + title: 'CPU Utilization by Service' + layout: { w: 6, h: 5 } + sql: | + SELECT + time_bucket('1 minute', timestamp) as timestamp, + COALESCE(resource->'service'->>'name', resource->'container'->>'name', 'unknown') as series, + ROUND(AVG((metric_value->'contents'->>'value')::numeric * 100)::numeric, 2) as value + FROM telemetry.metrics + WHERE project_id='{{project_id}}' + AND metric_name IN ('container.cpu.utilization', 'jvm.cpu.recent_utilization', 'process.cpu.utilization') + {{time_filter}} + GROUP BY time_bucket('1 minute', timestamp), series + ORDER BY timestamp DESC + LIMIT 2000 + unit: '%' + + # Memory Usage Over Time + - type: 'timeseries' + title: 'Memory Usage by Service' + layout: { w: 6, h: 5 } + sql: | + SELECT + time_bucket('1 minute', timestamp) as timestamp, + COALESCE(resource->'service'->>'name', resource->'container'->>'name', 'unknown') as series, + ROUND(AVG((metric_value->'contents'->>'value')::numeric)::numeric, 2) as value + FROM telemetry.metrics + WHERE project_id='{{project_id}}' + AND metric_name = 'container.memory.percent' + {{time_filter}} + GROUP BY time_bucket('1 minute', timestamp), series + ORDER BY timestamp DESC + LIMIT 2000 + unit: '%' + + # Container/Service Resource Table + - type: table + title: 'Service Resource Usage' + layout: { w: 12, h: 6 } + columns: + - field: service + title: Service/Container + - field: cpu_avg + title: CPU Avg + unit: '%' + progress: value_percent + - field: cpu_max + title: CPU Max + unit: '%' + progress: value_percent + progress_variant: warning + - field: mem_avg + title: Mem Avg + unit: '%' + progress: value_percent + - field: mem_max + title: Mem Max + unit: '%' + progress: value_percent + progress_variant: warning + - field: data_points + title: Samples + columnType: number + sql: | + WITH cpu_metrics AS ( + SELECT + COALESCE(resource->'service'->>'name', resource->'container'->>'name', 'unknown') as service, + (metric_value->'contents'->>'value')::numeric * 100 as value + FROM telemetry.metrics + WHERE project_id='{{project_id}}' + AND metric_name IN ('container.cpu.utilization', 'jvm.cpu.recent_utilization', 'process.cpu.utilization') + {{time_filter}} + ), + mem_metrics AS ( + SELECT + COALESCE(resource->'service'->>'name', resource->'container'->>'name', 'unknown') as service, + (metric_value->'contents'->>'value')::numeric as value + FROM telemetry.metrics + WHERE project_id='{{project_id}}' + AND metric_name = 'container.memory.percent' + {{time_filter}} + ) + SELECT + COALESCE(c.service, m.service) as service, + ROUND(AVG(c.value)::numeric, 1)::text as cpu_avg, + ROUND(MAX(c.value)::numeric, 1)::text as cpu_max, + ROUND(AVG(m.value)::numeric, 1)::text as mem_avg, + ROUND(MAX(m.value)::numeric, 1)::text as mem_max, + (COUNT(c.*) + COUNT(m.*))::text as data_points + FROM cpu_metrics c + FULL OUTER JOIN mem_metrics m ON c.service = m.service + GROUP BY COALESCE(c.service, m.service) + ORDER BY AVG(c.value) DESC NULLS LAST + LIMIT 20 + + # Kafka Consumer Metrics (if available) + - type: 'timeseries' + title: 'Kafka Consumer Lag' + layout: { w: 6, h: 4 } + sql: | + SELECT + time_bucket('1 minute', timestamp) as timestamp, + COALESCE(resource->'service'->>'name', 'kafka') as series, + AVG((metric_value->'contents'->>'value')::numeric) as value + FROM telemetry.metrics + WHERE project_id='{{project_id}}' + AND metric_name = 'kafka.consumer.assigned_partitions' + {{time_filter}} + GROUP BY time_bucket('1 minute', 
timestamp), series + ORDER BY timestamp DESC + LIMIT 1000 + unit: partitions + + # System Load Average + - type: 'timeseries' + title: 'System Load Average' + layout: { w: 6, h: 4 } + sql: | + SELECT + time_bucket('1 minute', timestamp) as timestamp, + CASE + WHEN metric_name = 'system.cpu.load_average.1m' THEN '1 min' + WHEN metric_name = 'system.cpu.load_average.5m' THEN '5 min' + WHEN metric_name = 'system.cpu.load_average.15m' THEN '15 min' + END as series, + AVG((metric_value->'contents'->>'value')::numeric) as value + FROM telemetry.metrics + WHERE project_id='{{project_id}}' + AND metric_name IN ('system.cpu.load_average.1m', 'system.cpu.load_average.5m', 'system.cpu.load_average.15m') + {{time_filter}} + GROUP BY time_bucket('1 minute', timestamp), metric_name + ORDER BY timestamp DESC + LIMIT 3000 + + # Redis Memory (if available) + - type: 'timeseries' + title: 'Redis Memory Usage' + layout: { w: 6, h: 4 } + sql: | + SELECT + time_bucket('1 minute', timestamp) as timestamp, + metric_name as series, + AVG((metric_value->'contents'->>'value')::numeric) / 1048576 as value + FROM telemetry.metrics + WHERE project_id='{{project_id}}' + AND metric_name IN ('redis.memory.used', 'redis.memory.rss', 'redis.memory.peak') + {{time_filter}} + GROUP BY time_bucket('1 minute', timestamp), metric_name + ORDER BY timestamp DESC + LIMIT 3000 + unit: MB + + # All Available Metrics List + - type: table + title: 'Available Metrics' + layout: { w: 6, h: 4 } + columns: + - field: metric_name + title: Metric + - field: data_points + title: Data Points + columnType: number + progress: column_percent + - field: unit + title: Unit + sql: | + SELECT + metric_name, + COUNT(*)::text as data_points, + COALESCE(MAX(metric_unit), '-') as unit + FROM telemetry.metrics + WHERE project_id='{{project_id}}' + {{time_filter}} + GROUP BY metric_name + ORDER BY COUNT(*) DESC + LIMIT 20 diff --git a/test/doctests/Main.hs b/test/doctests/Main.hs index 79a9fe6e4..302d2a756 100644 --- a/test/doctests/Main.hs +++ b/test/doctests/Main.hs @@ -6,10 +6,11 @@ import Test.DocTest (doctest) main :: IO () main = do args <- getArgs - let extensions = + let extensions = [ "-XGHC2021" , "-XBlockArguments" , "-XDataKinds" + , "-XMultilineStrings" , "-XDerivingVia" , "-XDeriveAnyClass" , "-XDerivingStrategies" diff --git a/test/integration/Pages/BusinessFlowsSpec.hs b/test/integration/Pages/BusinessFlowsSpec.hs index d3f6e9729..e7ad6a03e 100644 --- a/test/integration/Pages/BusinessFlowsSpec.hs +++ b/test/integration/Pages/BusinessFlowsSpec.hs @@ -205,7 +205,7 @@ settingsTests = do it "should load manage members page" \TestContext{tcResources = tr, tcProjectId = testPid} -> do result <- testServant tr $ ManageMembers.manageMembersGetH testPid case result of - (_, ManageMembers.ManageMembersGet (PageCtx _ (pid, members))) -> do + (_, ManageMembers.ManageMembersGet (PageCtx _ (_, members, _))) -> do V.length members `shouldSatisfy` (>= 0) _ -> fail "Expected ManageMembersGet response" diff --git a/test/integration/Pages/GitSyncSpec.hs b/test/integration/Pages/GitSyncSpec.hs index 3841c226d..6b47b7b76 100644 --- a/test/integration/Pages/GitSyncSpec.hs +++ b/test/integration/Pages/GitSyncSpec.hs @@ -2,6 +2,7 @@ module Pages.GitSyncSpec (spec) where import BackgroundJobs qualified import Control.Lens ((.~), (^.), (^?)) +import Data.Generics.Product.Fields (field) import Data.Aeson qualified as AE import Data.Aeson.Lens (key, _String) import Data.ByteString qualified as BS @@ -189,7 +190,7 @@ createDash tr title tags = do , 
Dashboards.updatedAt = now , Dashboards.createdBy = Users.UserId UUID.nil , Dashboards.baseTemplate = Nothing - , Dashboards.schema = Just (def :: Dashboards.Dashboard){Dashboards.title = Just title, Dashboards.tags = Just tags} + , Dashboards.schema = Just $ (def :: Dashboards.Dashboard) & field @"title" .~ Just title & field @"tags" .~ Just tags , Dashboards.starredSince = Nothing , Dashboards.homepageSince = Nothing , Dashboards.tags = V.fromList tags @@ -286,7 +287,7 @@ spec = do Right s -> (s.title, s.tags) `shouldBe` (Just "Test", Just ["prod"]) it "serializes dashboard to YAML" \_ -> do - let schema = (def :: Dashboards.Dashboard){Dashboards.title = Just "My Dash", Dashboards.tags = Just ["a", "b"]} + let schema = (def :: Dashboards.Dashboard) & field @"title" .~ Just "My Dash" & field @"tags" .~ Just (["a", "b"] :: [Text]) case GitSync.yamlToDashboard (GitSync.dashboardToYaml schema) of Left e -> fail $ toString e Right s -> s.title `shouldBe` Just "My Dash" diff --git a/test/integration/Pages/Projects/ManageMembersSpec.hs b/test/integration/Pages/Projects/ManageMembersSpec.hs index 869db0a1e..a27f85245 100644 --- a/test/integration/Pages/Projects/ManageMembersSpec.hs +++ b/test/integration/Pages/Projects/ManageMembersSpec.hs @@ -35,6 +35,8 @@ spec = aroundAll withTestResources do it "Create member" \tr -> do -- Update project to a paid plan to allow multiple members _ <- withPool tr.trPool $ PGT.execute [sql|UPDATE projects.projects SET payment_plan = 'PAID' WHERE id = ?|] (Only testPid) + -- Clean up any existing test members from other tests (keep only the original test user) + _ <- withPool tr.trPool $ PGT.execute [sql|DELETE FROM projects.project_members WHERE project_id = ? AND user_id != '00000000-0000-0000-0000-000000000001'|] (Only testPid) pass let member = ManageMembers.ManageMembersForm @@ -45,7 +47,7 @@ spec = aroundAll withTestResources do testServant tr $ ManageMembers.manageMembersPostH testPid Nothing member -- Check if the response contains the newly added member case pg of - ManageMembers.ManageMembersPost (pid, p) -> do + ManageMembers.ManageMembersPost (_, p, _) -> do "example@gmail.com" `shouldSatisfy` (`elem` (p <&> (.email))) _ -> fail "Expected ManageMembersPost response" @@ -60,7 +62,7 @@ spec = aroundAll withTestResources do -- Check if the member's permission is updated case pg of - ManageMembers.ManageMembersPost (pid, projMembers) -> do + ManageMembers.ManageMembersPost (_, projMembers, _) -> do let memberM = projMembers & V.toList & find (\pm -> pm.email == "example@gmail.com") isJust memberM `shouldBe` True let mem = memberM & Unsafe.fromJust @@ -75,7 +77,7 @@ spec = aroundAll withTestResources do -- Check if the response contains the expected members -- Note: 2 members expected - the test user from setup + example@gmail.com case pg of - ManageMembers.ManageMembersGet (PageCtx _ (pid, projMembers)) -> do + ManageMembers.ManageMembersGet (PageCtx _ (_, projMembers, _)) -> do let emails = (.email) <$> projMembers "example@gmail.com" `shouldSatisfy` (`elem` emails) length projMembers `shouldBe` 2 @@ -92,7 +94,7 @@ spec = aroundAll withTestResources do -- Check if the member is deleted case pg of - ManageMembers.ManageMembersPost (pid, projMembers) -> do + ManageMembers.ManageMembersPost (_, projMembers, _) -> do let emails = (.email) <$> projMembers "example@gmail.com" `shouldNotSatisfy` (`elem` emails) _ -> fail "Expected ManageMembersPost response" @@ -107,7 +109,7 @@ spec = aroundAll withTestResources do testServant tr $ 
ManageMembers.manageMembersPostH testPid Nothing member -- Check if the response contains the newly added member case pg of - ManageMembers.ManageMembersPost (pid, p) -> do + ManageMembers.ManageMembersPost (_, p, _) -> do "example@gmail.com" `shouldSatisfy` (`elem` (p <&> (.email))) _ -> fail "Expected ManageMembersPost response" diff --git a/test/unit/Pkg/ParserSpec.hs b/test/unit/Pkg/ParserSpec.hs index fea71d476..fa4d8fcd8 100644 --- a/test/unit/Pkg/ParserSpec.hs +++ b/test/unit/Pkg/ParserSpec.hs @@ -91,7 +91,7 @@ SELECT extract(epoch from time_bucket('1 days', timestamp))::integer, 'value', c let (query, _) = fromRight' $ parseQueryToComponents (defSqlQueryCfg defPid fixedUTCTime Nothing Nothing) "| summarize count() / 5.0 by bin_auto(timestamp)" let expected = [text| - SELECT extract(epoch from time_bucket('6 hours', timestamp))::integer, ((count(*)::float / 5.0))::float AS arith_, count(*) OVER() as _total_count FROM otel_logs_and_spans WHERE project_id='00000000-0000-0000-0000-000000000000' and (TRUE) GROUP BY time_bucket('6 hours', timestamp) ORDER BY time_bucket('6 hours', timestamp) DESC |] + SELECT extract(epoch from time_bucket('6 hours', timestamp))::integer, (COALESCE((count(*)::float / NULLIF(5.0, 0)), 0))::float AS arith_, count(*) OVER() as _total_count FROM otel_logs_and_spans WHERE project_id='00000000-0000-0000-0000-000000000000' and (TRUE) GROUP BY time_bucket('6 hours', timestamp) ORDER BY time_bucket('6 hours', timestamp) DESC |] normT query `shouldBe` normT expected it "query a metric" do diff --git a/web-components/src/main.ts b/web-components/src/main.ts index 5bf8f97ad..85816f988 100644 --- a/web-components/src/main.ts +++ b/web-components/src/main.ts @@ -258,9 +258,13 @@ window.createTagify = (selectorOrElement: string | Element, options: any = {}) = maxItems: 50, fuzzySearch: true, position: 'input', + place: 'parent', caseSensitive: false, mapValueTo: 'name', searchKeys: ['value', 'name'], + // appendTarget: function () { + // return this.DOM.scope; + // }, }, }; const element = typeof selectorOrElement === 'string' ? 
document.querySelector(selectorOrElement) : selectorOrElement;

From cbb6a64d77e8849b2867b96cf5d2141cc2ecac6c Mon Sep 17 00:00:00 2001
From: Yussif Mohammed
Date: Fri, 16 Jan 2026 23:49:32 +0000
Subject: [PATCH 60/71] fix endpoint error, volume and latency detections

---
 src/BackgroundJobs.hs        | 12 +++++-------
 src/Models/Apis/Endpoints.hs | 21 +++++++++++----------
 src/Pages/Anomalies.hs       |  4 ++--
 3 files changed, 18 insertions(+), 19 deletions(-)

diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs
index 512a0bbee..4446f47ab 100644
--- a/src/BackgroundJobs.hs
+++ b/src/BackgroundJobs.hs
@@ -1556,7 +1556,7 @@ detectErrorSpikes pid authCtx = do
       (BSEstablished, Just mean, Just stddev) | stddev > 0 -> do
         let currentRate = fromIntegral errRate.currentHourCount :: Double
             zScore = (currentRate - mean) / stddev
-            isSpike = zScore > 3.0 && currentRate > mean + 5
+            isSpike = abs zScore > 3.0 && currentRate > mean + 5

         Relude.when isSpike $ do
           Log.logInfo "Error spike detected" (errRate.errorId, errRate.errorType, currentRate, mean, zScore)
@@ -1676,7 +1676,7 @@ detectLogPatternSpikes pid authCtx = do
         let currentRate = fromIntegral lpRate.currentHourCount :: Double
             zScore = (currentRate - mean) / stddev
             -- Spike detection: >3 std devs AND at least 10 more events than baseline
-            isSpike = zScore > 3.0 && currentRate > mean + 10
+            isSpike = abs zScore > 3.0 && currentRate > mean + 10

         Relude.when isSpike $ do
           Log.logInfo "Log pattern spike detected" (lpRate.patternId, lpRate.logPattern, currentRate, mean, zScore)
@@ -1749,16 +1749,14 @@ calculateEndpointBaselines pid = do
 detectEndpointLatencyDegradation :: Projects.ProjectId -> Config.AuthContext -> ATBackgroundCtx ()
 detectEndpointLatencyDegradation pid authCtx = do
   Log.logInfo "Detecting endpoint latency degradation" pid
-  endpointsWithRates <- Endpoints.getEndpointsWithCurrentRates pid
-
   forM_ endpointsWithRates \epRate -> do
     -- Check P95 latency degradation
     case (epRate.baselineLatencyP95, epRate.baselineLatencyStddev, epRate.currentHourLatencyP95) of
       (Just baselineP95, Just stddev, Just currentP95) | stddev > 0 -> do
         let zScore = (currentP95 - baselineP95) / stddev
-            -- Degradation: >3 std devs AND at least 50% increase
-            isDegraded = zScore > 3.0 && currentP95 > baselineP95 * 1.5
+            -- Degradation: >3 std devs (positive or negative) AND at least 10% increase
+            isDegraded = abs zScore > 3.0 && currentP95 > baselineP95 * 1.1
         Relude.when isDegraded $ do
           Log.logInfo "Endpoint latency degradation detected" (epRate.endpointHash, currentP95, baselineP95, zScore)
           issue <- liftIO $ Issues.createEndpointLatencyDegradationIssue pid epRate.endpointHash epRate.method epRate.urlPath (Just epRate.host) currentP95 baselineP95 stddev "p95" V.empty
@@ -1783,7 +1781,7 @@ detectEndpointErrorRateSpike pid authCtx = do
         let currentErrorRate = fromIntegral epRate.currentHourErrors / fromIntegral epRate.currentHourRequests
             zScore = (currentErrorRate - baselineMean) / stddev
             -- Spike: >3 std devs AND error rate > 5% AND at least 5 errors
-            isSpike = zScore > 3.0 && currentErrorRate > 0.05 && epRate.currentHourErrors >= 5
+            isSpike = abs zScore > 3.0 && currentErrorRate > 0.05 && epRate.currentHourErrors >= 5

         Relude.when isSpike $ do
           Log.logInfo "Endpoint error rate spike detected" (epRate.endpointHash, currentErrorRate, baselineMean, zScore)
diff --git a/src/Models/Apis/Endpoints.hs b/src/Models/Apis/Endpoints.hs
index 2a23b750d..a2d8a373b 100644
--- a/src/Models/Apis/Endpoints.hs
+++ b/src/Models/Apis/Endpoints.hs
@@ -312,7 +312,7 @@ getEndpointsWithCurrentRates pid 
= PG.query q (pid, pid) attributes->'http'->>'route' AS url_path, attributes___http___request___method AS method, COUNT(*) AS request_count, - COUNT(*) FILTER (WHERE status_code >= 500 OR status_code = 0) AS error_count, + COUNT(*) FILTER (WHERE attributes___http___response___status_code::int >= 500 OR attributes___http___response___status_code::int = 0) AS error_count, PERCENTILE_CONT(0.50) WITHIN GROUP (ORDER BY duration) AS p50_latency, PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY duration) AS p95_latency, PERCENTILE_CONT(0.99) WITHIN GROUP (ORDER BY duration) AS p99_latency @@ -360,7 +360,7 @@ getEndpointStats pid endpointHash hours = listToMaybe <$> PG.query q (pid, endpo SELECT DATE_TRUNC('hour', timestamp) AS hour, COUNT(*) AS request_count, - COUNT(*) FILTER (WHERE status_code >= 500 OR status_code = 0) AS error_count, + COUNT(*) FILTER (WHERE attributes___http___response___status_code::int >= 500 OR attributes___http___response___status_code::int = 0) AS error_count, AVG(duration) AS avg_latency, PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY duration) AS p95_latency, PERCENTILE_CONT(0.99) WITHIN GROUP (ORDER BY duration) AS p99_latency @@ -368,20 +368,21 @@ getEndpointStats pid endpointHash hours = listToMaybe <$> PG.query q (pid, endpo JOIN apis.endpoints e ON e.url_path = (ols.attributes->'http'->>'route') AND e.method = ols.attributes___http___request___method WHERE ols.project_id = ? + AND e.hash = ? AND ols.name = 'monoscope.http' AND ols.timestamp >= NOW() - MAKE_INTERVAL(hours => ?) GROUP BY DATE_TRUNC('hour', timestamp) ) SELECT COUNT(*)::int AS total_hours, - COALESCE(AVG(request_count), 0) AS hourly_mean_requests, - COALESCE(STDDEV(request_count), 0) AS hourly_stddev_requests, - COALESCE(AVG(error_count::float / NULLIF(request_count, 0)), 0) AS hourly_mean_errors, - COALESCE(STDDEV(error_count::float / NULLIF(request_count, 0)), 0) AS hourly_stddev_errors, - COALESCE(AVG(avg_latency), 0) AS mean_latency, - COALESCE(STDDEV(avg_latency), 0) AS stddev_latency, - COALESCE(AVG(p95_latency), 0) AS p95_latency, - COALESCE(AVG(p99_latency), 0) AS p99_latency + COALESCE(AVG(request_count), 0)::float8 AS hourly_mean_requests, + COALESCE(STDDEV(request_count), 0)::float8 AS hourly_stddev_requests, + COALESCE(AVG(error_count::float / NULLIF(request_count, 0)), 0)::float8 AS hourly_mean_errors, + COALESCE(STDDEV(error_count::float / NULLIF(request_count, 0)), 0)::float8 AS hourly_stddev_errors, + COALESCE(AVG(avg_latency), 0)::float8 AS mean_latency, + COALESCE(STDDEV(avg_latency), 0)::float8 AS stddev_latency, + COALESCE(AVG(p95_latency), 0)::float8 AS p95_latency, + COALESCE(AVG(p99_latency), 0)::float8 AS p99_latency FROM hourly_stats HAVING COUNT(*) > 0 |] diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index 084d190d6..b8c83f712 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -78,7 +78,7 @@ import System.Config (AuthContext (..), EnvConfig (..)) import System.Types (ATAuthCtx, RespHeaders, addErrorToast, addRespHeaders, addSuccessToast) import Text.MMark qualified as MMark import Text.Time.Pretty (prettyTimeAuto) -import Utils (changeTypeFillColor, checkFreeTierExceeded, escapedQueryPartial, faSprite_, formatUTC, lookupValueText, methodFillColor, statusFillColor, toUriStr) +import Utils (changeTypeFillColor, checkFreeTierExceeded, getDurationNSMS, escapedQueryPartial, faSprite_, formatUTC, lookupValueText, methodFillColor, statusFillColor, toUriStr) import Web.FormUrlEncoded (FromForm) @@ -1347,7 +1347,7 @@ renderIssueMainCol pid (IssueVM 
hideByDefault isWidget currTime timeFilter issue
           div_ [class_ "flex items-center gap-2 text-sm"] do
             span_ [class_ "font-medium text-fillWarning-strong"] $ toHtml d.endpointMethod
             span_ [class_ "text-fillWarning-strong"] $ toHtml d.endpointPath
-          div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml $ d.percentile <> ": " <> show (round d.baselineLatencyMs :: Int) <> "ms → " <> show (round d.currentLatencyMs :: Int) <> "ms"
+          div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml (d.percentile <> ": " <> toText (getDurationNSMS (round d.baselineLatencyMs)) <> " → " <> toText (getDurationNSMS (round d.currentLatencyMs)))
       _ -> pass
     Issues.EndpointErrorRateSpike -> case AE.fromJSON (getAeson issue.issueData) of
       AE.Success (d :: Issues.EndpointErrorRateSpikeData) ->

From 37012835928915dab93bc4bd28292b1d8f2e48c4 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Fri, 16 Jan 2026 23:50:27 +0000
Subject: [PATCH 61/71] Auto-format code with fourmolu

---
 src/Pages/Anomalies.hs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs
index b8c83f712..ed7ff0242 100644
--- a/src/Pages/Anomalies.hs
+++ b/src/Pages/Anomalies.hs
@@ -78,7 +78,7 @@ import System.Config (AuthContext (..), EnvConfig (..))
 import System.Types (ATAuthCtx, RespHeaders, addErrorToast, addRespHeaders, addSuccessToast)
 import Text.MMark qualified as MMark
 import Text.Time.Pretty (prettyTimeAuto)
-import Utils (changeTypeFillColor, checkFreeTierExceeded, getDurationNSMS, escapedQueryPartial, faSprite_, formatUTC, lookupValueText, methodFillColor, statusFillColor, toUriStr)
+import Utils (changeTypeFillColor, checkFreeTierExceeded, escapedQueryPartial, faSprite_, formatUTC, getDurationNSMS, lookupValueText, methodFillColor, statusFillColor, toUriStr)
 import Web.FormUrlEncoded (FromForm)

From 206e0dbfefb9872d5926b9f293cd8d9fb528e8 Mon Sep 17 00:00:00 2001
From: Yussif Mohammed
Date: Sat, 17 Jan 2026 21:54:18 +0000
Subject: [PATCH 62/71] issue list ui improvements

---
 src/Pages/Anomalies.hs | 110 +++++++++++++++++++++++++++++++++--------
 1 file changed, 90 insertions(+), 20 deletions(-)

diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs
index ed7ff0242..31f116b9e 100644
--- a/src/Pages/Anomalies.hs
+++ b/src/Pages/Anomalies.hs
@@ -98,6 +98,62 @@ data IssueEndpointInfo = IssueEndpointInfo
   deriving (AE.FromJSON) via DAE.CustomJSON '[DAE.FieldLabelModifier '[DAE.CamelToSnake]] IssueEndpointInfo


+-- | Card style variants for anomaly metric cards
+data CardStyle = CardNeutral | CardHighlight | CardWarning | CardError | CardInfo
+  deriving (Eq)
+
+
+-- | Metric card data for anomaly displays
+data MetricCard = MetricCard
+  { label :: Text
+  , icon :: Text
+  , value :: Html ()
+  , style :: CardStyle
+  , arrowIcon :: Maybe Text
+  }
+
+
+-- | Get CSS classes for a card style
+cardStyleClasses :: CardStyle -> (Text, Text, Text)
+cardStyleClasses = \case
+  CardNeutral -> ("border-strokeWeak", "", "text-textWeak")
+  CardHighlight -> ("border-strokeInformation-weak", "bg-fillInformation-weak", "text-fillInformation-strong")
+  CardWarning -> ("border-strokeWarning-weak", "bg-fillWarning-weak", "text-fillWarning-strong")
+  CardError -> ("border-strokeError-weak", "bg-fillError-weak", "text-fillError-strong")
+  CardInfo -> ("border-strokeInformation-weak", "bg-fillInformation-weak", "text-fillInformation-strong")
+
+
+-- | Render a single metric card
+renderMetricCard :: MetricCard -> Html ()
+renderMetricCard card = do
+  let (borderClass, bgClass, iconColorClass) = 
cardStyleClasses card.style + valueColorClass = case card.style of + CardNeutral -> "text-textStrong" + _ -> iconColorClass + div_ [class_ $ "rounded-lg border p-4 " <> borderClass <> " " <> bgClass] do + div_ [class_ "flex items-center justify-between mb-2"] do + span_ [class_ "text-xs text-textWeak uppercase tracking-wide"] $ toHtml card.label + faSprite_ card.icon "regular" $ "w-4 h-4 " <> iconColorClass + div_ [class_ $ "flex items-center gap-1 " <> valueColorClass] do + span_ [class_ "text-xl font-bold"] card.value + whenJust card.arrowIcon $ \arrow -> + faSprite_ arrow "regular" "w-4 h-4" + + +-- | Render the full anomaly metrics section with cards and alert box +anomalyMetricsSection :: [MetricCard] -> CardStyle -> Text -> Text -> Html () +anomalyMetricsSection cards alertStyle alertTitle alertMessage = do + let (_, alertBgClass, alertIconClass) = cardStyleClasses alertStyle + div_ [class_ "mb-4"] do + div_ [class_ "grid grid-cols-4 gap-3"] do + forM_ cards renderMetricCard + div_ [class_ $ "mt-3 p-4 rounded-lg flex items-start gap-3 " <> alertBgClass] do + faSprite_ "circle-exclamation" "regular" $ "w-5 h-5 flex-shrink-0 mt-0.5 " <> alertIconClass + div_ [] do + div_ [class_ "font-semibold text-textStrong text-sm"] $ toHtml alertTitle + div_ [class_ "text-sm text-textWeak mt-1"] $ toHtml alertMessage + + acknowledgeAnomalyGetH :: Projects.ProjectId -> Anomalies.AnomalyId -> Maybe Text -> ATAuthCtx (RespHeaders AnomalyAction) acknowledgeAnomalyGetH pid aid hostM = do (sess, project) <- Sessions.sessionAndProject pid @@ -1343,31 +1399,43 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue _ -> pass Issues.EndpointLatencyDegradation -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.EndpointLatencyDegradationData) -> - div_ [class_ "mb-4 p-3 bg-fillWarning-weak border border-strokeWarning-weak rounded-lg"] do - div_ [class_ "flex items-center gap-2 text-sm"] do - span_ [class_ "font-medium text-fillWarning-strong"] $ toHtml d.endpointMethod - span_ [class_ "text-fillWarning-strong"] $ toHtml d.endpointPath - div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml (d.percentile <> ": " <> toText (getDurationNSMS (round d.baselineLatencyMs)) <> " → " <> toText (getDurationNSMS (round d.currentLatencyMs))) + let cards = + [ MetricCard "Percentile" "chart-line" (toHtml d.percentile) CardNeutral Nothing + , MetricCard "Baseline" "clock" (toHtml $ getDurationNSMS (round d.baselineLatencyMs)) CardNeutral Nothing + , MetricCard "Current" "chart-line" (toHtml $ getDurationNSMS (round d.currentLatencyMs)) CardHighlight Nothing + , MetricCard "Change" "circle-exclamation" (toHtml $ "+" <> show (round d.degradationPercent :: Int) <> "%") CardWarning (Just "arrow-up") + ] + in anomalyMetricsSection cards CardWarning "Performance Degradation Detected" issue.recommendedAction _ -> pass Issues.EndpointErrorRateSpike -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.EndpointErrorRateSpikeData) -> - div_ [class_ "mb-4 p-3 bg-fillError-weak border border-strokeError-weak rounded-lg"] do - div_ [class_ "flex items-center gap-2 text-sm"] do - span_ [class_ "font-medium text-fillError-strong"] $ toHtml d.endpointMethod - span_ [class_ "text-fillError-strong"] $ toHtml d.endpointPath - div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml $ show (round (d.currentErrorRate * 100) :: Int) <> "% error rate (" <> show d.errorCount <> " errors)" + let cards = + [ MetricCard "Error Rate" "percent" (toHtml $ show (round (d.currentErrorRate * 
100) :: Int) <> "%") CardNeutral Nothing + , MetricCard "Baseline" "clock" (toHtml $ show (round (d.baselineErrorRate * 100) :: Int) <> "%") CardNeutral Nothing + , MetricCard "Errors" "hashtag" (toHtml $ show d.errorCount <> "/" <> show d.totalRequests) CardNeutral Nothing + , MetricCard "Spike" "circle-exclamation" (toHtml $ "+" <> show (round d.spikePercent :: Int) <> "%") CardError (Just "arrow-up") + ] + in anomalyMetricsSection cards CardError "Error Rate Spike Detected" issue.recommendedAction _ -> pass Issues.EndpointVolumeRateChange -> case AE.fromJSON (getAeson issue.issueData) of AE.Success (d :: Issues.EndpointVolumeRateChangeData) -> - div_ [class_ $ "mb-4 p-3 rounded-lg border " <> if d.changeDirection == "drop" then "bg-fillWarning-weak border-strokeWarning-weak" else "bg-fillInformation-weak border-strokeInformation-weak"] do - div_ [class_ "flex items-center gap-2 text-sm"] do - span_ [class_ "font-medium text-textStrong"] $ toHtml d.endpointMethod - span_ [class_ "text-textStrong"] $ toHtml d.endpointPath - div_ [class_ "text-xs text-textWeak mt-1"] $ toHtml $ "Traffic " <> d.changeDirection <> ": " <> show (round d.changePercent :: Int) <> "%" + let isDrop = d.changeDirection == "drop" + cardStyle = if isDrop then CardWarning else CardInfo + arrowIcon = if isDrop then "arrow-down" else "arrow-up" + changeSign = if isDrop then "" else "+" + alertTitle = if isDrop then "Traffic Drop Detected" else "Traffic Surge Detected" + cards = + [ MetricCard "Direction" arrowIcon (span_ [class_ "capitalize"] $ toHtml d.changeDirection) cardStyle Nothing + , MetricCard "Current" "gauge-high" (toHtml $ show (round d.currentRatePerHour :: Int) <> "/hr") CardNeutral Nothing + , MetricCard "Baseline" "clock" (toHtml $ show (round d.baselineRatePerHour :: Int) <> "/hr") CardNeutral Nothing + , MetricCard "Change" "circle-exclamation" (toHtml $ changeSign <> show (round (abs d.changePercent) :: Int) <> "%") cardStyle (Just arrowIcon) + ] + in anomalyMetricsSection cards cardStyle alertTitle issue.recommendedAction _ -> pass - -- Recommended action - div_ [class_ "border-l-4 border-strokeBrand pl-4 mb-4"] $ p_ [class_ "text-sm text-textStrong leading-relaxed"] $ toHtml issue.recommendedAction + -- Recommended action (only show for issue types that don't have built-in alert boxes) + unless (issue.issueType `elem` [Issues.EndpointLatencyDegradation, Issues.EndpointErrorRateSpike, Issues.EndpointVolumeRateChange]) do + div_ [class_ "border-l-4 border-strokeBrand pl-4 mb-4"] $ p_ [class_ "text-sm text-textStrong leading-relaxed"] $ toHtml issue.recommendedAction -- Action buttons let logsQuery = case issue.issueType of @@ -1391,9 +1459,11 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue a_ [href_ url, class_ "inline-flex items-center justify-center whitespace-nowrap text-sm font-medium transition-all h-8 rounded-md gap-1.5 px-3 text-textBrand hover:text-textBrand/80 hover:bg-fillBrand-weak"] do faSprite_ "eye" "regular" "w-4 h-4" span_ [class_ "leading-none"] "View related logs" - button_ [class_ "inline-flex items-center justify-center whitespace-nowrap text-sm font-medium transition-all h-8 rounded-md gap-1.5 px-3 border bg-background hover:text-accent-foreground text-textBrand border-strokeBrand-strong hover:bg-fillBrand-weak"] do - faSprite_ "code" "regular" "w-4 h-4" - span_ [class_ "leading-none"] "View Full Schema" + -- View Full Schema button (only for schema-related issue types) + when (issue.issueType `elem` [Issues.NewEndpoint, Issues.NewShape, 
Issues.FieldChange]) do + button_ [class_ "inline-flex items-center justify-center whitespace-nowrap text-sm font-medium transition-all h-8 rounded-md gap-1.5 px-3 border bg-background hover:text-accent-foreground text-textBrand border-strokeBrand-strong hover:bg-fillBrand-weak"] do + faSprite_ "code" "regular" "w-4 h-4" + span_ [class_ "leading-none"] "View Full Schema" -- Acknowledge button let isAcknowledged = isJust issue.acknowledgedAt let acknowledgeEndpoint = "/p/" <> issue.projectId.toText <> "/anomalies/" <> Issues.issueIdText issue.id <> if isAcknowledged then "/unacknowledge" else "/acknowledge" From 353862e16ece63071988cb23d511fa6ba654bc87 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Sun, 18 Jan 2026 11:14:07 +0000 Subject: [PATCH 63/71] baseline state update after 14 days --- src/BackgroundJobs.hs | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 4446f47ab..55dabbf44 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -1638,22 +1638,20 @@ processNewError pid errorHash authCtx = do calculateLogPatternBaselines :: Projects.ProjectId -> ATBackgroundCtx () calculateLogPatternBaselines pid = do Log.logInfo "Calculating log pattern baselines" pid - + now <- liftIO getCurrentTime -- Get all non-ignored patterns patterns <- LogPatterns.getLogPatterns pid Nothing 1000 0 - forM_ patterns \lp -> do -- Get hourly stats from otel_logs_and_spans over last 7 days (168 hours) statsM <- LogPatterns.getPatternStats pid lp.logPattern 168 - case statsM of Nothing -> pass Just stats -> do let newSamples = stats.totalHours newMean = stats.hourlyMean newStddev = stats.hourlyStddev - -- Establish baseline after 24 hours of data - newState = if newSamples >= 24 then BSEstablished else BSLearning + patternAgeDays = diffUTCTime now (zonedTimeToUTC lp.createdAt) / (24 * 60 * 60) + newState = if newMean > 100 || patternAgeDays >= 14 then BSEstablished else BSLearning _ <- LogPatterns.updateBaseline pid lp.patternHash newState newMean newStddev newSamples pass @@ -1720,6 +1718,7 @@ calculateEndpointBaselines :: Projects.ProjectId -> ATBackgroundCtx () calculateEndpointBaselines pid = do Log.logInfo "Calculating endpoint baselines" pid endpoints <- Endpoints.getActiveEndpoints pid + now <- liftIO getCurrentTime forM_ endpoints \ep -> do -- Get hourly stats over last 7 days (168 hours) @@ -1728,8 +1727,8 @@ calculateEndpointBaselines pid = do Nothing -> pass Just stats -> do let newSamples = stats.totalHours - -- Establish baseline after 24 hours of data - newState = if newSamples >= 24 then BSEstablished else BSLearning + endpointAgeDays = diffUTCTime now ep.createdAt / (24 * 60 * 60) + newState = if stats.hourlyMeanRequests > 100 || (endpointAgeDays >= 14 && stats.hourlyMeanRequests > 0) then BSEstablished else BSLearning Endpoints.updateEndpointBaseline ep.id newState From e164fc196ba991f0621a4d04c14e91055b0b946a Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Sun, 18 Jan 2026 12:21:17 +0000 Subject: [PATCH 64/71] fix build --- src/BackgroundJobs.hs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 55dabbf44..dbf06cd35 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -18,7 +18,7 @@ import Data.Text.Display (display) import Data.Time (DayOfWeek (Monday), UTCTime (utctDay), ZonedTime, addUTCTime, dayOfWeek, formatTime, getZonedTime) import Data.Time.Clock (diffUTCTime) import Data.Time.Format 
(defaultTimeLocale)
-import Data.Time.LocalTime (LocalTime (localDay), ZonedTime (zonedTimeToLocalTime), getCurrentTimeZone, utcToZonedTime)
+import Data.Time.LocalTime (LocalTime (localDay), zonedTimeToUTC, ZonedTime (zonedTimeToLocalTime), getCurrentTimeZone, utcToZonedTime)
 import Data.UUID qualified as UUID
 import Data.UUID.V4 qualified as UUIDV4
 import Data.Vector qualified as V
@@ -1638,7 +1638,7 @@ processNewError pid errorHash authCtx = do
 calculateLogPatternBaselines :: Projects.ProjectId -> ATBackgroundCtx ()
 calculateLogPatternBaselines pid = do
   Log.logInfo "Calculating log pattern baselines" pid
-  now <- liftIO getCurrentTime
+  now <- Time.currentTime
   -- Get all non-ignored patterns
   patterns <- LogPatterns.getLogPatterns pid Nothing 1000 0
   forM_ patterns \lp -> do
@@ -1718,7 +1718,7 @@ calculateEndpointBaselines :: Projects.ProjectId -> ATBackgroundCtx ()
 calculateEndpointBaselines pid = do
   Log.logInfo "Calculating endpoint baselines" pid
   endpoints <- Endpoints.getActiveEndpoints pid
-  now <- liftIO getCurrentTime
+  now <- Time.currentTime
 
   forM_ endpoints \ep -> do
     -- Get hourly stats over last 7 days (168 hours)

From c75c61d24f31754395494883236b780ae7d00bca Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Sun, 18 Jan 2026 12:21:45 +0000
Subject: [PATCH 65/71] Auto-format code with fourmolu

---
 src/BackgroundJobs.hs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs
index dbf06cd35..8fe07269f 100644
--- a/src/BackgroundJobs.hs
+++ b/src/BackgroundJobs.hs
@@ -18,7 +18,7 @@ import Data.Text.Display (display)
 import Data.Time (DayOfWeek (Monday), UTCTime (utctDay), ZonedTime, addUTCTime, dayOfWeek, formatTime, getZonedTime)
 import Data.Time.Clock (diffUTCTime)
 import Data.Time.Format (defaultTimeLocale)
-import Data.Time.LocalTime (LocalTime (localDay), zonedTimeToUTC, ZonedTime (zonedTimeToLocalTime), getCurrentTimeZone, utcToZonedTime)
+import Data.Time.LocalTime (LocalTime (localDay), ZonedTime (zonedTimeToLocalTime), getCurrentTimeZone, utcToZonedTime, zonedTimeToUTC)
 import Data.UUID qualified as UUID
 import Data.UUID.V4 qualified as UUIDV4
 import Data.Vector qualified as V

From 56c4ef87b4da97bd3371d729375a0b5fe66b52d8 Mon Sep 17 00:00:00 2001
From: Yussif Mohammed
Date: Sun, 18 Jan 2026 13:17:58 +0000
Subject: [PATCH 66/71] add doctests, fix build errors

---
 src/Models/Apis/Errors.hs             | 61 +++++++++++++++++++++++++++
 src/Models/Apis/Issues/Enhancement.hs |  6 +++
 src/Models/Apis/LogPatterns.hs        |  4 +-
 src/ProcessMessage.hs                 | 10 ++---
 4 files changed, 74 insertions(+), 7 deletions(-)

diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs
index c7de6b21f..ce0ee6185 100644
--- a/src/Models/Apis/Errors.hs
+++ b/src/Models/Apis/Errors.hs
@@ -690,6 +690,20 @@ data StackFrame = StackFrame
   deriving anyclass (NFData)
 
 
+-- | Parse a stack trace into a list of stack frames based on runtime
+--
+-- >>> length $ parseStackTrace "nodejs" "at processTicksAndRejections (node:internal/process/task_queues:95:5)\nat handleRequest (/app/src/server.js:42:15)"
+-- 2
+--
+-- >>> map sfFunction $ parseStackTrace "python" "File \"/app/main.py\", line 10, in main\nFile \"/app/utils.py\", line 5, in helper"
+-- ["main","helper"]
+--
+-- >>> map sfIsInApp $ parseStackTrace "nodejs" "at handleRequest (/app/src/server.js:42:15)\nat processTicksAndRejections (node:internal/process/task_queues:95:5)"
+-- [True,False]
+--
+-- >>> map sfFunction $ parseStackTrace "java" "at com.example.MyClass.doWork(MyClass.java:25)\nat 
org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1067)" +-- ["doWork","doDispatch"] +-- parseStackTrace :: Text -> Text -> [StackFrame] parseStackTrace mSdk stackText = let lns = filter (not . T.null . T.strip) $ T.lines stackText @@ -1080,6 +1094,19 @@ parseGenericFrame line = -- | Normalize a stack trace for fingerprinting -- Returns a list of normalized frame strings suitable for hashing +-- +-- >>> normalizeStackTrace "nodejs" "at handleRequest (/app/src/server.js:42:15)\nat processTicksAndRejections (node:internal/process/task_queues:95:5)" +-- "server|handleRequest" +-- +-- >>> normalizeStackTrace "python" "File \"/app/main.py\", line 10, in main\nFile \"/app/utils.py\", line 5, in helper" +-- "main|main\nutils|helper" +-- +-- >>> normalizeStackTrace "java" "at com.example.MyClass.doWork(MyClass.java:25)\nat com.example.Service.process(Service.java:50)" +-- "com.example.MyClass|doWork\ncom.example.Service|process" +-- +-- >>> normalizeStackTrace "unknown" "some random frame info" +-- "some random frame info|some" +-- normalizeStackTrace :: Text -> Text -> Text normalizeStackTrace runtime stackText = let frames = parseStackTrace runtime stackText @@ -1118,6 +1145,19 @@ normalizeStackTrace runtime stackText = -- | Normalize an error message for fingerprinting -- Limits to first 2 non-empty lines and replaces variable content +-- +-- >>> normalizeMessage "Connection refused to 192.168.1.100:5432" +-- "Connection refused to {ipv4}{port}" +-- +-- >>> normalizeMessage "User c73bcdcc-2669-4bf6-81d3-e4ae73fb11fd not found" +-- "User {uuid} not found" +-- +-- >>> normalizeMessage "Error 404: Resource 12345 not available\n\nDetails here\nMore info" +-- "Error {integer}: Resource {integer} not available Details here" +-- +-- >>> normalizeMessage " Trimmed message " +-- "Trimmed message" +-- normalizeMessage :: Text -> Text normalizeMessage msg = let lns = take 2 $ filter (not . T.null . T.strip) $ T.lines msg @@ -1132,6 +1172,27 @@ normalizeMessage msg = -- 1. Stack trace (if has meaningful in-app frames) -- 2. Exception type + message -- 3. 
Message only +-- +-- With stack trace - uses projectId, exceptionType, and normalized stack: +-- >>> computeErrorFingerprint "proj1" (Just "svc") (Just "span") "nodejs" "TypeError" "msg" "at handler (/app/index.js:10:5)" +-- "7090116541995986264" +-- +-- Without stack trace but with exception type - uses projectId, service, span, type, and message: +-- >>> computeErrorFingerprint "proj1" (Just "user-service") (Just "/api/users") "nodejs" "ValidationError" "Invalid email format" "" +-- "1283749781654932840" +-- +-- Without stack trace or exception type - uses projectId, service, span, and message only: +-- >>> computeErrorFingerprint "proj1" (Just "api") (Just "/health") "nodejs" "" "Connection refused to 192.168.1.1:5432" "" +-- "14595802078498498993" +-- +-- Same error from different IPs produces same fingerprint (IP normalized): +-- >>> computeErrorFingerprint "proj1" (Just "db") Nothing "python" "" "Connection refused to 10.0.0.1:5432" "" == computeErrorFingerprint "proj1" (Just "db") Nothing "python" "" "Connection refused to 172.16.0.50:5432" "" +-- True +-- +-- Same stack trace produces same fingerprint regardless of message: +-- >>> computeErrorFingerprint "proj1" Nothing Nothing "nodejs" "Error" "message 1" "at handler (/app/index.js:10:5)" == computeErrorFingerprint "proj1" Nothing Nothing "nodejs" "Error" "different message" "at handler (/app/index.js:10:5)" +-- True +-- computeErrorFingerprint :: Text -> Maybe Text -> Maybe Text -> Text -> Text -> Text -> Text -> Text computeErrorFingerprint projectIdText mService spanName runtime exceptionType message stackTrace = let diff --git a/src/Models/Apis/Issues/Enhancement.hs b/src/Models/Apis/Issues/Enhancement.hs index 4c204e47a..835bf422d 100644 --- a/src/Models/Apis/Issues/Enhancement.hs +++ b/src/Models/Apis/Issues/Enhancement.hs @@ -133,6 +133,8 @@ buildTitlePrompt issue = <> "Actual value: " <> toText (show alertData.actualValue) _ -> "Generate a concise title for this query alert." + -- Other issue types should use simpleTitle, but provide fallback for exhaustiveness + _ -> "Generate a concise title for this issue: " <> issue.title systemPrompt = unlines @@ -196,6 +198,8 @@ buildDescriptionPrompt issue = <> "Triggered at: " <> toText (show alertData.triggeredAt) _ -> "Describe this query alert." 
+ -- Other issue types should use simpleDescription, but provide fallback for exhaustiveness + _ -> "Describe this issue and provide recommended actions: " <> issue.title systemPrompt = unlines @@ -254,6 +258,8 @@ buildCriticalityPrompt issue = "Runtime exception: " <> issue.title Issues.QueryAlert -> "Query alert: " <> issue.title + -- Other issue types should use simpleCriticality, but provide fallback for exhaustiveness + _ -> "Issue: " <> issue.title systemPrompt = unlines diff --git a/src/Models/Apis/LogPatterns.hs b/src/Models/Apis/LogPatterns.hs index 7d81296c9..cee0d8ff6 100644 --- a/src/Models/Apis/LogPatterns.hs +++ b/src/Models/Apis/LogPatterns.hs @@ -174,8 +174,8 @@ acknowledgeLogPatterns uid patternHashes upsertLogPattern :: DB es => Projects.ProjectId -> Text -> Text -> Maybe Text -> Maybe Text -> Maybe Text -> Maybe Text -> Eff es Int64 -upsertLogPattern pid pat patHash serviceName logLevel traceId sampleMsg = - PG.execute q (pid, pat, patHash, serviceName, logLevel, traceId, sampleMsg) +upsertLogPattern pid pat patHash serviceName logLevel trId sampleMsg = + PG.execute q (pid, pat, patHash, serviceName, logLevel, trId, sampleMsg) where q = [sql| diff --git a/src/ProcessMessage.hs b/src/ProcessMessage.hs index 8fdd5ac3f..37274651a 100644 --- a/src/ProcessMessage.hs +++ b/src/ProcessMessage.hs @@ -292,7 +292,7 @@ processSpanToEntities pjc otelSpan dumpId = !outgoing = otelSpan.kind == Just "client" -- Extract trace ID from context for linking - !traceId = otelSpan.context >>= Telemetry.trace_id + !trId = otelSpan.context >>= Telemetry.trace_id -- Extract service name from resource !serviceName = Telemetry.atMapText "service.name" (unAesonTextMaybe otelSpan.resource) @@ -319,8 +319,8 @@ processSpanToEntities pjc otelSpan dumpId = , hash = endpointHash , outgoing = outgoing , description = "" - , firstTraceId = traceId - , recentTraceId = traceId + , firstTraceId = trId + , recentTraceId = trId , service = serviceName , baselineState = def , baselineSamples = 0 @@ -363,8 +363,8 @@ processSpanToEntities pjc otelSpan dumpId = , requestDescription = "" , exampleRequestPayload = requestBody , exampleResponsePayload = responseBody - , firstTraceId = traceId - , recentTraceId = traceId + , firstTraceId = trId + , recentTraceId = trId , service = serviceName } From 23849d1bfaea2cb0965b4a2cd83b2712008f9676 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Sun, 18 Jan 2026 14:21:47 +0000 Subject: [PATCH 67/71] fix build --- src/BackgroundJobs.hs | 19 +++++++++---------- src/Pages/Anomalies.hs | 4 ++-- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/src/BackgroundJobs.hs b/src/BackgroundJobs.hs index 8fe07269f..940a80a2b 100644 --- a/src/BackgroundJobs.hs +++ b/src/BackgroundJobs.hs @@ -414,15 +414,15 @@ runHourlyJob scheduledTime hour = do liftIO $ withResource ctx.jobsPool \conn -> forM_ activeProjects \pid -> do -- Error baseline and spike detection - createJob conn "background_jobs" $ ErrorBaselineCalculation pid - createJob conn "background_jobs" $ ErrorSpikeDetection pid + _ <- createJob conn "background_jobs" $ ErrorBaselineCalculation pid + _ <- createJob conn "background_jobs" $ ErrorSpikeDetection pid -- Log pattern baseline and spike detection - createJob conn "background_jobs" $ LogPatternBaselineCalculation pid - createJob conn "background_jobs" $ LogPatternSpikeDetection pid + _ <- createJob conn "background_jobs" $ LogPatternBaselineCalculation pid + _ <- createJob conn "background_jobs" $ LogPatternSpikeDetection pid -- Endpoint baseline and anomaly 
detection - createJob conn "background_jobs" $ EndpointBaselineCalculation pid - createJob conn "background_jobs" $ EndpointLatencyDegradationDetection pid - createJob conn "background_jobs" $ EndpointErrorRateSpikeDetection pid + _ <- createJob conn "background_jobs" $ EndpointBaselineCalculation pid + _ <- createJob conn "background_jobs" $ EndpointLatencyDegradationDetection pid + _ <- createJob conn "background_jobs" $ EndpointErrorRateSpikeDetection pid createJob conn "background_jobs" $ EndpointVolumeRateChangeDetection pid -- Cleanup expired query cache entries @@ -555,14 +555,13 @@ processPatterns kind fieldName events pid scheduledTime since = do -- Update otel_logs_and_spans with pattern void $ PG.execute (Query $ encodeUtf8 q) (patternTxt, pid, since, V.filter (/= "") ids) Relude.when (kind == "log" && not (T.null patternTxt)) $ do - let (serviceName, logLevel, traceId) = case V.head ids of + let (serviceName, logLevel, logTraceId) = case V.head ids of logId | logId /= "" -> case V.find (\(i, _, _, sName, lvl) -> i == logId) events of Just (_, _, trId, sName, lvl) -> (sName, lvl, trId) Nothing -> (Nothing, Nothing, Nothing) _ -> (Nothing, Nothing, Nothing) let patternHash = toXXHash patternTxt - traceShowM ("Upserting log pattern", pid, patternTxt, patternHash, sampleMsg) - void $ LogPatterns.upsertLogPattern pid patternTxt patternHash serviceName logLevel traceId (Just sampleMsg) + void $ LogPatterns.upsertLogPattern pid patternTxt patternHash serviceName logLevel logTraceId (Just sampleMsg) -- | Process a batch of (id, isSampleLog, content, serviceName, level) tuples through Drain diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index 31f116b9e..9d915d03c 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -277,8 +277,8 @@ anomalyDetailCore pid firstM fetchIssue = do } -- Helper to fetch trace and spans given a trace ID let fetchTraceData traceIdM timeHint = case traceIdM of - Just traceId -> do - trM <- Telemetry.getTraceDetails pid traceId timeHint now + Just trId -> do + trM <- Telemetry.getTraceDetails pid trId timeHint now case trM of Just traceItem -> do spanRecords' <- Telemetry.getSpanRecordsByTraceId pid traceItem.traceId (Just traceItem.traceStartTime) now From bcd95d6b3459cca035b2c10acc22bbc5606485b4 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 18 Jan 2026 14:22:17 +0000 Subject: [PATCH 68/71] Auto-format code with fourmolu --- src/Models/Apis/Errors.hs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index ce0ee6185..c523fd142 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -703,7 +703,6 @@ data StackFrame = StackFrame -- -- >>> map sfFunction $ parseStackTrace "java" "at com.example.MyClass.doWork(MyClass.java:25)\nat org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1067)" -- ["doWork","doDispatch"] --- parseStackTrace :: Text -> Text -> [StackFrame] parseStackTrace mSdk stackText = let lns = filter (not . T.null . 
T.strip) $ T.lines stackText @@ -1106,7 +1105,6 @@ parseGenericFrame line = -- -- >>> normalizeStackTrace "unknown" "some random frame info" -- "some random frame info|some" --- normalizeStackTrace :: Text -> Text -> Text normalizeStackTrace runtime stackText = let frames = parseStackTrace runtime stackText @@ -1157,7 +1155,6 @@ normalizeStackTrace runtime stackText = -- -- >>> normalizeMessage " Trimmed message " -- "Trimmed message" --- normalizeMessage :: Text -> Text normalizeMessage msg = let lns = take 2 $ filter (not . T.null . T.strip) $ T.lines msg @@ -1192,7 +1189,6 @@ normalizeMessage msg = -- Same stack trace produces same fingerprint regardless of message: -- >>> computeErrorFingerprint "proj1" Nothing Nothing "nodejs" "Error" "message 1" "at handler (/app/index.js:10:5)" == computeErrorFingerprint "proj1" Nothing Nothing "nodejs" "Error" "different message" "at handler (/app/index.js:10:5)" -- True --- computeErrorFingerprint :: Text -> Maybe Text -> Maybe Text -> Text -> Text -> Text -> Text -> Text computeErrorFingerprint projectIdText mService spanName runtime exceptionType message stackTrace = let From 6f31138c12df1f5c7cb17656efb0ed79796274a8 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Mon, 19 Jan 2026 17:02:12 +0000 Subject: [PATCH 69/71] fix duration formatting --- src/Pages/Anomalies.hs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/Pages/Anomalies.hs b/src/Pages/Anomalies.hs index 9d915d03c..a2720a1a2 100644 --- a/src/Pages/Anomalies.hs +++ b/src/Pages/Anomalies.hs @@ -608,8 +608,8 @@ anomalyDetailPage pid issue tr otellogs errM now isFirst = do div_ [class_ "w-px h-4 bg-strokeWeak"] "" div_ [class_ "grid grid-cols-2 lg:grid-cols-4 gap-4 mb-4"] do statBox_ (Just pid) Nothing "Percentile" "" d.percentile Nothing Nothing - statBox_ (Just pid) Nothing "Current Latency" "" (show (round d.currentLatencyMs :: Int) <> "ms") Nothing Nothing - statBox_ (Just pid) Nothing "Baseline" "" (show (round d.baselineLatencyMs :: Int) <> "ms") Nothing Nothing + statBox_ (Just pid) Nothing "Current Latency" "" (toText $ getDurationNSMS (round d.currentLatencyMs)) Nothing Nothing + statBox_ (Just pid) Nothing "Baseline" "" (toText $ getDurationNSMS (round d.baselineLatencyMs)) Nothing Nothing div_ [class_ "p-4 bg-fillWarning-weak rounded-lg border border-strokeWarning-weak mb-4"] do span_ [class_ "text-sm text-fillWarning-strong font-medium"] $ toHtml $ "Latency increased by " <> show (round d.degradationPercent :: Int) <> "% (z-score: " <> show (round d.zScore :: Int) <> ")" _ -> pass @@ -1468,8 +1468,8 @@ renderIssueMainCol pid (IssueVM hideByDefault isWidget currTime timeFilter issue let isAcknowledged = isJust issue.acknowledgedAt let acknowledgeEndpoint = "/p/" <> issue.projectId.toText <> "/anomalies/" <> Issues.issueIdText issue.id <> if isAcknowledged then "/unacknowledge" else "/acknowledge" button_ - [ class_ $ "inline-flex items-center justify-center whitespace-nowrap text-sm font-medium transition-all h-8 rounded-md gap-1.5 px-3 " <> if isAcknowledged then "bg-fillSuccess-weak text-fillSuccess-strong border border-strokeSuccess-weak hover:bg-fillSuccess-weak/80" else "bg-fillPrimary text-textInverse-strong hover:bg-fillPrimary/90" - , hxGet_ acknowledgeEndpoint + [ class_ ("inline-flex items-center justify-center whitespace-nowrap text-sm font-medium transition-all h-8 rounded-md gap-1.5 px-3 " <> if isAcknowledged then "bg-fillSuccess-weak text-fillSuccess-strong border border-strokeSuccess-weak hover:bg-fillSuccess-weak/80" else 
"bg-fillBrand-strong text-white hover:bg-fillBrand-weak") + , hxPost_ acknowledgeEndpoint , hxSwap_ "outerHTML" , hxTarget_ "closest .itemsListItem" ] From 5402f820bf12fbf06998afa49ba730cec43f622a Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Mon, 19 Jan 2026 21:18:45 +0000 Subject: [PATCH 70/71] towards fixing tests --- test/integration/Pages/AnomaliesSpec.hs | 33 ++++--- .../Pages/Endpoints/ApiCatalogSpec.hs | 6 +- test/unit/Pkg/DrainSpec.hs | 98 +++++++++---------- 3 files changed, 72 insertions(+), 65 deletions(-) diff --git a/test/integration/Pages/AnomaliesSpec.hs b/test/integration/Pages/AnomaliesSpec.hs index 50486c2c2..39b90883c 100644 --- a/test/integration/Pages/AnomaliesSpec.hs +++ b/test/integration/Pages/AnomaliesSpec.hs @@ -94,9 +94,11 @@ spec = aroundAll withTestResources do pendingJobs <- getPendingBackgroundJobs tr.trATCtx logBackgroundJobsInfo tr.trLogger pendingJobs - -- Run only NewAnomaly jobs (which create issues from anomalies) + -- Run only API change jobs (which create issues from anomalies) _ <- runBackgroundJobsWhere tr.trATCtx $ \case - BackgroundJobs.NewAnomaly{} -> True + BackgroundJobs.NewEndpoint{} -> True + BackgroundJobs.NewShape{} -> True + BackgroundJobs.NewFieldChange{} -> True _ -> False createRequestDumps tr testPid 10 @@ -126,7 +128,8 @@ spec = aroundAll withTestResources do logBackgroundJobsInfo tr.trLogger pendingJobs2 _ <- runBackgroundJobsWhere tr.trATCtx $ \case - BackgroundJobs.NewAnomaly{anomalyType = aType} -> aType == "shape" || aType == "field" + BackgroundJobs.NewShape{} -> True + BackgroundJobs.NewFieldChange{} -> True _ -> False -- Acknowledge the endpoint anomaly directly using Issues module @@ -144,7 +147,7 @@ spec = aroundAll withTestResources do AnomalyList.anomalyListGetH testPid Nothing (Just "Acknowleged") Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing case pg of AnomalyList.ALPage (PageCtx _ tbl) -> do - let acknowledgedApiChangeIssues = V.filter (\(AnomalyList.IssueVM _ _ _ _ c) -> c.issueType == Issues.APIChange) tbl.rows + let acknowledgedApiChangeIssues = V.filter (\(AnomalyList.IssueVM _ _ _ _ c) -> c.issueType == Issues.NewEndpoint) tbl.rows V.length acknowledgedApiChangeIssues `shouldSatisfy` (> 0) _ -> error "Unexpected response" @@ -164,12 +167,13 @@ spec = aroundAll withTestResources do processMessagesAndBackgroundJobs tr msgs createRequestDumps tr testPid 10 - -- Get pending jobs and run only NewAnomaly jobs for shapes and fields + -- Get pending jobs and run only shape/field jobs pendingJobs3 <- getPendingBackgroundJobs tr.trATCtx logBackgroundJobsInfo tr.trLogger pendingJobs3 _ <- runBackgroundJobsWhere tr.trATCtx $ \case - BackgroundJobs.NewAnomaly{anomalyType = aType} -> aType == "shape" || aType == "field" + BackgroundJobs.NewShape{} -> True + BackgroundJobs.NewFieldChange{} -> True _ -> False -- Verify issues exist in the database (they may be acknowledged from previous test) @@ -189,7 +193,8 @@ spec = aroundAll withTestResources do logBackgroundJobsInfo tr.trLogger pendingJobs4 _ <- runBackgroundJobsWhere tr.trATCtx $ \case - BackgroundJobs.NewAnomaly{anomalyType = aType} -> aType == "shape" || aType == "field" + BackgroundJobs.NewShape{} -> True + BackgroundJobs.NewFieldChange{} -> True _ -> False -- Find and acknowledge the API change issues @@ -212,19 +217,19 @@ spec = aroundAll withTestResources do let msgs = [("m4", toStrict $ AE.encode reqMsg4)] processMessagesAndBackgroundJobs tr msgs - -- Get and run format anomaly jobs + -- Get and run format anomaly jobs (format changes are 
handled as field changes) pendingJobs5 <- getPendingBackgroundJobs tr.trATCtx logBackgroundJobsInfo tr.trLogger pendingJobs5 _ <- runBackgroundJobsWhere tr.trATCtx $ \case - BackgroundJobs.NewAnomaly{anomalyType = "format"} -> True + BackgroundJobs.NewFieldChange{} -> True _ -> False -- Get updated anomaly list anomalies <- getAnomalies tr - let formatApiChangeIssues = V.filter (\(AnomalyList.IssueVM _ _ _ _ c) -> c.issueType == Issues.APIChange) anomalies + let formatApiChangeIssues = V.filter (\(AnomalyList.IssueVM _ _ _ _ c) -> c.issueType == Issues.FieldChange) anomalies - -- In the new Issues system, format anomalies are part of API changes + -- In the new Issues system, format anomalies are part of field changes length formatApiChangeIssues `shouldSatisfy` (>= 1) length anomalies `shouldSatisfy` (> 0) @@ -233,10 +238,10 @@ spec = aroundAll withTestResources do AnomalyList.anomalyListGetH testPid Nothing (Just "Acknowledged") Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing case pg of AnomalyList.ALPage (PageCtx _ tbl) -> do - -- Acknowledged anomalies should include API changes - let acknowledgedApiChangeIssues = V.filter (\(AnomalyList.IssueVM _ _ _ _ c) -> c.issueType == Issues.APIChange) tbl.rows + -- Acknowledged anomalies should include new endpoint issues + let acknowledgedApiChangeIssues = V.filter (\(AnomalyList.IssueVM _ _ _ _ c) -> c.issueType == Issues.NewEndpoint) tbl.rows - -- We acknowledged at least one API change issue in the previous test + -- We acknowledged at least one new endpoint issue in the previous test length acknowledgedApiChangeIssues `shouldSatisfy` (>= 1) length tbl.rows `shouldSatisfy` (> 0) _ -> error "Unexpected response" diff --git a/test/integration/Pages/Endpoints/ApiCatalogSpec.hs b/test/integration/Pages/Endpoints/ApiCatalogSpec.hs index 77a2f1576..07fcacd45 100644 --- a/test/integration/Pages/Endpoints/ApiCatalogSpec.hs +++ b/test/integration/Pages/Endpoints/ApiCatalogSpec.hs @@ -164,9 +164,11 @@ spec = aroundAll withTestResources do pendingJobs <- getPendingBackgroundJobs tr.trATCtx logBackgroundJobsInfo tr.trLogger pendingJobs - -- Run only NewAnomaly jobs to create issues from anomalies + -- Run only API change jobs to create issues from anomalies _ <- runBackgroundJobsWhere tr.trATCtx $ \case - BackgroundJobs.NewAnomaly{} -> True + BackgroundJobs.NewEndpoint{} -> True + BackgroundJobs.NewShape{} -> True + BackgroundJobs.NewFieldChange{} -> True _ -> False -- Verify issues were created diff --git a/test/unit/Pkg/DrainSpec.hs b/test/unit/Pkg/DrainSpec.hs index 276a8d37d..6c0149c74 100644 --- a/test/unit/Pkg/DrainSpec.hs +++ b/test/unit/Pkg/DrainSpec.hs @@ -24,7 +24,7 @@ processNewLog logId logContent now tree = do firstToken = if V.null tokensVec then "" else V.head tokensVec in if tokenCount == 0 then tree -- Skip empty logs - else updateTreeWithLog tree tokenCount firstToken tokensVec logId logContent now + else updateTreeWithLog tree tokenCount firstToken tokensVec logId True logContent now processBatch :: V.Vector (Text, Text) -> UTCTime -> DrainTree -> DrainTree processBatch logBatch now initialTree = do V.foldl (\tree (logId, logContent) -> processNewLog logId logContent now tree) initialTree logBatch @@ -37,23 +37,23 @@ spec = describe "DRAIN updateTreeWithLog" $ do updatedTree = processBatch (V.fromList basicHttpLogs) (testTimeOffset 0) initialTree logGroups = getAllLogGroups updatedTree length logGroups `shouldBe` 3 - let patterns = V.map fst logGroups - V.toList patterns `shouldMatchList` + let patterns = V.map (\(_, 
template, _) -> template) logGroups + V.toList patterns `shouldMatchList` [ "DELETE /api/users/{integer} HTTP/{float} {integer}" , "POST /api/users HTTP/{float} {integer}" , "GET <*> HTTP/{float} {integer}" - ] - let log1 = V.find (\(tmp, logIds) -> tmp == "GET <*> HTTP/{float} {integer}") logGroups + ] + let log1 = V.find (\(_, tmp, _) -> tmp == "GET <*> HTTP/{float} {integer}") logGroups case log1 of - Just (_, lg) -> lg `shouldBe` V.fromList ["log5", "log3", "log2", "log1"] + Just (_, _, lg) -> lg `shouldBe` V.fromList ["log5", "log3", "log2", "log1"] Nothing -> error "log1 pattern not found" - let log2 = V.find (\(tmp, logIds) -> tmp == "POST /api/users HTTP/{float} {integer}") logGroups + let log2 = V.find (\(_, tmp, _) -> tmp == "POST /api/users HTTP/{float} {integer}") logGroups case log2 of - Just (_, lg) -> lg `shouldBe` V.fromList ["log4"] + Just (_, _, lg) -> lg `shouldBe` V.fromList ["log4"] Nothing -> error "log2 pattern not found" - let log3 = V.find (\(tmp, logIds) -> tmp == "DELETE /api/users/{integer} HTTP/{float} {integer}") logGroups + let log3 = V.find (\(_, tmp, _) -> tmp == "DELETE /api/users/{integer} HTTP/{float} {integer}") logGroups case log3 of - Just (_, lg) -> lg `shouldBe` V.fromList ["log6"] + Just (_, _, lg) -> lg `shouldBe` V.fromList ["log6"] Nothing -> error "log3 pattern not found" pass @@ -61,23 +61,23 @@ spec = describe "DRAIN updateTreeWithLog" $ do let initialTree = emptyDrainTree updatedTree = processBatch (V.fromList databaseLogs) (testTimeOffset 0) initialTree logGroups = getAllLogGroups updatedTree - let patterns = V.map fst logGroups + let patterns = V.map (\(_, template, _) -> template) logGroups V.toList patterns `shouldMatchList` [ "Connected to database <*>" , "Database query executed in {integer}ms" , "Connection pool exhausted max={integer} active={integer}" ] - let log1 = V.find (\(tmp, logIds) -> tmp == "Connected to database <*>") logGroups + let log1 = V.find (\(_, tmp, _) -> tmp == "Connected to database <*>") logGroups case log1 of - Just (_, lg) -> lg `shouldBe` V.fromList ["db3", "db2", "db1"] + Just (_, _, lg) -> lg `shouldBe` V.fromList ["db3", "db2", "db1"] Nothing -> error "db1 pattern not found" - let log2 = V.find (\(tmp, logIds) -> tmp == "Database query executed in {integer}ms") logGroups + let log2 = V.find (\(_, tmp, _) -> tmp == "Database query executed in {integer}ms") logGroups case log2 of - Just (_, lg) -> lg `shouldBe` V.fromList ["db6", "db5", "db4"] + Just (_, _, lg) -> lg `shouldBe` V.fromList ["db6", "db5", "db4"] Nothing -> error "db2 pattern not found" - let log3 = V.find (\(tmp, logIds) -> tmp == "Connection pool exhausted max={integer} active={integer}") logGroups + let log3 = V.find (\(_, tmp, _) -> tmp == "Connection pool exhausted max={integer} active={integer}") logGroups case log3 of - Just (_, lg) -> lg `shouldBe` V.fromList ["db8", "db7"] + Just (_, _, lg) -> lg `shouldBe` V.fromList ["db8", "db7"] Nothing -> error "db3 pattern not found" pass @@ -86,28 +86,28 @@ spec = describe "DRAIN updateTreeWithLog" $ do updatedTree = processBatch (V.fromList startupLogs) (testTimeOffset 0) initialTree logGroups = getAllLogGroups updatedTree length logGroups `shouldBe` 4 - let patterns = V.map fst logGroups + let patterns = V.map (\(_, template, _) -> template) logGroups V.toList patterns `shouldMatchList` [ "Initializing Redis connection <*>" , "Application ready to serve requests" , "Loading configuration from <*>" , "Starting application on port {integer}" ] - let log1 = V.find (\(tmp, logIds) -> tmp == 
"Starting application on port {integer}") logGroups + let log1 = V.find (\(_, tmp, _) -> tmp == "Starting application on port {integer}") logGroups case log1 of - Just (_, lg) -> V.toList lg `shouldMatchList` ["start3", "start2", "start1"] + Just (_, _, lg) -> V.toList lg `shouldMatchList` ["start3", "start2", "start1"] Nothing -> error "start1 pattern not found" - let log2 = V.find (\(tmp, logIds) -> tmp == "Loading configuration from <*>") logGroups + let log2 = V.find (\(_, tmp, _) -> tmp == "Loading configuration from <*>") logGroups case log2 of - Just (_, lg) -> V.toList lg `shouldMatchList` ["start4", "start5"] + Just (_, _, lg) -> V.toList lg `shouldMatchList` ["start4", "start5"] Nothing -> error "start4 pattern not found" - let log3 = V.find (\(tmp, logIds) -> tmp == "Initializing Redis connection <*>") logGroups + let log3 = V.find (\(_, tmp, _) -> tmp == "Initializing Redis connection <*>") logGroups case log3 of - Just (_, lg) -> V.toList lg `shouldMatchList` ["start6", "start7"] + Just (_, _, lg) -> V.toList lg `shouldMatchList` ["start6", "start7"] Nothing -> error "start6 pattern not found" - let log4 = V.find (\(tmp, logIds) -> tmp == "Application ready to serve requests") logGroups + let log4 = V.find (\(_, tmp, _) -> tmp == "Application ready to serve requests") logGroups case log4 of - Just (_, lg) -> V.toList lg `shouldMatchList` ["start9", "start8"] + Just (_, _, lg) -> V.toList lg `shouldMatchList` ["start9", "start8"] Nothing -> error "start8 pattern not found" pass @@ -116,28 +116,28 @@ spec = describe "DRAIN updateTreeWithLog" $ do updatedTree = processBatch (V.fromList errorLogs) (testTimeOffset 0) initialTree logGroups = getAllLogGroups updatedTree length logGroups `shouldBe` 4 - let patterns = V.map fst logGroups + let patterns = V.map (\(_, template, _) -> template) logGroups V.toList patterns `shouldMatchList` [ "ERROR Failed to authenticate user {email}" , "ERROR Database connection timeout after {integer}ms" , "WARN Retrying failed request attempt {integer} of {integer}" , "FATAL Out of memory heap size {integer}MB exceeded" ] - let log1 = V.find (\(tmp, logIds) -> tmp == "WARN Retrying failed request attempt {integer} of {integer}") logGroups + let log1 = V.find (\(_, tmp, _) -> tmp == "WARN Retrying failed request attempt {integer} of {integer}") logGroups case log1 of - Just (_, lg) -> V.toList lg `shouldMatchList` ["err7", "err6", "err5"] + Just (_, _, lg) -> V.toList lg `shouldMatchList` ["err7", "err6", "err5"] Nothing -> error "err1 pattern not found" - let log2 = V.find (\(tmp, logIds) -> tmp == "ERROR Failed to authenticate user {email}") logGroups + let log2 = V.find (\(_, tmp, _) -> tmp == "ERROR Failed to authenticate user {email}") logGroups case log2 of - Just (_, lg) -> V.toList lg `shouldMatchList` ["err2", "err1"] + Just (_, _, lg) -> V.toList lg `shouldMatchList` ["err2", "err1"] Nothing -> error "err2 pattern not found" - let log3 = V.find (\(tmp, logIds) -> tmp == "ERROR Database connection timeout after {integer}ms") logGroups + let log3 = V.find (\(_, tmp, _) -> tmp == "ERROR Database connection timeout after {integer}ms") logGroups case log3 of - Just (_, lg) -> V.toList lg `shouldMatchList` ["err4", "err3"] + Just (_, _, lg) -> V.toList lg `shouldMatchList` ["err4", "err3"] Nothing -> error "err3 pattern not found" - let log4 = V.find (\(tmp, logIds) -> tmp == "FATAL Out of memory heap size {integer}MB exceeded") logGroups + let log4 = V.find (\(_, tmp, _) -> tmp == "FATAL Out of memory heap size {integer}MB exceeded") logGroups case 
log4 of - Just (_, lg) -> V.toList lg `shouldMatchList` ["err9", "err8"] + Just (_, _, lg) -> V.toList lg `shouldMatchList` ["err9", "err8"] Nothing -> error "pattern not found" pass @@ -145,49 +145,49 @@ spec = describe "DRAIN updateTreeWithLog" $ do let initialTree = emptyDrainTree updatedTree = processBatch (V.fromList timestampedLogs) (testTimeOffset 0) initialTree logGroups = getAllLogGroups updatedTree - let patterns = V.map fst logGroups + let patterns = V.map (\(_, template, _) -> template) logGroups V.toList patterns `shouldMatchList` [ "{YYYY-MM-DDThh:mm:ss.sTZD} INFO User <*> <*> <*>" , "{YYYY-MM-DDThh:mm:ss.sTZD} ERROR Invalid token provided <*>" , "{YYYY-MM-DDThh:mm:ss.sTZD} WARN Rate limit exceeded client={ipv4}" ] - let log1 = V.find (\(tmp, logIds) -> tmp == "{YYYY-MM-DDThh:mm:ss.sTZD} INFO User <*> <*> <*>") logGroups + let log1 = V.find (\(_, tmp, _) -> tmp == "{YYYY-MM-DDThh:mm:ss.sTZD} INFO User <*> <*> <*>") logGroups case log1 of - Just (_, lg) -> do + Just (_, _, lg) -> do "ts1" `V.elem` lg `shouldBe` True "ts2" `V.elem` lg `shouldBe` True Nothing -> error "ts1 pattern not found" - let log3 = V.find (\(tmp, logIds) -> tmp == "{YYYY-MM-DDThh:mm:ss.sTZD} ERROR Invalid token provided <*>") logGroups + let log3 = V.find (\(_, tmp, _) -> tmp == "{YYYY-MM-DDThh:mm:ss.sTZD} ERROR Invalid token provided <*>") logGroups case log3 of - Just (_, lg) -> V.toList lg `shouldMatchList` ["ts5", "ts6"] + Just (_, _, lg) -> V.toList lg `shouldMatchList` ["ts5", "ts6"] Nothing -> error "ts5 pattern not found" - let log4 = V.find (\(tmp, logIds) -> tmp == "{YYYY-MM-DDThh:mm:ss.sTZD} WARN Rate limit exceeded client={ipv4}") logGroups + let log4 = V.find (\(_, tmp, _) -> tmp == "{YYYY-MM-DDThh:mm:ss.sTZD} WARN Rate limit exceeded client={ipv4}") logGroups case log4 of - Just (_, lg) -> V.toList lg `shouldMatchList` ["ts7", "ts8"] + Just (_, _, lg) -> V.toList lg `shouldMatchList` ["ts7", "ts8"] Nothing -> error "ts7 pattern not found" it "should get correct log patterns for microservice logs" $ do let initialTree = emptyDrainTree updatedTree = processBatch (V.fromList microserviceLogs) (testTimeOffset 0) initialTree logGroups = getAllLogGroups updatedTree - let patterns = V.map fst logGroups + let patterns = V.map (\(_, template, _) -> template) logGroups V.toList patterns `shouldMatchList` [ "payment-service processing payment amount={float} <*>" , "auth-service JWT validation successful for user <*>" , "user-service database query SELECT * FROM users WHERE id={integer} took {integer}ms" , "user-service received request <*> <*> <*>" ] - let log1 = V.find (\(tmp, logIds) -> tmp == "user-service received request <*> <*> <*>" ) logGroups + let log1 = V.find (\(_, tmp, _) -> tmp == "user-service received request <*> <*> <*>" ) logGroups case log1 of - Just (_, lg) -> V.toList lg `shouldMatchList` ["svc2", "svc1"] + Just (_, _, lg) -> V.toList lg `shouldMatchList` ["svc2", "svc1"] Nothing -> error "svc1 pattern not found" - let log3 = V.find (\(tmp, logIds) -> tmp == "user-service database query SELECT * FROM users WHERE id={integer} took {integer}ms") logGroups + let log3 = V.find (\(_, tmp, _) -> tmp == "user-service database query SELECT * FROM users WHERE id={integer} took {integer}ms") logGroups case log3 of - Just (_, lg) -> V.toList lg `shouldMatchList` ["svc4", "svc5"] + Just (_, _, lg) -> V.toList lg `shouldMatchList` ["svc4", "svc5"] Nothing -> error "svc4 pattern not found" - let log4 = V.find (\(tmp, logIds) -> tmp == "auth-service JWT validation successful for user <*>") 
logGroups + let log4 = V.find (\(_, tmp, _) -> tmp == "auth-service JWT validation successful for user <*>") logGroups case log4 of - Just (_, lg) -> V.toList lg `shouldMatchList` ["svc7", "svc8"] + Just (_, _, lg) -> V.toList lg `shouldMatchList` ["svc7", "svc8"] Nothing -> error "svc7 pattern not found" basicHttpLogs :: [(Text, Text)] From 2398e58682bc7251d8efddb18d173803f4effa19 Mon Sep 17 00:00:00 2001 From: Yussif Mohammed Date: Mon, 19 Jan 2026 22:24:09 +0000 Subject: [PATCH 71/71] more test fixes --- src/Models/Apis/Errors.hs | 8 ++++---- static/migrations/0028_rebuild_issues_table.sql | 1 + static/migrations/0031_new_api_change_triggers.sql | 3 ++- test/integration/Pages/AnomaliesSpec.hs | 8 ++++---- test/integration/Pages/Endpoints/ApiCatalogSpec.hs | 4 ++-- 5 files changed, 13 insertions(+), 11 deletions(-) diff --git a/src/Models/Apis/Errors.hs b/src/Models/Apis/Errors.hs index c523fd142..be49545aa 100644 --- a/src/Models/Apis/Errors.hs +++ b/src/Models/Apis/Errors.hs @@ -1104,7 +1104,7 @@ parseGenericFrame line = -- "com.example.MyClass|doWork\ncom.example.Service|process" -- -- >>> normalizeStackTrace "unknown" "some random frame info" --- "some random frame info|some" +-- "some" normalizeStackTrace :: Text -> Text -> Text normalizeStackTrace runtime stackText = let frames = parseStackTrace runtime stackText @@ -1172,15 +1172,15 @@ normalizeMessage msg = -- -- With stack trace - uses projectId, exceptionType, and normalized stack: -- >>> computeErrorFingerprint "proj1" (Just "svc") (Just "span") "nodejs" "TypeError" "msg" "at handler (/app/index.js:10:5)" --- "7090116541995986264" +-- "269748a1" -- -- Without stack trace but with exception type - uses projectId, service, span, type, and message: -- >>> computeErrorFingerprint "proj1" (Just "user-service") (Just "/api/users") "nodejs" "ValidationError" "Invalid email format" "" --- "1283749781654932840" +-- "af0aa163" -- -- Without stack trace or exception type - uses projectId, service, span, and message only: -- >>> computeErrorFingerprint "proj1" (Just "api") (Just "/health") "nodejs" "" "Connection refused to 192.168.1.1:5432" "" --- "14595802078498498993" +-- "44e50418" -- -- Same error from different IPs produces same fingerprint (IP normalized): -- >>> computeErrorFingerprint "proj1" (Just "db") Nothing "python" "" "Connection refused to 10.0.0.1:5432" "" == computeErrorFingerprint "proj1" (Just "db") Nothing "python" "" "Connection refused to 172.16.0.50:5432" "" diff --git a/static/migrations/0028_rebuild_issues_table.sql b/static/migrations/0028_rebuild_issues_table.sql index cc0b3670b..8066be765 100644 --- a/static/migrations/0028_rebuild_issues_table.sql +++ b/static/migrations/0028_rebuild_issues_table.sql @@ -36,6 +36,7 @@ CREATE TABLE apis.issues ( request_payloads JSONB NOT NULL DEFAULT '[]'::jsonb, response_payloads JSONB NOT NULL DEFAULT '[]'::jsonb, archived_at TIMESTAMPTZ, + resolved_at TIMESTAMPTZ, llm_enhanced_at TIMESTAMPTZ, llm_enhancement_version INT, -- Flexible category-specific data diff --git a/static/migrations/0031_new_api_change_triggers.sql b/static/migrations/0031_new_api_change_triggers.sql index 116462be8..a35fe06b1 100644 --- a/static/migrations/0031_new_api_change_triggers.sql +++ b/static/migrations/0031_new_api_change_triggers.sql @@ -10,7 +10,7 @@ BEGIN END IF; job_tag := TG_ARGV[0]; INSERT INTO background_jobs (run_at, status, payload) - VALUES (now(),'queued',jsonb_build_object('tag', job_tag,'projectId', NEW.project_id,'hash', NEW.hash)); + VALUES 
(now(),'queued',jsonb_build_object('tag', job_tag,'contents', jsonb_build_array(NEW.project_id, NEW.hash))); RETURN NULL; END; $$ LANGUAGE plpgsql; @@ -18,6 +18,7 @@ $$ LANGUAGE plpgsql; DROP TRIGGER IF EXISTS fields_created_anomaly ON apis.fields; DROP TRIGGER IF EXISTS endpoint_created_anomaly ON apis.endpoints; DROP TRIGGER IF EXISTS shapes_created_anomaly ON apis.shapes; +DROP TRIGGER IF EXISTS format_created_anomaly ON apis.formats; CREATE TRIGGER endpoint_created_new AFTER INSERT ON apis.endpoints FOR EACH ROW EXECUTE FUNCTION apis.api_change_detected_proc('NewEndpoint'); CREATE TRIGGER shape_created_new AFTER INSERT ON apis.shapes FOR EACH ROW EXECUTE FUNCTION apis.api_change_detected_proc('NewShape'); diff --git a/test/integration/Pages/AnomaliesSpec.hs b/test/integration/Pages/AnomaliesSpec.hs index 39b90883c..4c2f98138 100644 --- a/test/integration/Pages/AnomaliesSpec.hs +++ b/test/integration/Pages/AnomaliesSpec.hs @@ -104,7 +104,7 @@ spec = aroundAll withTestResources do -- Check that API change issue was created for the endpoint apiChangeIssues <- withResource tr.trPool \conn -> PGS.query conn [sql| - SELECT id FROM apis.issues WHERE project_id = ? AND issue_type = 'api_change' + SELECT id FROM apis.issues WHERE project_id = ? AND issue_type = 'new_endpoint' |] (Only testPid) :: IO [Only Issues.IssueId] length apiChangeIssues `shouldBe` 1 @@ -116,7 +116,7 @@ spec = aroundAll withTestResources do it "should acknowledge endpoint anomaly" \tr -> do -- Find the API change issue for the endpoint apiChangeIssues <- withResource tr.trPool \conn -> PGS.query conn [sql| - SELECT id FROM apis.issues WHERE project_id = ? AND issue_type = 'api_change' + SELECT id FROM apis.issues WHERE project_id = ? AND issue_type = 'new_endpoint' |] (Only testPid) :: IO [Only Issues.IssueId] length apiChangeIssues `shouldBe` 1 issueId <- case apiChangeIssues of @@ -178,7 +178,7 @@ spec = aroundAll withTestResources do -- Verify issues exist in the database (they may be acknowledged from previous test) apiChangeIssues <- withResource tr.trPool \conn -> PGS.query conn [sql| - SELECT id, endpoint_hash FROM apis.issues WHERE project_id = ? AND issue_type = 'api_change' + SELECT id, endpoint_hash FROM apis.issues WHERE project_id = ? AND issue_type = 'new_endpoint' |] (Only testPid) :: IO [(Issues.IssueId, Text)] -- There should be at least one API change issue in the database @@ -199,7 +199,7 @@ spec = aroundAll withTestResources do -- Find and acknowledge the API change issues apiChangeIssuesForAck <- withResource tr.trPool \conn -> PGS.query conn [sql| - SELECT id FROM apis.issues WHERE project_id = ? AND issue_type = 'api_change' + SELECT id FROM apis.issues WHERE project_id = ? AND issue_type = 'new_endpoint' |] (Only testPid) :: IO [Only Issues.IssueId] length apiChangeIssuesForAck `shouldSatisfy` (>= 1) diff --git a/test/integration/Pages/Endpoints/ApiCatalogSpec.hs b/test/integration/Pages/Endpoints/ApiCatalogSpec.hs index 07fcacd45..e37149b2d 100644 --- a/test/integration/Pages/Endpoints/ApiCatalogSpec.hs +++ b/test/integration/Pages/Endpoints/ApiCatalogSpec.hs @@ -175,7 +175,7 @@ spec = aroundAll withTestResources do issuesCount <- withPool tr.trPool $ DBT.query [sql| SELECT COUNT(*) FROM apis.issues - WHERE project_id = ? AND issue_type = 'api_change' + WHERE project_id = ? 
AND issue_type = 'new_endpoint' |] (Only testPid) :: IO (V.Vector (Only Int)) case issuesCount of @@ -209,7 +209,7 @@ spec = aroundAll withTestResources do _ <- withPool tr.trPool $ DBT.execute [sql| UPDATE apis.issues SET acknowledged_at = NOW(), acknowledged_by = ? - WHERE project_id = ? AND issue_type = 'api_change' + WHERE project_id = ? AND issue_type = 'new_endpoint' |] (Users.UserId UUID.nil, testPid) -- Test active filter
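
Aside on PATCH 63 above: the baseline-establishment rule changes from a fixed 24-hour sample count to a traffic- and age-based test. A minimal standalone sketch of the endpoint variant of that rule, assuming hypothetical names (nextBaselineState is illustrative only; BSLearning and BSEstablished mirror the constructors used in src/BackgroundJobs.hs):

    import Data.Time (NominalDiffTime, UTCTime, diffUTCTime)

    data BaselineState = BSLearning | BSEstablished
      deriving (Eq, Show)

    -- Graduate immediately on high traffic (> 100 events/hour), or once the
    -- entity is at least 14 days old and has seen any traffic at all.
    nextBaselineState :: UTCTime -> UTCTime -> Double -> BaselineState
    nextBaselineState now createdAt hourlyMean
      | hourlyMean > 100 = BSEstablished
      | ageDays >= 14 && hourlyMean > 0 = BSEstablished
      | otherwise = BSLearning
      where
        ageDays :: NominalDiffTime
        ageDays = diffUTCTime now createdAt / (24 * 60 * 60)

Note that the log-pattern variant in PATCH 63 drops the non-zero-traffic guard (newMean > 100 || patternAgeDays >= 14), so a quiet 14-day-old pattern still graduates.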
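
Likewise, the doctests added in PATCH 66 describe a three-level fallback for computeErrorFingerprint: normalized stack trace first, then exception type plus message, then message only. A sketch of just that selection step, again with hypothetical names (pickFingerprintInputs is not part of the codebase; the real hashing lives in computeErrorFingerprint in src/Models/Apis/Errors.hs):

    import Data.Maybe (fromMaybe)
    import Data.Text (Text)
    import qualified Data.Text as T

    -- Choose which fields feed the fingerprint hash, in priority order,
    -- mirroring what the PATCH 66 doctest comments document:
    --   1. project id + exception type + normalized stack (when a stack exists)
    --   2. project id + service + span + exception type + normalized message
    --   3. project id + service + span + normalized message
    pickFingerprintInputs :: Text -> Maybe Text -> Maybe Text -> Text -> Text -> Text -> [Text]
    pickFingerprintInputs projectId serviceM spanM exceptionType normMessage normStack
      | not (T.null normStack) = [projectId, exceptionType, normStack]
      | not (T.null exceptionType) = [projectId, svc, spn, exceptionType, normMessage]
      | otherwise = [projectId, svc, spn, normMessage]
      where
        svc = fromMaybe "" serviceM
        spn = fromMaybe "" spanM

Hashing the concatenation of these inputs yields the short fingerprints seen in the PATCH 71 doctest outputs; those doctests also show the same stack producing the same fingerprint across different messages, and normalizeMessage collapsing IPs so the same error from different hosts groups together.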