From 4bcb3485c41ab728facd3834f0ba45a9c1800404 Mon Sep 17 00:00:00 2001 From: Aleksandar Mitrovic <61409859+AleksandarCole@users.noreply.github.com> Date: Fri, 6 Feb 2026 13:47:25 +0100 Subject: [PATCH 001/160] Cursor can create changelog now (#2912) Added Cursor skill and command to review key changes merged in `main` in selected time range and create a summary in temp .md file. Signed-off-by: Muhammad Fuzail Zubari --- .cursor/commands/changelog.md | 26 +++++ .cursor/skills/superplane-changelog/SKILL.md | 103 +++++++++++++++++++ 2 files changed, 129 insertions(+) create mode 100644 .cursor/commands/changelog.md create mode 100644 .cursor/skills/superplane-changelog/SKILL.md diff --git a/.cursor/commands/changelog.md b/.cursor/commands/changelog.md new file mode 100644 index 0000000000..05419034b6 --- /dev/null +++ b/.cursor/commands/changelog.md @@ -0,0 +1,26 @@ +--- +description: Generate a "what's new" changelog from merged commits over a time range (e.g. since Monday, last 5 days). Writes user-focused markdown to tmp/. +--- + +# Changelog + +Generate a changelog of what was merged to `main` for a given time range. The output is a single markdown file in `tmp/` with new integrations, new components and triggers, improvements, security updates, and bug fixes. + +**Use the skill `superplane-changelog`** for the full workflow: time range, classifying commits (new integrations vs new components vs improvements vs security vs bug fixes), format rules (no em dashes, no "We", **Integration:** components, user-focused improvements, dedicated Security section with CVE when available), and output structure. + +## Input + +- **Time range** (required): e.g. "since Monday", "last 5 days", "since last Friday", or "from Feb 3 to now". If the user does not specify, ask or default to "since Monday (5 days)". + +## Process + +1. Determine start and end dates from the user's time range. +2. 
Run `git log --since="" --format="%h %ad %s" --date=short main` and use it to identify what landed in the window. +3. Classify: new integrations (whole integration new), new components/triggers only (filter by date; for existing integrations list only new components), user-facing improvements (no tech-only items), security fixes (separate section; CVE when available), bug fixes. +4. Resolve component/trigger names from `pkg/integrations/` and `pkg/components/` (Labels). +5. Write `tmp/changelog__to_.md` following the skill's structure and format rules. + +## Output + +- Path to the generated file (e.g. `tmp/changelog_2026-02-03_to_2026-02-06.md`). +- Invite the user to review and edit the file as needed. diff --git a/.cursor/skills/superplane-changelog/SKILL.md b/.cursor/skills/superplane-changelog/SKILL.md new file mode 100644 index 0000000000..83d435012c --- /dev/null +++ b/.cursor/skills/superplane-changelog/SKILL.md @@ -0,0 +1,103 @@ +--- +name: superplane-changelog +description: When generating a SuperPlane changelog from merged commits. Use for "what's new" summaries with new integrations, new components/triggers, improvements, security updates, and bug fixes. Output is user-focused markdown in tmp/. +--- + +# SuperPlane Changelog + +Use this skill when the user wants a changelog of what was merged to `main` over a given time range (e.g. "since Monday", "last 5 days", "since last Friday"). Produce a single markdown file in `tmp/` with a consistent structure and tone. + +--- + +## 1. Determine time range + +- **User may say**: "since Monday", "last 5 days", "since last Friday", "Feb 3 to now", or a specific date. +- **Compute**: Start date (e.g. last Monday = start of week) and end date (today). For "last 5 days" use Monday to Friday; for "since last Friday" use that Friday through today. +- **Git**: Use `git log --since="YYYY-MM-DD" --format="%h %ad %s" --date=short main` to list commits. 
Only include in the changelog items whose merge/commit date falls **on or after** the start date. + +--- + +## 2. Classify what landed + +From commit messages and dates: + +- **New integrations**: Integrations that were **fully added** in the window (base integration registered + first components). Example: SendGrid, Jira. Do **not** count standalone components (e.g. SSH is a component under `pkg/components/ssh`, not an integration). +- **New components and triggers**: Only components/triggers that **first appeared in the time window**. If an integration already existed, list only the new component(s) (e.g. GitHub: Get Release). If the integration is new, list all its components and triggers. Use commit dates to exclude anything that landed before the start date (e.g. Cloudflare DNS records merged Feb 1 are excluded if the window is "Monday Feb 3 to now"). +- **Improvements**: User-facing product changes (RBAC, Secrets, Bounty Program, integrations UX, list vs expression, multiple instances). Exclude internal/technical items (e.g. "Component/Trigger Cleanup()", "listing integration resources with additional parameters", Cursor skills). +- **Security**: Vulnerability fixes and security-related changes from the same commit range. Look for commits that mention "security", "SSRF", "CVE", "vulnerability", "auth", "injection", "XSS", "sanitiz", etc. Include a dedicated **Security** section whenever at least one such fix is present. +- **Bug fixes**: Fixes and reliability improvements from the same commit range (excluding security fixes, which go under Security). Keep in "Bug Fixes" even if somewhat technical. + +To resolve component/trigger names and which integration they belong to, use `pkg/integrations/*/` and `pkg/components/*/`: check each integration's `Components()` and `Triggers()` and their `Label()` / `Name()` (e.g. `aws.go` for AWS, `ecr/`, `codeartifact/`). + +--- + +## 3. Format rules (strict) + +- **No em dashes (—)**. 
Use colons or parentheses instead (e.g. **RBAC**: description). +- **No "We" language**. Use direct, neutral phrasing (e.g. "Role-based access control." not "We introduced role-based access control."). +- **New integrations section**: List only integration names, one per line (e.g. SendGrid, Jira). +- **New components section**: Use **Integration:** Component1, Component2, ... One line per integration or standalone component (e.g. **GitHub:** Get Release; **SSH:** Run commands on remote hosts). +- **Improvements**: Each bullet is **Bold label**: Short, user-focused description. No implementation details. No "We". +- **Security**: Dedicated section (use only when there are security-related commits). Each bullet: include **CVE identifier** when available (e.g. CVE-2024-12345), then a short description of the vulnerability or fix. If no CVE, use "Fixed: " plus description (e.g. "Fixed: SSRF protection added to HTTP requests"). Same tone as rest of changelog; no em dashes. +- **Bug fixes**: Each bullet starts with "Fixed: " then a short description. Do not list security fixes here; they go under Security. + +--- + +## 4. Output structure + +Write a single file to `tmp/changelog_YYYY-MM-DD_to_YYYY-MM-DD.md` (or similar) with this structure: + +```markdown +# SuperPlane Changelog (Feb X-Y, YYYY) + +## What's new since [Monday], [Month Day], YYYY (X days) + +#### N new integrations + + - IntegrationA + - IntegrationB + +#### M new components and triggers + + - **IntegrationA:** Component1, Component2, Trigger1 + - **IntegrationB:** Component1 + - **Standalone:** Description (e.g. **SSH:** Run commands on remote hosts) + +#### Improvements + + - **RBAC**: Role-based access control. Define roles and permissions... + - **Secrets**: Create, update, and delete organization secrets... + - **Bounty Program**: Get paid for building integrations. See [link]... + - (etc.) + +#### Security + + - CVE-YYYY-NNNNN: Short description of vulnerability and fix (when CVE exists). 
+ - Fixed: Short description of security fix (when no CVE). + (Omit this section entirely if no security-related commits in the window.) + +#### Bug Fixes + + - Fixed: Short description + - Fixed: ... +``` + +- Use three spaces before list bullets for indentation under each #### heading. +- Counts (N new integrations, M new components and triggers) must match the listed items and the chosen time window. + +--- + +## 5. Workflow summary + +1. Ask for or infer time range (e.g. "Monday to now" = 5 days). +2. Run `git log --since="" --format="%h %ad %s" --date=short main` and optionally inspect merge dates for key PRs. +3. Identify new integrations (whole new integration only), new components/triggers (per integration, only in window), improvements (user-facing only), security fixes (dedicated section; separate from bug fixes), and bug fixes. +4. Resolve labels from code: `pkg/integrations//` and `pkg/components/` for component/trigger names. +5. Write `tmp/changelog_.md` following the structure and format rules above. +6. Tell the user the file path and that they can review or edit it. + +--- + +## 6. Reference example + +See `tmp/changelog_2026-02-01_to_2026-02-06.md` (or the latest similar file in `tmp/`) for a concrete example of the desired style and structure. From 9acded6b3199fdf30b55e419af5c065db3249115 Mon Sep 17 00:00:00 2001 From: harxhist Date: Fri, 6 Feb 2026 22:13:22 +0530 Subject: [PATCH 002/160] feat: Add Claude integration and Create Message component (#2909) ### Summary This PR introduces the **Anthropic Claude** integration to SuperPlane. It establishes the base connection using an API Key and adds the `createMessage` component. This allows workflows to generate text, summaries, and structured responses using Claude models (e.g., for incident summaries, PR analysis, or documentation). Resolves #2623 Resolves #2624 ### Features - **Base Integration:** Connects to Anthropic via API Key (header authentication). 
- **Create Message Component:** - Support for user `Prompt` and optional `System message`. - Configurable `Model` selection (e.g., `claude-3-5-sonnet`, `claude-3-5-haiku`). - Adjustable `Max tokens` and `Temperature`. - **UI/UX:** Added integration icon and frontend mappers for the workflow builder. ### Output The `createMessage` component returns a single assistant reply to the output channel containing: - `id`: The unique message identifier. - `content`: The text content of the response. - `usage`: Token statistics (`input_tokens`, `output_tokens`). - `stop_reason`: The reason the generation stopped. ### Implementation Details - **Backend:** Implemented package `claude` to handle requests to `https://api.anthropic.com/v1/`. - **Structure:** Followed standard integration patterns for connection and separate files for components. - **Testing:** Added comprehensive unit tests for execution logic. - **Frontend:** Registered the integration in the sidebar and added necessary mappers in `web_src`. ### Files Changed In total 29 files are added or updated. 
| **Changes** | **Change Description** | | ----------------------------------------------- | ------------------------------------------------------------ | | `pkg/integrations/claude/claude.go` | Base integration setup and definition | | `pkg/integrations/claude/create_message.go` | `createMessage` component logic and execution | | `pkg/integrations/claude/client.go` | HTTP client for Anthropic API interaction | | `pkg/integrations/claude/*_test.go` | Unit tests for base, client, and component | | `pkg/integrations/claude/example*` | Example output JSON and embedding logic | | `docs/components/` | Added `Claude.mdx` & updated ordering for other components | | `web_src/src/pages/workflowv2/mappers/claude/*` | Frontend mappers for UI configuration | | `web_src/src/ui/...` & `web_src/src/utils/...` | UI registration, sidebar updates, and display name utilities | | `web_src/src/assets/...` | Claude integration SVG icon | | `pkg/server/server.go` | Registered new Claude integration | ### Test Plan - [x] All existing tests pass - [x] New component tests pass (`go test ./pkg/integrations/claude/...`) - [x] Code compiles without errors - [x] Verified API connection with valid Anthropic API Key - [x] Verified UI rendering of the Claude component in the Workflow Builder - [x] Used Claude in the Workflow and recorded video: [Watch Video (Loom)](https://www.loom.com/share/6b7af4edfca749f89a98417a96d58a80) ### Checklist - [x] Signed-off commits - [x] Unit tests - [x] Example output JSON - [x] Documentation in component --------- Signed-off-by: Harsh Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Claude.mdx | 90 +++++ docs/components/Cloudflare.mdx | 2 +- docs/components/Dash0.mdx | 2 +- docs/components/Datadog.mdx | 2 +- docs/components/Daytona.mdx | 2 +- docs/components/Discord.mdx | 2 +- docs/components/GitHub.mdx | 2 +- docs/components/Jira.mdx | 2 +- docs/components/OpenAI.mdx | 2 +- docs/components/PagerDuty.mdx | 2 +- docs/components/Rootly.mdx | 2 +- 
docs/components/SMTP.mdx | 2 +- docs/components/Semaphore.mdx | 2 +- docs/components/SendGrid.mdx | 2 +- docs/components/Slack.mdx | 2 +- pkg/integrations/claude/claude.go | 144 +++++++ pkg/integrations/claude/claude_test.go | 304 +++++++++++++++ pkg/integrations/claude/client.go | 167 ++++++++ pkg/integrations/claude/client_test.go | 355 ++++++++++++++++++ pkg/integrations/claude/create_message.go | 262 +++++++++++++ .../claude/create_message_test.go | 321 ++++++++++++++++ pkg/integrations/claude/example.go | 17 + .../claude/example_output_create_message.json | 31 ++ pkg/server/server.go | 1 + .../src/assets/icons/integrations/claude.svg | 1 + .../pages/workflowv2/mappers/claude/base.ts | 70 ++++ .../pages/workflowv2/mappers/claude/index.ts | 13 + web_src/src/pages/workflowv2/mappers/index.ts | 9 + .../src/ui/BuildingBlocksSidebar/index.tsx | 3 + .../ui/componentSidebar/integrationIcons.tsx | 3 + web_src/src/utils/integrationDisplayName.ts | 1 + 31 files changed, 1806 insertions(+), 14 deletions(-) create mode 100644 docs/components/Claude.mdx create mode 100644 pkg/integrations/claude/claude.go create mode 100644 pkg/integrations/claude/claude_test.go create mode 100644 pkg/integrations/claude/client.go create mode 100644 pkg/integrations/claude/client_test.go create mode 100644 pkg/integrations/claude/create_message.go create mode 100644 pkg/integrations/claude/create_message_test.go create mode 100644 pkg/integrations/claude/example.go create mode 100644 pkg/integrations/claude/example_output_create_message.json create mode 100644 web_src/src/assets/icons/integrations/claude.svg create mode 100644 web_src/src/pages/workflowv2/mappers/claude/base.ts create mode 100644 web_src/src/pages/workflowv2/mappers/claude/index.ts diff --git a/docs/components/Claude.mdx b/docs/components/Claude.mdx new file mode 100644 index 0000000000..064248912b --- /dev/null +++ b/docs/components/Claude.mdx @@ -0,0 +1,90 @@ +--- +title: "Claude" +sidebar: + order: 3 +--- + +Use Claude 
models in workflows + +import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + +## Actions + + + + + +## Instructions + +To get new Claude API key, go to [platform.claude.com](https://platform.claude.com). + + + +## Create Message + +The Create Message component uses Anthropic's Claude models to generate text responses. + +### Use Cases + +- **Summarization**: Generate summaries of incidents or deployments. +- **Code Analysis**: specific code review or PR comments. +- **Content Generation**: Create documentation or drafting communications. + +### Configuration + +- **Model**: The Claude model to use (e.g., claude-3-5-sonnet-latest). +- **Prompt**: The main user message/instruction. +- **System Message**: (Optional) Context to define the assistant's behavior or persona. +- **Max Tokens**: (Optional) Limit the length of the generated response. +- **Temperature**: (Optional) Control randomness (0.0 to 1.0). + +### Output + +Returns a payload containing: +- **text**: The content generated by Claude. +- **usage**: Input and output token counts. +- **stopReason**: Why the generation ended (e.g., "end_turn", "max_tokens"). +- **model**: The specific model version used. 
+ +### Notes + +- Requires a valid Claude API key configured in integration +- Response quality and speed depend on the selected model +- Token usage is tracked and may incur costs based on your Claude plan + +### Example Output + +```json +{ + "data": { + "id": "msg_01X9JGt5...123456", + "model": "claude-3-5-sonnet-latest", + "response": { + "content": [ + { + "text": "Here is the summary of the deployment logs you requested...", + "type": "text" + } + ], + "id": "msg_01X9JGt5...123456", + "model": "claude-3-5-sonnet-latest", + "role": "assistant", + "stop_reason": "end_turn", + "type": "message", + "usage": { + "input_tokens": 45, + "output_tokens": 120 + } + }, + "stopReason": "end_turn", + "text": "Here is the summary of the deployment logs you requested...", + "usage": { + "input_tokens": 45, + "output_tokens": 120 + } + }, + "timestamp": "2026-02-06T12:00:00Z", + "type": "claude.message" +} +``` + diff --git a/docs/components/Cloudflare.mdx b/docs/components/Cloudflare.mdx index 8e46d193f0..6b42cd58e2 100644 --- a/docs/components/Cloudflare.mdx +++ b/docs/components/Cloudflare.mdx @@ -1,7 +1,7 @@ --- title: "Cloudflare" sidebar: - order: 3 + order: 4 --- Manage Cloudflare zones, rules, and DNS diff --git a/docs/components/Dash0.mdx b/docs/components/Dash0.mdx index 73a927e74a..d17af3e98b 100644 --- a/docs/components/Dash0.mdx +++ b/docs/components/Dash0.mdx @@ -1,7 +1,7 @@ --- title: "Dash0" sidebar: - order: 4 + order: 5 --- Connect to Dash0 to query data using Prometheus API diff --git a/docs/components/Datadog.mdx b/docs/components/Datadog.mdx index 58080d29cd..ea8f7ee9ad 100644 --- a/docs/components/Datadog.mdx +++ b/docs/components/Datadog.mdx @@ -1,7 +1,7 @@ --- title: "Datadog" sidebar: - order: 5 + order: 6 --- Create events in Datadog diff --git a/docs/components/Daytona.mdx b/docs/components/Daytona.mdx index 12e815c18f..fa1189dc37 100644 --- a/docs/components/Daytona.mdx +++ b/docs/components/Daytona.mdx @@ -1,7 +1,7 @@ --- title: "Daytona" 
sidebar: - order: 6 + order: 7 --- Execute code in isolated sandbox environments diff --git a/docs/components/Discord.mdx b/docs/components/Discord.mdx index e6f387cd0c..5651752656 100644 --- a/docs/components/Discord.mdx +++ b/docs/components/Discord.mdx @@ -1,7 +1,7 @@ --- title: "Discord" sidebar: - order: 7 + order: 8 --- Send messages to Discord channels diff --git a/docs/components/GitHub.mdx b/docs/components/GitHub.mdx index 88e3709665..48a421cdb3 100644 --- a/docs/components/GitHub.mdx +++ b/docs/components/GitHub.mdx @@ -1,7 +1,7 @@ --- title: "GitHub" sidebar: - order: 8 + order: 9 --- Manage and react to changes in your GitHub repositories diff --git a/docs/components/Jira.mdx b/docs/components/Jira.mdx index 0c3f669824..649dd22ad5 100644 --- a/docs/components/Jira.mdx +++ b/docs/components/Jira.mdx @@ -1,7 +1,7 @@ --- title: "Jira" sidebar: - order: 9 + order: 10 --- Manage and react to issues in Jira diff --git a/docs/components/OpenAI.mdx b/docs/components/OpenAI.mdx index e91bf4c963..6f8fff7856 100644 --- a/docs/components/OpenAI.mdx +++ b/docs/components/OpenAI.mdx @@ -1,7 +1,7 @@ --- title: "OpenAI" sidebar: - order: 10 + order: 11 --- Generate text responses with OpenAI models diff --git a/docs/components/PagerDuty.mdx b/docs/components/PagerDuty.mdx index 9df618766a..2a9cb49b06 100644 --- a/docs/components/PagerDuty.mdx +++ b/docs/components/PagerDuty.mdx @@ -1,7 +1,7 @@ --- title: "PagerDuty" sidebar: - order: 11 + order: 12 --- Manage and react to incidents in PagerDuty diff --git a/docs/components/Rootly.mdx b/docs/components/Rootly.mdx index bb16b5150c..86eaa108bd 100644 --- a/docs/components/Rootly.mdx +++ b/docs/components/Rootly.mdx @@ -1,7 +1,7 @@ --- title: "Rootly" sidebar: - order: 12 + order: 13 --- Manage and react to incidents in Rootly diff --git a/docs/components/SMTP.mdx b/docs/components/SMTP.mdx index bf05017651..c0965c7207 100644 --- a/docs/components/SMTP.mdx +++ b/docs/components/SMTP.mdx @@ -1,7 +1,7 @@ --- title: "SMTP" 
sidebar: - order: 13 + order: 14 --- Send emails via any SMTP server diff --git a/docs/components/Semaphore.mdx b/docs/components/Semaphore.mdx index 1bf545c92c..54e0ac2af8 100644 --- a/docs/components/Semaphore.mdx +++ b/docs/components/Semaphore.mdx @@ -1,7 +1,7 @@ --- title: "Semaphore" sidebar: - order: 14 + order: 15 --- Run and react to your Semaphore workflows diff --git a/docs/components/SendGrid.mdx b/docs/components/SendGrid.mdx index d6ca9c4a1c..8cb7f547b4 100644 --- a/docs/components/SendGrid.mdx +++ b/docs/components/SendGrid.mdx @@ -1,7 +1,7 @@ --- title: "SendGrid" sidebar: - order: 15 + order: 16 --- Send transactional and marketing email with SendGrid diff --git a/docs/components/Slack.mdx b/docs/components/Slack.mdx index e524fe6a27..7891472e50 100644 --- a/docs/components/Slack.mdx +++ b/docs/components/Slack.mdx @@ -1,7 +1,7 @@ --- title: "Slack" sidebar: - order: 16 + order: 17 --- Send and react to Slack messages and interactions diff --git a/pkg/integrations/claude/claude.go b/pkg/integrations/claude/claude.go new file mode 100644 index 0000000000..6120238d71 --- /dev/null +++ b/pkg/integrations/claude/claude.go @@ -0,0 +1,144 @@ +package claude + +import ( + "fmt" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/registry" +) + +func init() { + registry.RegisterIntegration("claude", &Claude{}) +} + +type Claude struct{} + +type Configuration struct { + APIKey string `json:"apiKey"` +} + +func (i *Claude) Name() string { + return "claude" +} + +func (i *Claude) Label() string { + return "Claude" +} + +func (i *Claude) Icon() string { + return "loader" +} + +func (i *Claude) Description() string { + return "Use Claude models in workflows" +} + +func (i *Claude) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "apiKey", + Label: "API Key", + Type: 
configuration.FieldTypeString, + Sensitive: true, + Description: "Claude API key", + Required: true, + }, + } +} + +func (i *Claude) Components() []core.Component { + return []core.Component{ + &CreateMessage{}, + } +} + +func (i *Claude) Triggers() []core.Trigger { + return []core.Trigger{} +} + +func (i *Claude) Instructions() string { + return "To get new Claude API key, go to [platform.claude.com](https://platform.claude.com)." +} + +func (i *Claude) Cleanup(ctx core.IntegrationCleanupContext) error { + return nil +} + +func (i *Claude) Sync(ctx core.SyncContext) error { + config := Configuration{} + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %v", err) + } + + if config.APIKey == "" { + return fmt.Errorf("apiKey is required") + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + if err := client.Verify(); err != nil { + return err + } + + ctx.Integration.Ready() + return nil +} + +func (i *Claude) HandleRequest(ctx core.HTTPRequestContext) { +} + +func (i *Claude) CompareWebhookConfig(a, b any) (bool, error) { + return true, nil +} + +func (i *Claude) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + if resourceType != "model" { + return []core.IntegrationResource{}, nil + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, err + } + + models, err := client.ListModels() + if err != nil { + return nil, err + } + + resources := make([]core.IntegrationResource, 0, len(models)) + for _, model := range models { + if model.ID == "" { + continue + } + + resources = append(resources, core.IntegrationResource{ + Type: resourceType, + Name: model.ID, + ID: model.ID, + }) + } + + return resources, nil +} + +func (i *Claude) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { + return nil, nil +} + +func (i *Claude) CleanupWebhook(ctx 
core.CleanupWebhookContext) error { + return nil +} + +func (i *Claude) Actions() []core.Action { + return []core.Action{} +} + +func (i *Claude) HandleAction(ctx core.IntegrationActionContext) error { + return nil +} diff --git a/pkg/integrations/claude/claude_test.go b/pkg/integrations/claude/claude_test.go new file mode 100644 index 0000000000..5a7f0eb51b --- /dev/null +++ b/pkg/integrations/claude/claude_test.go @@ -0,0 +1,304 @@ +package claude + +import ( + "bytes" + "fmt" + "io" + "net/http" + "testing" + "time" + + "github.com/google/uuid" + "github.com/sirupsen/logrus" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +// --- Mocks --- + +// mockHTTPContext implements core.HTTPContext for testing +type mockHTTPContext struct { + RoundTripFunc func(req *http.Request) *http.Response +} + +func (m *mockHTTPContext) Do(req *http.Request) (*http.Response, error) { + if m.RoundTripFunc != nil { + return m.RoundTripFunc(req), nil + } + return &http.Response{ + StatusCode: http.StatusOK, + Body: io.NopCloser(bytes.NewBufferString(`{}`)), + }, nil +} + +// mockIntegrationContext implements core.IntegrationContext for testing +type mockIntegrationContext struct { + config map[string][]byte + ready bool + errMsg string +} + +func newMockIntegrationContext() *mockIntegrationContext { + return &mockIntegrationContext{ + config: make(map[string][]byte), + } +} + +func (m *mockIntegrationContext) GetConfig(name string) ([]byte, error) { + val, ok := m.config[name] + if !ok { + return nil, fmt.Errorf("config not found: %s", name) + } + return val, nil +} + +func (m *mockIntegrationContext) Ready() { + m.ready = true +} + +func (m *mockIntegrationContext) Error(message string) { + m.errMsg = message +} + +// Stubs for other interface methods +func (m *mockIntegrationContext) ID() uuid.UUID { return uuid.New() } +func (m *mockIntegrationContext) GetMetadata() any { return nil } +func (m *mockIntegrationContext) 
SetMetadata(any) {} +func (m *mockIntegrationContext) NewBrowserAction(core.BrowserAction) {} +func (m *mockIntegrationContext) RemoveBrowserAction() {} +func (m *mockIntegrationContext) SetSecret(string, []byte) error { return nil } +func (m *mockIntegrationContext) GetSecrets() ([]core.IntegrationSecret, error) { + return nil, nil +} +func (m *mockIntegrationContext) RequestWebhook(any) error { return nil } +func (m *mockIntegrationContext) Subscribe(any) (*uuid.UUID, error) { + u := uuid.New() + return &u, nil +} +func (m *mockIntegrationContext) ScheduleResync(time.Duration) error { return nil } +func (m *mockIntegrationContext) ScheduleActionCall(string, any, time.Duration) error { + return nil +} +func (m *mockIntegrationContext) ListSubscriptions() ([]core.IntegrationSubscriptionContext, error) { + return nil, nil +} + +// --- Tests --- + +func TestClaude_Configuration(t *testing.T) { + i := &Claude{} + configs := i.Configuration() + + // Use string here instead of configuration.FieldType to avoid type errors + expectedFields := map[string]struct { + Type string + Required bool + }{ + "apiKey": {string(configuration.FieldTypeString), true}, + } + + if len(configs) != len(expectedFields) { + t.Errorf("expected %d config fields, got %d", len(expectedFields), len(configs)) + } + + for _, field := range configs { + expected, ok := expectedFields[field.Name] + if !ok { + t.Errorf("unexpected field: %s", field.Name) + continue + } + // Cast field.Type to string for comparison + if string(field.Type) != expected.Type { + t.Errorf("field %s: expected type %s, got %s", field.Name, expected.Type, field.Type) + } + if field.Required != expected.Required { + t.Errorf("field %s: expected required %v, got %v", field.Name, expected.Required, field.Required) + } + } +} + +func TestClaude_Sync(t *testing.T) { + logger := logrus.NewEntry(logrus.New()) + + tests := []struct { + name string + config map[string]interface{} + mockResponses func(*http.Request) *http.Response + 
expectError bool + expectReady bool + }{ + { + name: "Success", + config: map[string]interface{}{ + "apiKey": "sk-ant-test", + }, + mockResponses: func(req *http.Request) *http.Response { + if req.URL.Path == "/v1/models" { + return &http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewBufferString(`{"data": [{"id": "claude-2"}]}`)), + } + } + return &http.Response{StatusCode: 404, Body: io.NopCloser(bytes.NewBufferString(""))} + }, + expectError: false, + expectReady: true, + }, + { + name: "Missing API Key", + config: map[string]interface{}{ + "apiKey": "", + }, + expectError: true, + }, + { + name: "Verify Fails (401)", + config: map[string]interface{}{ + "apiKey": "invalid", + }, + mockResponses: func(req *http.Request) *http.Response { + return &http.Response{ + StatusCode: 401, + Body: io.NopCloser(bytes.NewBufferString(`{"error": {"type": "authentication_error", "message": "invalid api key"}}`)), + } + }, + expectError: true, + expectReady: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + i := &Claude{} + mockInt := newMockIntegrationContext() + mockHTTP := &mockHTTPContext{RoundTripFunc: tt.mockResponses} + + // Populate mock integration config (used by NewClient) + if v, ok := tt.config["apiKey"].(string); ok { + mockInt.config["apiKey"] = []byte(v) + } + + ctx := core.SyncContext{ + Logger: logger, + Configuration: tt.config, // Used by mapstructure decode + HTTP: mockHTTP, + Integration: mockInt, + } + + err := i.Sync(ctx) + + if tt.expectError && err == nil { + t.Error("expected error, got nil") + } + if !tt.expectError && err != nil { + t.Errorf("unexpected error: %v", err) + } + if tt.expectReady && !mockInt.ready { + t.Error("expected integration to be marked ready") + } + }) + } +} + +func TestClaude_ListResources(t *testing.T) { + logger := logrus.NewEntry(logrus.New()) + + tests := []struct { + name string + resourceType string + config map[string][]byte // Config is pulled from integration context 
+ mockResponses func(*http.Request) *http.Response + expectedIDs []string + expectError bool + }{ + { + name: "List Models Success", + resourceType: "model", + config: map[string][]byte{ + "apiKey": []byte("test"), + }, + mockResponses: func(req *http.Request) *http.Response { + return &http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewBufferString(`{ + "data": [ + {"id": "claude-2.1"}, + {"id": "claude-instant-1.2"} + ] + }`)), + } + }, + expectedIDs: []string{"claude-2.1", "claude-instant-1.2"}, + }, + { + name: "Invalid Resource Type", + resourceType: "deployment", + expectedIDs: []string{}, // Returns empty list, no error + }, + { + name: "API Error", + resourceType: "model", + config: map[string][]byte{ + "apiKey": []byte("test"), + }, + mockResponses: func(req *http.Request) *http.Response { + return &http.Response{ + StatusCode: 500, + Body: io.NopCloser(bytes.NewBufferString("server error")), + } + }, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + i := &Claude{} + mockInt := newMockIntegrationContext() + mockInt.config = tt.config + mockHTTP := &mockHTTPContext{RoundTripFunc: tt.mockResponses} + + ctx := core.ListResourcesContext{ + Logger: logger, + HTTP: mockHTTP, + Integration: mockInt, + } + + resources, err := i.ListResources(tt.resourceType, ctx) + + if tt.expectError { + if err == nil { + t.Error("expected error, got nil") + } + return + } + + if err != nil { + t.Errorf("unexpected error: %v", err) + return + } + + if len(resources) != len(tt.expectedIDs) { + t.Errorf("expected %d resources, got %d", len(tt.expectedIDs), len(resources)) + } + + for idx, id := range tt.expectedIDs { + if resources[idx].ID != id { + t.Errorf("resource %d: expected ID %s, got %s", idx, id, resources[idx].ID) + } + } + }) + } +} + +func TestClaude_CompareWebhookConfig(t *testing.T) { + i := &Claude{} + // Should always return true, nil based on implementation + ok, err := i.CompareWebhookConfig(nil, nil) 
+ if err != nil { + t.Errorf("unexpected error: %v", err) + } + if !ok { + t.Error("expected CompareWebhookConfig to return true") + } +} diff --git a/pkg/integrations/claude/client.go b/pkg/integrations/claude/client.go new file mode 100644 index 0000000000..2375aab1cd --- /dev/null +++ b/pkg/integrations/claude/client.go @@ -0,0 +1,167 @@ +package claude + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + + "github.com/superplanehq/superplane/pkg/core" +) + +const ( + defaultBaseURL = "https://api.anthropic.com/v1" + anthropicVersionValue = "2023-06-01" +) + +type Client struct { + APIKey string + BaseURL string + http core.HTTPContext +} + +type Message struct { + Role string `json:"role"` + Content string `json:"content"` +} + +type CreateMessageRequest struct { + Model string `json:"model"` + Messages []Message `json:"messages"` + System string `json:"system,omitempty"` + MaxTokens int `json:"max_tokens,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` +} + +type MessageContent struct { + Type string `json:"type"` + Text string `json:"text,omitempty"` +} + +type MessageUsage struct { + InputTokens int `json:"input_tokens"` + OutputTokens int `json:"output_tokens"` +} + +type CreateMessageResponse struct { + ID string `json:"id"` + Type string `json:"type"` + Role string `json:"role"` + Content []MessageContent `json:"content"` + Model string `json:"model"` + StopReason string `json:"stop_reason"` + StopSequence string `json:"stop_sequence,omitempty"` + Usage MessageUsage `json:"usage"` +} + +type ModelsResponse struct { + Data []Model `json:"data"` +} + +type Model struct { + ID string `json:"id"` +} + +type claudeErrorResponse struct { + Error struct { + Type string `json:"type"` + Message string `json:"message"` + } `json:"error"` +} + +func NewClient(httpClient core.HTTPContext, ctx core.IntegrationContext) (*Client, error) { + if ctx == nil { + return nil, fmt.Errorf("no integration context") + } + + apiKey, err := 
ctx.GetConfig("apiKey") + if err != nil { + return nil, err + } + + return &Client{ + APIKey: string(apiKey), + BaseURL: defaultBaseURL, + http: httpClient, + }, nil +} + +func (c *Client) Verify() error { + _, err := c.execRequest(http.MethodGet, c.BaseURL+"/models", nil) + return err +} + +func (c *Client) ListModels() ([]Model, error) { + responseBody, err := c.execRequest(http.MethodGet, c.BaseURL+"/models", nil) + if err != nil { + return nil, err + } + + var response ModelsResponse + if err := json.Unmarshal(responseBody, &response); err != nil { + return nil, fmt.Errorf("failed to unmarshal models response: %v", err) + } + + return response.Data, nil +} + +func (c *Client) CreateMessage(req CreateMessageRequest) (*CreateMessageResponse, error) { + reqBody, err := json.Marshal(req) + if err != nil { + return nil, fmt.Errorf("failed to marshal request: %v", err) + } + + responseBody, err := c.execRequest(http.MethodPost, c.BaseURL+"/messages", bytes.NewBuffer(reqBody)) + if err != nil { + return nil, err + } + + var response CreateMessageResponse + if err := json.Unmarshal(responseBody, &response); err != nil { + return nil, fmt.Errorf("failed to unmarshal message response: %v", err) + } + + return &response, nil +} + +func (c *Client) execRequest(method, URL string, body io.Reader) ([]byte, error) { + req, err := http.NewRequest(method, URL, body) + if err != nil { + return nil, fmt.Errorf("failed to build request: %v", err) + } + req.Header.Set("Content-Type", "application/json") + req.Header.Set("x-api-key", c.APIKey) + req.Header.Set("anthropic-version", anthropicVersionValue) + + res, err := c.http.Do(req) + if err != nil { + return nil, fmt.Errorf("request failed: %v", err) + } + defer res.Body.Close() + + responseBody, err := io.ReadAll(res.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response body: %v", err) + } + + if res.StatusCode < http.StatusOK || res.StatusCode >= http.StatusMultipleChoices { + var apiErr claudeErrorResponse + 
var errorMessage string + + // Try to parse the official Anthropic error message + if err := json.Unmarshal(responseBody, &apiErr); err == nil && apiErr.Error.Message != "" { + errorMessage = apiErr.Error.Message + } else { + errorMessage = string(responseBody) + } + + // Handle 401 specifically + if res.StatusCode == http.StatusUnauthorized { + return nil, fmt.Errorf("Claude credentials are invalid or expired: %s", errorMessage) + } + + return nil, fmt.Errorf("request failed (%d): %s", res.StatusCode, errorMessage) + } + return responseBody, nil +} diff --git a/pkg/integrations/claude/client_test.go b/pkg/integrations/claude/client_test.go new file mode 100644 index 0000000000..4c4ffbd3b9 --- /dev/null +++ b/pkg/integrations/claude/client_test.go @@ -0,0 +1,355 @@ +package claude + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + "testing" + "time" + + "github.com/google/uuid" + "github.com/superplanehq/superplane/pkg/core" +) + +// --- Mocks --- + +// mockHTTPContextForClient implements core.HTTPContext +type mockHTTPContextForClient struct { + // RoundTripFunc allows us to define the response for a specific request + RoundTripFunc func(req *http.Request) *http.Response +} + +func (m *mockHTTPContextForClient) Do(req *http.Request) (*http.Response, error) { + if m.RoundTripFunc != nil { + return m.RoundTripFunc(req), nil + } + // Default fallback + return &http.Response{ + StatusCode: http.StatusOK, + Body: io.NopCloser(bytes.NewBufferString(`{}`)), + }, nil +} + +// mockIntegrationContextForClient implements core.IntegrationContext +type mockIntegrationContextForClient struct { + config map[string][]byte +} + +func newMockIntegrationContextForClient() *mockIntegrationContextForClient { + return &mockIntegrationContextForClient{ + config: make(map[string][]byte), + } +} + +func (m *mockIntegrationContextForClient) GetConfig(name string) ([]byte, error) { + val, ok := m.config[name] + if !ok { + return nil, fmt.Errorf("config not found: %s", 
name) + } + return val, nil +} + +// Stubs to satisfy the core.IntegrationContext interface +func (m *mockIntegrationContextForClient) ID() uuid.UUID { return uuid.New() } +func (m *mockIntegrationContextForClient) GetMetadata() any { return nil } +func (m *mockIntegrationContextForClient) SetMetadata(any) {} +func (m *mockIntegrationContextForClient) Ready() {} +func (m *mockIntegrationContextForClient) Error(message string) {} +func (m *mockIntegrationContextForClient) NewBrowserAction(core.BrowserAction) {} +func (m *mockIntegrationContextForClient) RemoveBrowserAction() {} +func (m *mockIntegrationContextForClient) SetSecret(string, []byte) error { return nil } +func (m *mockIntegrationContextForClient) GetSecrets() ([]core.IntegrationSecret, error) { + return nil, nil +} +func (m *mockIntegrationContextForClient) RequestWebhook(any) error { return nil } +func (m *mockIntegrationContextForClient) Subscribe(any) (*uuid.UUID, error) { + return nil, nil +} +func (m *mockIntegrationContextForClient) ScheduleResync(time.Duration) error { return nil } +func (m *mockIntegrationContextForClient) ScheduleActionCall(string, any, time.Duration) error { + return nil +} +func (m *mockIntegrationContextForClient) ListSubscriptions() ([]core.IntegrationSubscriptionContext, error) { + return nil, nil +} + +// --- Tests --- + +func TestNewClient(t *testing.T) { + mockHTTP := &mockHTTPContextForClient{} + + tests := []struct { + name string + setupMock func(*mockIntegrationContextForClient) + ctx core.IntegrationContext + expectError bool + }{ + { + name: "Success", + setupMock: func(m *mockIntegrationContextForClient) { + m.config["apiKey"] = []byte("sk-123") + }, + expectError: false, + }, + { + name: "Nil Context", + ctx: nil, + expectError: true, + }, + { + name: "Missing API Key", + setupMock: func(m *mockIntegrationContextForClient) { + // No API Key + }, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var integrationCtx 
core.IntegrationContext + + if tt.ctx != nil { + // Use explicitly provided context (e.g. nil) + integrationCtx = tt.ctx + } else { + // Use the mock + mockInt := newMockIntegrationContextForClient() + if tt.setupMock != nil { + tt.setupMock(mockInt) + } + integrationCtx = mockInt + } + + client, err := NewClient(mockHTTP, integrationCtx) + + if tt.expectError { + if err == nil { + t.Error("expected error, got nil") + } + } else { + if err != nil { + t.Errorf("unexpected error: %v", err) + } + if client == nil { + t.Error("expected client, got nil") + } + if client.APIKey != "sk-123" { + t.Errorf("expected API Key 'sk-123', got %s", client.APIKey) + } + } + }) + } +} + +func TestClient_Verify(t *testing.T) { + tests := []struct { + name string + responseStatus int + expectError bool + }{ + {name: "Success 200", responseStatus: 200, expectError: false}, + {name: "Unauthorized 401", responseStatus: 401, expectError: true}, + {name: "Server Error 500", responseStatus: 500, expectError: true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockHTTP := &mockHTTPContextForClient{ + RoundTripFunc: func(req *http.Request) *http.Response { + if req.Method != http.MethodGet { + t.Errorf("expected method GET, got %s", req.Method) + } + if req.URL.String() != "https://api.anthropic.com/v1/models" { + t.Errorf("expected URL .../models, got %s", req.URL.String()) + } + return &http.Response{ + StatusCode: tt.responseStatus, + Body: io.NopCloser(bytes.NewBufferString(`{}`)), + } + }, + } + + client := &Client{ + APIKey: "test-key", + BaseURL: defaultBaseURL, + http: mockHTTP, + } + + err := client.Verify() + if tt.expectError && err == nil { + t.Error("expected error, got nil") + } + if !tt.expectError && err != nil { + t.Errorf("unexpected error: %v", err) + } + }) + } +} + +func TestClient_ListModels(t *testing.T) { + mockHTTP := &mockHTTPContextForClient{ + RoundTripFunc: func(req *http.Request) *http.Response { + jsonResp := `{ + "data": [ + {"id": 
"claude-3-opus"}, + {"id": "claude-3-sonnet"} + ] + }` + return &http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewBufferString(jsonResp)), + } + }, + } + + client := &Client{http: mockHTTP, BaseURL: defaultBaseURL} + + models, err := client.ListModels() + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if len(models) != 2 { + t.Errorf("expected 2 models, got %d", len(models)) + } + if models[0].ID != "claude-3-opus" { + t.Errorf("expected first model to be claude-3-opus, got %s", models[0].ID) + } +} + +func TestClient_CreateMessage(t *testing.T) { + mockHTTP := &mockHTTPContextForClient{ + RoundTripFunc: func(req *http.Request) *http.Response { + // Verify Headers + if req.Header.Get("x-api-key") != "my-secret-key" { + t.Errorf("missing or wrong x-api-key header") + } + if req.Header.Get("anthropic-version") != "2023-06-01" { + t.Errorf("missing or wrong anthropic-version header") + } + if req.Header.Get("Content-Type") != "application/json" { + t.Errorf("missing or wrong Content-Type") + } + + // Verify Body + bodyBytes, _ := io.ReadAll(req.Body) + var sentReq CreateMessageRequest + if err := json.Unmarshal(bodyBytes, &sentReq); err != nil { + t.Errorf("failed to unmarshal sent body: %v", err) + } + if sentReq.Model != "claude-3-opus" { + t.Errorf("sent wrong model: %s", sentReq.Model) + } + + // Return Success + jsonResp := `{ + "id": "msg_123", + "type": "message", + "role": "assistant", + "content": [ + {"type": "text", "text": "Hello there"} + ], + "model": "claude-3-opus", + "stop_reason": "end_turn", + "usage": {"input_tokens": 10, "output_tokens": 5} + }` + return &http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewBufferString(jsonResp)), + } + }, + } + + client := &Client{ + APIKey: "my-secret-key", + BaseURL: defaultBaseURL, + http: mockHTTP, + } + + req := CreateMessageRequest{ + Model: "claude-3-opus", + Messages: []Message{ + {Role: "user", Content: "Hi"}, + }, + MaxTokens: 1024, + } + + resp, err := 
client.CreateMessage(req) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if resp.ID != "msg_123" { + t.Errorf("expected ID msg_123, got %s", resp.ID) + } + if len(resp.Content) == 0 || resp.Content[0].Text != "Hello there" { + t.Error("response content mismatch") + } +} + +func TestClient_ErrorHandling(t *testing.T) { + tests := []struct { + name string + responseBody string + statusCode int + expectedErrorMsg string + }{ + { + name: "Structured Anthropic Error", + responseBody: `{ + "error": { + "type": "invalid_request_error", + "message": "max_tokens is too large" + } + }`, + statusCode: 400, + expectedErrorMsg: "request failed (400): max_tokens is too large", + }, + { + name: "Unstructured Plain Text Error", + responseBody: `Bad Gateway`, + statusCode: 502, + expectedErrorMsg: "request failed (502): Bad Gateway", + }, + { + name: "Auth Error (401)", + responseBody: `{ + "error": { + "type": "authentication_error", + "message": "invalid x-api-key" + } + }`, + statusCode: 401, + expectedErrorMsg: "Claude credentials are invalid or expired: invalid x-api-key", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockHTTP := &mockHTTPContextForClient{ + RoundTripFunc: func(req *http.Request) *http.Response { + return &http.Response{ + StatusCode: tt.statusCode, + Body: io.NopCloser(bytes.NewBufferString(tt.responseBody)), + } + }, + } + + client := &Client{http: mockHTTP, BaseURL: defaultBaseURL} + + // We use ListModels as a simple way to trigger execRequest + _, err := client.ListModels() + + if err == nil { + t.Fatal("expected error, got nil") + } + + if err.Error() != tt.expectedErrorMsg { + t.Errorf("expected error message '%s', got '%s'", tt.expectedErrorMsg, err.Error()) + } + }) + } +} diff --git a/pkg/integrations/claude/create_message.go b/pkg/integrations/claude/create_message.go new file mode 100644 index 0000000000..fa2f74004e --- /dev/null +++ b/pkg/integrations/claude/create_message.go @@ -0,0 +1,262 @@ 
+package claude + +import ( + "fmt" + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "net/http" + "strings" +) + +const MessagePayloadType = "claude.message" + +type CreateMessage struct{} + +type CreateMessageSpec struct { + Model string `json:"model"` + Prompt string `json:"prompt"` + SystemMessage string `json:"systemMessage"` + MaxTokens int `json:"maxTokens"` + Temperature *float64 `json:"temperature"` +} + +type MessagePayload struct { + ID string `json:"id"` + Model string `json:"model"` + Text string `json:"text"` + Usage *MessageUsage `json:"usage,omitempty"` + StopReason string `json:"stopReason,omitempty"` + Response *CreateMessageResponse `json:"response"` +} + +func (c *CreateMessage) Name() string { + return "claude.createMessage" +} + +func (c *CreateMessage) Label() string { + return "Create Message" +} + +func (c *CreateMessage) Description() string { + return "Generate a response using Anthropic's Claude models via the Messages API" +} + +func (c *CreateMessage) Documentation() string { + return `The Create Message component uses Anthropic's Claude models to generate text responses. + +## Use Cases + +- **Summarization**: Generate summaries of incidents or deployments. +- **Code Analysis**: specific code review or PR comments. +- **Content Generation**: Create documentation or drafting communications. + +## Configuration + +- **Model**: The Claude model to use (e.g., claude-3-5-sonnet-latest). +- **Prompt**: The main user message/instruction. +- **System Message**: (Optional) Context to define the assistant's behavior or persona. +- **Max Tokens**: (Optional) Limit the length of the generated response. +- **Temperature**: (Optional) Control randomness (0.0 to 1.0). + +## Output + +Returns a payload containing: +- **text**: The content generated by Claude. +- **usage**: Input and output token counts. 
+- **stopReason**: Why the generation ended (e.g., "end_turn", "max_tokens"). +- **model**: The specific model version used. + +## Notes + +- Requires a valid Claude API key configured in integration +- Response quality and speed depend on the selected model +- Token usage is tracked and may incur costs based on your Claude plan +` +} + +func (c *CreateMessage) Icon() string { + return "message-square" +} + +func (c *CreateMessage) Color() string { + return "orange" +} + +func (c *CreateMessage) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *CreateMessage) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "model", + Label: "Model", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + Default: "claude-opus-4-6", + Placeholder: "Select a Claude model", + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "model", + }, + }, + }, + { + Name: "prompt", + Label: "Prompt", + Type: configuration.FieldTypeText, + Required: true, + Placeholder: "Enter the user prompt", + Description: "The main instruction or question for Claude", + }, + { + Name: "systemMessage", + Label: "System Message", + Type: configuration.FieldTypeText, + Required: false, + Placeholder: "e.g. You are a concise DevOps assistant", + Description: "Optional context to set behavior or persona", + }, + { + Name: "maxTokens", + Label: "Max Tokens", + Type: configuration.FieldTypeNumber, + Required: false, + Default: "4096", + Description: "Maximum number of tokens to generate e.g. 
Defaults to 4096.", + }, + { + Name: "temperature", + Label: "Temperature", + Type: configuration.FieldTypeNumber, + Required: false, + Default: "1.0", + Description: "Amount of randomness injected into the response (0.0 to 1.0)", + }, + } +} + +func (c *CreateMessage) Setup(ctx core.SetupContext) error { + spec := CreateMessageSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %v", err) + } + + if spec.Model == "" { + return fmt.Errorf("model is required") + } + + if spec.Prompt == "" { + return fmt.Errorf("prompt is required") + } + + return nil +} + +func (c *CreateMessage) Execute(ctx core.ExecutionContext) error { + spec := CreateMessageSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %v", err) + } + + if spec.Model == "" { + return fmt.Errorf("model is required") + } + if spec.Prompt == "" { + return fmt.Errorf("prompt is required") + } + + if spec.MaxTokens == 0 { + spec.MaxTokens = 4096 + } + + if spec.MaxTokens < 1 { + return fmt.Errorf("maxTokens must be at least 1") + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + req := CreateMessageRequest{ + Model: spec.Model, + MaxTokens: spec.MaxTokens, + Messages: []Message{ + { + Role: "user", + Content: spec.Prompt, + }, + }, + Temperature: spec.Temperature, + } + + if spec.SystemMessage != "" { + req.System = spec.SystemMessage + } + + response, err := client.CreateMessage(req) + if err != nil { + return err + } + + text := extractMessageText(response) + + payload := MessagePayload{ + ID: response.ID, + Model: response.Model, + Text: text, + Usage: &response.Usage, + StopReason: response.StopReason, + Response: response, + } + + return ctx.ExecutionState.Emit( + core.DefaultOutputChannel.Name, + MessagePayloadType, + []any{payload}, + ) +} + +func (c *CreateMessage) Cancel(ctx core.ExecutionContext) 
error { + return nil +} + +func (c *CreateMessage) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *CreateMessage) Actions() []core.Action { + return []core.Action{} +} + +func (c *CreateMessage) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *CreateMessage) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *CreateMessage) Cleanup(ctx core.SetupContext) error { + return nil +} + +func extractMessageText(response *CreateMessageResponse) string { + if response == nil || len(response.Content) == 0 { + return "" + } + + var builder strings.Builder + for _, block := range response.Content { + if block.Type == "text" { + if builder.Len() > 0 { + builder.WriteString("\n") + } + builder.WriteString(block.Text) + } + } + return builder.String() +} diff --git a/pkg/integrations/claude/create_message_test.go b/pkg/integrations/claude/create_message_test.go new file mode 100644 index 0000000000..5c78764850 --- /dev/null +++ b/pkg/integrations/claude/create_message_test.go @@ -0,0 +1,321 @@ +package claude + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "testing" + + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +// --- Mocks --- + +// mockExecutionState implements core.ExecutionStateContext +type mockExecutionState struct { + EmittedChannel string + EmittedType string + EmittedPayloads []any + Finished bool + Failed bool + FailReason, FailMsg string +} + +func (m *mockExecutionState) IsFinished() bool { return m.Finished } +func (m *mockExecutionState) SetKV(key, value string) error { return nil } + +func (m *mockExecutionState) Emit(channel, payloadType string, payloads []any) error { + m.EmittedChannel = channel + m.EmittedType = payloadType + m.EmittedPayloads = payloads + return nil +} + +func (m *mockExecutionState) Pass() error { + m.Finished = true + 
return nil +} + +func (m *mockExecutionState) Fail(reason, message string) error { + m.Finished = true + m.Failed = true + m.FailReason = reason + m.FailMsg = message + return nil +} + +// --- Tests --- + +func TestCreateMessage_Configuration(t *testing.T) { + c := &CreateMessage{} + config := c.Configuration() + + expectedFields := map[string]struct { + Required bool + Type string + }{ + "model": {true, string(configuration.FieldTypeIntegrationResource)}, + "prompt": {true, string(configuration.FieldTypeText)}, + "systemMessage": {false, string(configuration.FieldTypeText)}, + "maxTokens": {false, string(configuration.FieldTypeNumber)}, + "temperature": {false, string(configuration.FieldTypeNumber)}, + } + + for _, field := range config { + expected, ok := expectedFields[field.Name] + if !ok { + t.Errorf("unexpected field: %s", field.Name) + continue + } + if field.Required != expected.Required { + t.Errorf("field %s: expected required %v, got %v", field.Name, expected.Required, field.Required) + } + if string(field.Type) != expected.Type { + t.Errorf("field %s: expected type %s, got %s", field.Name, expected.Type, field.Type) + } + } +} + +func TestCreateMessage_Setup(t *testing.T) { + c := &CreateMessage{} + + tests := []struct { + name string + config map[string]interface{} + expectError bool + }{ + { + name: "Valid Config", + config: map[string]interface{}{ + "model": "claude-3-opus", + "prompt": "Hello", + }, + expectError: false, + }, + { + name: "Missing Model", + config: map[string]interface{}{ + "prompt": "Hello", + }, + expectError: true, + }, + { + name: "Missing Prompt", + config: map[string]interface{}{ + "model": "claude-3-opus", + }, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ctx := core.SetupContext{ + Configuration: tt.config, + } + err := c.Setup(ctx) + if tt.expectError && err == nil { + t.Error("expected error, got nil") + } + if !tt.expectError && err != nil { + t.Errorf("unexpected 
error: %v", err) + } + }) + } +} + +func TestCreateMessage_Execute(t *testing.T) { + c := &CreateMessage{} + + // Helper to create a valid response JSON + validResponseJSON := `{ + "id": "msg_01", + "type": "message", + "role": "assistant", + "model": "claude-3-test", + "content": [ + {"type": "text", "text": "Hello world"} + ], + "stop_reason": "end_turn", + "usage": {"input_tokens": 10, "output_tokens": 5} + }` + + tests := []struct { + name string + config map[string]interface{} + mockResponse func(*http.Request) *http.Response + expectError bool + expectEmission bool + validatePayload func(*testing.T, MessagePayload) + }{ + { + name: "Success", + config: map[string]interface{}{ + "model": "claude-3-test", + "prompt": "Say hello", + "maxTokens": 500, + "systemMessage": "You are a bot", + "temperature": 0.7, + }, + mockResponse: func(req *http.Request) *http.Response { + // Verify request body + body, _ := io.ReadAll(req.Body) + var sent CreateMessageRequest + json.Unmarshal(body, &sent) + + if sent.Model != "claude-3-test" || sent.MaxTokens != 500 || sent.System != "You are a bot" { + return &http.Response{StatusCode: 400, Body: io.NopCloser(bytes.NewBufferString("bad request body"))} + } + + return &http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewBufferString(validResponseJSON)), + } + }, + expectError: false, + expectEmission: true, + validatePayload: func(t *testing.T, p MessagePayload) { + if p.Text != "Hello world" { + t.Errorf("expected text 'Hello world', got '%s'", p.Text) + } + if p.ID != "msg_01" { + t.Errorf("expected ID 'msg_01', got '%s'", p.ID) + } + if p.Usage.InputTokens != 10 { + t.Errorf("expected usage 10, got %d", p.Usage.InputTokens) + } + }, + }, + { + name: "Missing Configuration in Execute", + config: map[string]interface{}{ + "model": "", // Invalid + }, + expectError: true, + }, + { + name: "API Error", + config: map[string]interface{}{ + "model": "claude-3-test", + "prompt": "fail me", + }, + mockResponse: func(req 
*http.Request) *http.Response { + return &http.Response{ + StatusCode: 500, + Body: io.NopCloser(bytes.NewBufferString(`{"error": {"message": "internal error"}}`)), + } + }, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Setup Mocks + mockState := &mockExecutionState{} + mockHTTP := &mockHTTPContext{RoundTripFunc: tt.mockResponse} + mockInt := &mockIntegrationContext{ + config: map[string][]byte{ + "apiKey": []byte("test-key"), + }, + } + + ctx := core.ExecutionContext{ + Configuration: tt.config, + ExecutionState: mockState, + HTTP: mockHTTP, + Integration: mockInt, + } + + err := c.Execute(ctx) + + if tt.expectError { + if err == nil { + t.Error("expected error, got nil") + } + return + } + + if err != nil { + t.Errorf("unexpected error: %v", err) + return + } + + if tt.expectEmission { + if mockState.EmittedType != MessagePayloadType { + t.Errorf("expected emitted type %s, got %s", MessagePayloadType, mockState.EmittedType) + } + if len(mockState.EmittedPayloads) != 1 { + t.Errorf("expected 1 payload, got %d", len(mockState.EmittedPayloads)) + } else if tt.validatePayload != nil { + // Convert payload back to struct for validation + // In real execution this is passed as any, here we cast it + payload, ok := mockState.EmittedPayloads[0].(MessagePayload) + if !ok { + t.Error("emitted payload is not MessagePayload") + } else { + tt.validatePayload(t, payload) + } + } + } + }) + } +} + +func TestExtractMessageText(t *testing.T) { + // This function is unexported, but accessible within the package_test if package is same + // If the test file is package claude_test, we can't access it. + // Assuming package claude per user instruction. 
+ + tests := []struct { + name string + response *CreateMessageResponse + expected string + }{ + { + name: "Nil Response", + response: nil, + expected: "", + }, + { + name: "Single Text Block", + response: &CreateMessageResponse{ + Content: []MessageContent{ + {Type: "text", Text: "Hello"}, + }, + }, + expected: "Hello", + }, + { + name: "Multiple Text Blocks", + response: &CreateMessageResponse{ + Content: []MessageContent{ + {Type: "text", Text: "Hello"}, + {Type: "text", Text: "World"}, + }, + }, + expected: "Hello\nWorld", + }, + { + name: "Mixed Blocks (ignore non-text if any)", + response: &CreateMessageResponse{ + Content: []MessageContent{ + {Type: "image", Text: ""}, // hypothetical non-text + {Type: "text", Text: "Real Text"}, + }, + }, + expected: "Real Text", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := extractMessageText(tt.response) + if got != tt.expected { + t.Errorf("expected '%s', got '%s'", tt.expected, got) + } + }) + } +} diff --git a/pkg/integrations/claude/example.go b/pkg/integrations/claude/example.go new file mode 100644 index 0000000000..f3b8fafdcc --- /dev/null +++ b/pkg/integrations/claude/example.go @@ -0,0 +1,17 @@ +package claude + +import ( + _ "embed" + "github.com/superplanehq/superplane/pkg/utils" + "sync" +) + +//go:embed example_output_create_message.json +var exampleOutputCreateMessageBytes []byte + +var exampleOutputCreateMessageOnce sync.Once +var exampleOutputCreateMessage map[string]any + +func (c *CreateMessage) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateMessageOnce, exampleOutputCreateMessageBytes, &exampleOutputCreateMessage) +} diff --git a/pkg/integrations/claude/example_output_create_message.json b/pkg/integrations/claude/example_output_create_message.json new file mode 100644 index 0000000000..2fd3c0e729 --- /dev/null +++ b/pkg/integrations/claude/example_output_create_message.json @@ -0,0 +1,31 @@ +{ + "type": "claude.message", 
+ "data": { + "id": "msg_01X9JGt5...123456", + "model": "claude-3-5-sonnet-latest", + "text": "Here is the summary of the deployment logs you requested...", + "stopReason": "end_turn", + "usage": { + "input_tokens": 45, + "output_tokens": 120 + }, + "response": { + "id": "msg_01X9JGt5...123456", + "type": "message", + "role": "assistant", + "model": "claude-3-5-sonnet-latest", + "content": [ + { + "type": "text", + "text": "Here is the summary of the deployment logs you requested..." + } + ], + "stop_reason": "end_turn", + "usage": { + "input_tokens": 45, + "output_tokens": 120 + } + } + }, + "timestamp": "2026-02-06T12:00:00Z" +} \ No newline at end of file diff --git a/pkg/server/server.go b/pkg/server/server.go index 01cb22cd3b..f8ae5cae63 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -33,6 +33,7 @@ import ( _ "github.com/superplanehq/superplane/pkg/components/timegate" _ "github.com/superplanehq/superplane/pkg/components/wait" _ "github.com/superplanehq/superplane/pkg/integrations/aws" + _ "github.com/superplanehq/superplane/pkg/integrations/claude" _ "github.com/superplanehq/superplane/pkg/integrations/cloudflare" _ "github.com/superplanehq/superplane/pkg/integrations/dash0" _ "github.com/superplanehq/superplane/pkg/integrations/datadog" diff --git a/web_src/src/assets/icons/integrations/claude.svg b/web_src/src/assets/icons/integrations/claude.svg new file mode 100644 index 0000000000..8c8b959770 --- /dev/null +++ b/web_src/src/assets/icons/integrations/claude.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/web_src/src/pages/workflowv2/mappers/claude/base.ts b/web_src/src/pages/workflowv2/mappers/claude/base.ts new file mode 100644 index 0000000000..3079629fb5 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/claude/base.ts @@ -0,0 +1,70 @@ +import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { getState, getStateMap, getTriggerRenderer } from ".."; +import { + ComponentBaseContext, + 
ComponentBaseMapper, + ExecutionDetailsContext, + ExecutionInfo, + NodeInfo, + OutputPayload, + SubtitleContext, +} from "../types"; +import claudeIcon from "@/assets/icons/integrations/claude.svg"; +import { formatTimeAgo } from "@/utils/date"; + +export const baseMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; + const componentName = context.componentDefinition.name ?? "claude"; + + return { + iconSrc: claudeIcon, + iconSlug: context.componentDefinition?.icon ?? "loader", + collapsedBackground: "bg-white", + collapsed: context.node.isCollapsed, + title: context.node.name || context.componentDefinition?.label || context.componentDefinition?.name || "Claude", + eventSections: lastExecution ? baseEventSections(context.nodes, lastExecution, componentName) : undefined, + includeEmptyState: !lastExecution, + eventStateMap: getStateMap(componentName), + }; + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const details: Record = {}; + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const payload = outputs?.default?.[0]; + + if (payload?.type) { + details["Event Type"] = payload.type; + } + + if (payload?.timestamp) { + details["Emitted At"] = new Date(payload.timestamp).toLocaleString(); + } + + return details; + }, + + subtitle(context: SubtitleContext): string { + const timestamp = context.execution.updatedAt || context.execution.createdAt; + return timestamp ? 
formatTimeAgo(new Date(timestamp)) : ""; + }, +}; + +function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); + const subtitleTimestamp = execution.updatedAt || execution.createdAt; + const eventSubtitle = subtitleTimestamp ? formatTimeAgo(new Date(subtitleTimestamp)) : ""; + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle, + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/claude/index.ts b/web_src/src/pages/workflowv2/mappers/claude/index.ts new file mode 100644 index 0000000000..5fa26c1f52 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/claude/index.ts @@ -0,0 +1,13 @@ +import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../types"; +import { baseMapper } from "./base"; +import { buildActionStateRegistry } from "../utils"; + +export const componentMappers: Record = { + createMessage: baseMapper, +}; + +export const triggerRenderers: Record = {}; + +export const eventStateRegistry: Record = { + createMessage: buildActionStateRegistry("completed"), +}; diff --git a/web_src/src/pages/workflowv2/mappers/index.ts b/web_src/src/pages/workflowv2/mappers/index.ts index aa1d6aaa31..645e2cda38 100644 --- a/web_src/src/pages/workflowv2/mappers/index.ts +++ b/web_src/src/pages/workflowv2/mappers/index.ts @@ -86,6 +86,12 @@ import { triggerRenderers as openaiTriggerRenderers, eventStateRegistry as openaiEventStateRegistry, } from "./openai/index"; +import { + componentMappers as claudeComponentMappers, + triggerRenderers as claudeTriggerRenderers, + eventStateRegistry as 
claudeEventStateRegistry, +} from "./claude/index"; + import { filterMapper, FILTER_STATE_REGISTRY } from "./filter"; import { sshMapper, SSH_STATE_REGISTRY } from "./ssh"; import { waitCustomFieldRenderer, waitMapper, WAIT_STATE_REGISTRY } from "./wait"; @@ -132,6 +138,7 @@ const appMappers: Record> = { aws: awsComponentMappers, discord: discordComponentMappers, openai: openaiComponentMappers, + claude: claudeComponentMappers, }; const appTriggerRenderers: Record> = { @@ -149,6 +156,7 @@ const appTriggerRenderers: Record> = { aws: awsTriggerRenderers, discord: discordTriggerRenderers, openai: openaiTriggerRenderers, + claude: claudeTriggerRenderers, }; const appEventStateRegistries: Record> = { @@ -165,6 +173,7 @@ const appEventStateRegistries: Record discord: discordEventStateRegistry, rootly: rootlyEventStateRegistry, openai: openaiEventStateRegistry, + claude: claudeEventStateRegistry, aws: awsEventStateRegistry, }; diff --git a/web_src/src/ui/BuildingBlocksSidebar/index.tsx b/web_src/src/ui/BuildingBlocksSidebar/index.tsx index 8882603ad5..f060fefb42 100644 --- a/web_src/src/ui/BuildingBlocksSidebar/index.tsx +++ b/web_src/src/ui/BuildingBlocksSidebar/index.tsx @@ -21,6 +21,7 @@ import discordIcon from "@/assets/icons/integrations/discord.svg"; import githubIcon from "@/assets/icons/integrations/github.svg"; import jiraIcon from "@/assets/icons/integrations/jira.svg"; import openAiIcon from "@/assets/icons/integrations/openai.svg"; +import claudeIcon from "@/assets/icons/integrations/claude.svg"; import pagerDutyIcon from "@/assets/icons/integrations/pagerduty.svg"; import slackIcon from "@/assets/icons/integrations/slack.svg"; import awsIcon from "@/assets/icons/integrations/aws.svg"; @@ -398,6 +399,7 @@ function CategorySection({ jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, + claude: claudeIcon, pagerduty: pagerDutyIcon, rootly: rootlyIcon, semaphore: SemaphoreLogo, @@ -467,6 +469,7 @@ function CategorySection({ github: githubIcon, openai: 
openAiIcon, "open-ai": openAiIcon, + claude: claudeIcon, pagerduty: pagerDutyIcon, rootly: rootlyIcon, semaphore: SemaphoreLogo, diff --git a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index f8d15fff49..c46a490c90 100644 --- a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -10,6 +10,7 @@ import discordIcon from "@/assets/icons/integrations/discord.svg"; import githubIcon from "@/assets/icons/integrations/github.svg"; import jiraIcon from "@/assets/icons/integrations/jira.svg"; import openAiIcon from "@/assets/icons/integrations/openai.svg"; +import claudeIcon from "@/assets/icons/integrations/claude.svg"; import pagerDutyIcon from "@/assets/icons/integrations/pagerduty.svg"; import rootlyIcon from "@/assets/icons/integrations/rootly.svg"; import slackIcon from "@/assets/icons/integrations/slack.svg"; @@ -29,6 +30,7 @@ export const INTEGRATION_APP_LOGO_MAP: Record = { jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, + claude: claudeIcon, pagerduty: pagerDutyIcon, rootly: rootlyIcon, semaphore: SemaphoreLogo, @@ -48,6 +50,7 @@ export const APP_LOGO_MAP: Record> = { jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, + claude: claudeIcon, pagerduty: pagerDutyIcon, rootly: rootlyIcon, semaphore: SemaphoreLogo, diff --git a/web_src/src/utils/integrationDisplayName.ts b/web_src/src/utils/integrationDisplayName.ts index 6d1dd4b8ed..d54f92ea5f 100644 --- a/web_src/src/utils/integrationDisplayName.ts +++ b/web_src/src/utils/integrationDisplayName.ts @@ -5,6 +5,7 @@ const INTEGRATION_TYPE_DISPLAY_NAMES: Record = { github: "GitHub", openai: "OpenAI", + claude: "Claude", pagerduty: "PagerDuty", slack: "Slack", discord: "Discord", From b967b6e67b6be678814f2bdcf3b8085951aeb012 Mon Sep 17 00:00:00 2001 From: Muhammad Fuzail Zubari Date: Sat, 7 Feb 2026 00:38:44 +0500 Subject: [PATCH 003/160] grafana integration base code 
Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Grafana.mdx | 121 +++++++++ pkg/integrations/grafana/client.go | 115 ++++++++ pkg/integrations/grafana/grafana.go | 138 ++++++++++ pkg/integrations/grafana/on_alert_firing.go | 130 +++++++++ pkg/integrations/grafana/query_data_source.go | 246 ++++++++++++++++++ .../src/assets/icons/integrations/grafana.svg | 2 + .../pages/workflowv2/mappers/grafana/index.ts | 16 ++ .../mappers/grafana/on_alert_firing.ts | 114 ++++++++ .../mappers/grafana/query_data_source.ts | 101 +++++++ .../pages/workflowv2/mappers/grafana/types.ts | 26 ++ web_src/src/pages/workflowv2/mappers/index.ts | 9 +- 11 files changed, 1017 insertions(+), 1 deletion(-) create mode 100644 docs/components/Grafana.mdx create mode 100644 pkg/integrations/grafana/client.go create mode 100644 pkg/integrations/grafana/grafana.go create mode 100644 pkg/integrations/grafana/on_alert_firing.go create mode 100644 pkg/integrations/grafana/query_data_source.go create mode 100644 web_src/src/assets/icons/integrations/grafana.svg create mode 100644 web_src/src/pages/workflowv2/mappers/grafana/index.ts create mode 100644 web_src/src/pages/workflowv2/mappers/grafana/on_alert_firing.ts create mode 100644 web_src/src/pages/workflowv2/mappers/grafana/query_data_source.ts create mode 100644 web_src/src/pages/workflowv2/mappers/grafana/types.ts diff --git a/docs/components/Grafana.mdx b/docs/components/Grafana.mdx new file mode 100644 index 0000000000..954de7e360 --- /dev/null +++ b/docs/components/Grafana.mdx @@ -0,0 +1,121 @@ +--- +title: "Grafana" +sidebar: + order: 9 +--- + +Connect Grafana alerts and data queries to SuperPlane workflows + +import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + +## Triggers + + + + + +## Actions + + + + + +## Instructions + +To connect Grafana to SuperPlane: + +1. Create a Service Account token or API key in Grafana. +2. Set the Base URL to your Grafana instance (for example, `https://grafana.example.com`). +3. 
Paste the API token into the Grafana integration configuration and save. + +For the alert trigger: + +1. Create a contact point with type **Webhook** in Grafana (Alerting > Contact points). +2. Use the webhook URL generated by SuperPlane for the trigger. +3. Route your alert rule to that contact point. + + + +## On Alert Firing + +The On Alert Firing trigger starts a workflow execution when Grafana Unified Alerting sends a firing alert webhook. + +### Use Cases + +- **Incident automation**: Start incident workflows when an alert fires +- **Notification workflows**: Post to chat or paging systems +- **Enrichment**: Fetch related dashboards or metrics + +### Configuration + +No additional configuration is required beyond the integration setup. + +### Event Data + +Emits the full Grafana webhook payload, including status, alerts array, and labels. + +### Example Data + +```json +{ + "data": { + "status": "firing", + "title": "High error rate", + "ruleUid": "alert_rule_uid", + "alerts": [ + { + "status": "firing", + "labels": { + "alertname": "HighErrorRate", + "service": "api" + }, + "annotations": { + "summary": "Error rate above threshold" + } + } + ] + }, + "timestamp": "2026-02-06T12:00:00Z", + "type": "grafana.alert.firing" +} +``` + + + +## Query Data Source + +The Query Data Source component executes a Grafana data source query and returns the response. + +### Use Cases + +- **Metrics investigation**: Run PromQL or other data source queries +- **Alert validation**: Check current conditions before escalating +- **Incident context**: Attach metric data to notifications + +### Configuration + +- **Data Source UID**: The Grafana datasource UID +- **Query**: The datasource query (PromQL, InfluxQL, etc.) +- **Time From**: Optional start time (for example, `now-5m`) +- **Time To**: Optional end time (for example, `now`) +- **Format**: Optional format passed to the datasource query + +### Output + +Emits the Grafana Query API response JSON. 
+ +### Example Output + +```json +{ + "data": { + "results": { + "A": { + "frames": [] + } + } + }, + "timestamp": "2026-02-06T12:00:00Z", + "type": "grafana.query.result" +} +``` diff --git a/pkg/integrations/grafana/client.go b/pkg/integrations/grafana/client.go new file mode 100644 index 0000000000..47d6190a87 --- /dev/null +++ b/pkg/integrations/grafana/client.go @@ -0,0 +1,115 @@ +package grafana + +import ( + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/superplanehq/superplane/pkg/core" +) + +const ( + maxResponseSize = 2 * 1024 * 1024 // 2MB +) + +type Client struct { + BaseURL string + APIToken string + http core.HTTPContext +} + +func NewClient(httpCtx core.HTTPContext, ctx core.IntegrationContext, requireToken bool) (*Client, error) { + baseURL, err := readBaseURL(ctx) + if err != nil { + return nil, err + } + + apiToken, err := readAPIToken(ctx) + if err != nil { + return nil, err + } + + if requireToken && apiToken == "" { + return nil, fmt.Errorf("apiToken is required") + } + + return &Client{ + BaseURL: baseURL, + APIToken: apiToken, + http: httpCtx, + }, nil +} + +func readBaseURL(ctx core.IntegrationContext) (string, error) { + baseURLConfig, err := ctx.GetConfig("baseURL") + if err != nil { + return "", fmt.Errorf("error reading baseURL: %v", err) + } + + if baseURLConfig == nil { + return "", fmt.Errorf("baseURL is required") + } + + baseURL := strings.TrimSpace(string(baseURLConfig)) + if baseURL == "" { + return "", fmt.Errorf("baseURL is required") + } + + if _, err := url.Parse(baseURL); err != nil { + return "", fmt.Errorf("invalid baseURL: %v", err) + } + + return strings.TrimSuffix(baseURL, "/"), nil +} + +func readAPIToken(ctx core.IntegrationContext) (string, error) { + apiTokenConfig, err := ctx.GetConfig("apiToken") + if err != nil { + return "", fmt.Errorf("error reading apiToken: %v", err) + } + + if apiTokenConfig == nil { + return "", nil + } + + return strings.TrimSpace(string(apiTokenConfig)), nil +} + +func 
(c *Client) buildURL(path string) string { + return fmt.Sprintf("%s/%s", strings.TrimSuffix(c.BaseURL, "/"), strings.TrimPrefix(path, "/")) +} + +func (c *Client) execRequest(method, path string, body io.Reader, contentType string) ([]byte, int, error) { + req, err := http.NewRequest(method, c.buildURL(path), body) + if err != nil { + return nil, 0, fmt.Errorf("error building request: %v", err) + } + + req.Header.Set("Accept", "application/json") + if c.APIToken != "" { + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", c.APIToken)) + } + if contentType != "" { + req.Header.Set("Content-Type", contentType) + } + + res, err := c.http.Do(req) + if err != nil { + return nil, 0, fmt.Errorf("error executing request: %v", err) + } + defer res.Body.Close() + + limitedReader := io.LimitReader(res.Body, maxResponseSize) + responseBody, err := io.ReadAll(limitedReader) + if err != nil { + return nil, res.StatusCode, fmt.Errorf("error reading body: %v", err) + } + + if len(responseBody) >= maxResponseSize { + return nil, res.StatusCode, fmt.Errorf("response too large: exceeds maximum size of %d bytes", maxResponseSize) + } + + return responseBody, res.StatusCode, nil +} diff --git a/pkg/integrations/grafana/grafana.go b/pkg/integrations/grafana/grafana.go new file mode 100644 index 0000000000..cdbf957a6b --- /dev/null +++ b/pkg/integrations/grafana/grafana.go @@ -0,0 +1,138 @@ +package grafana + +import ( + "fmt" + "strings" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/registry" +) + +func init() { + registry.RegisterIntegration("grafana", &Grafana{}) +} + +type Grafana struct{} + +type IntegrationMetadata struct { + BaseURL string `json:"baseURL" mapstructure:"baseURL"` +} + +func (g *Grafana) Name() string { + return "grafana" +} + +func (g *Grafana) Label() string { + return "Grafana" +} + +func (g *Grafana) Icon() string { + 
return "grafana" +} + +func (g *Grafana) Description() string { + return "Connect Grafana alerts and data queries to SuperPlane workflows" +} + +func (g *Grafana) Instructions() string { + return ` +To connect Grafana: +1. Create a Service Account token or API key in Grafana (Configuration > API Keys or Service Accounts). +2. Set the Base URL to your Grafana instance (e.g. https://grafana.example.com). +3. Paste the API token into SuperPlane and save. + +For the alert trigger: +1. Create a contact point with type "Webhook" in Grafana (Alerting > Contact points). +2. Use the webhook URL generated by SuperPlane for the trigger. +` +} + +func (g *Grafana) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "baseURL", + Label: "Base URL", + Type: configuration.FieldTypeString, + Description: "Your Grafana base URL (e.g. https://grafana.example.com)", + Required: true, + }, + { + Name: "apiToken", + Label: "API Token", + Type: configuration.FieldTypeString, + Description: "Grafana API key or service account token", + Sensitive: true, + Required: false, + }, + } +} + +func (g *Grafana) Actions() []core.Action { + return []core.Action{} +} + +func (g *Grafana) HandleAction(ctx core.IntegrationActionContext) error { + return nil +} + +func (g *Grafana) Components() []core.Component { + return []core.Component{ + &QueryDataSource{}, + } +} + +func (g *Grafana) Triggers() []core.Trigger { + return []core.Trigger{ + &OnAlertFiring{}, + } +} + +func (g *Grafana) Cleanup(ctx core.IntegrationCleanupContext) error { + return nil +} + +func (g *Grafana) Sync(ctx core.SyncContext) error { + baseURL, err := ctx.Integration.GetConfig("baseURL") + if err != nil { + return fmt.Errorf("error reading baseURL: %v", err) + } + + if baseURL == nil || strings.TrimSpace(string(baseURL)) == "" { + return fmt.Errorf("baseURL is required") + } + + metadata := IntegrationMetadata{} + if err := mapstructure.Decode(ctx.Integration.GetMetadata(), &metadata); err != 
nil { + return fmt.Errorf("failed to decode metadata: %v", err) + } + + metadata.BaseURL = strings.TrimSuffix(strings.TrimSpace(string(baseURL)), "/") + if err := ctx.Integration.SetMetadata(metadata); err != nil { + return fmt.Errorf("failed to set metadata: %v", err) + } + + ctx.Integration.Ready() + return nil +} + +func (g *Grafana) HandleRequest(ctx core.HTTPRequestContext) { + ctx.Response.WriteHeader(404) +} + +func (g *Grafana) CompareWebhookConfig(a, b any) (bool, error) { + return true, nil +} + +func (g *Grafana) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + return []core.IntegrationResource{}, nil +} + +func (g *Grafana) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { + return nil, nil +} + +func (g *Grafana) CleanupWebhook(ctx core.CleanupWebhookContext) error { + return nil +} diff --git a/pkg/integrations/grafana/on_alert_firing.go b/pkg/integrations/grafana/on_alert_firing.go new file mode 100644 index 0000000000..d413b46cea --- /dev/null +++ b/pkg/integrations/grafana/on_alert_firing.go @@ -0,0 +1,130 @@ +package grafana + +import ( + "encoding/json" + "fmt" + "net/http" + "strings" + + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type OnAlertFiring struct{} + +func (t *OnAlertFiring) Name() string { + return "grafana.onAlertFiring" +} + +func (t *OnAlertFiring) Label() string { + return "On Alert Firing" +} + +func (t *OnAlertFiring) Description() string { + return "Trigger when a Grafana alert rule is firing" +} + +func (t *OnAlertFiring) Documentation() string { + return `The On Alert Firing trigger starts a workflow when Grafana Unified Alerting sends a firing alert webhook. + +## Setup + +1. Create a contact point in Grafana (Alerting > Contact points) with type Webhook. +2. Use the webhook URL generated by SuperPlane for this trigger. +3. Make sure the alert rule routes to that contact point. 
+ +## Event Data + +The trigger emits the full Grafana webhook payload, including: +- status (firing/resolved) +- alerts array with labels and annotations +- groupLabels, commonLabels, commonAnnotations +- externalURL and other alerting metadata +` +} + +func (t *OnAlertFiring) Icon() string { + return "alert-triangle" +} + +func (t *OnAlertFiring) Color() string { + return "gray" +} + +func (t *OnAlertFiring) Configuration() []configuration.Field { + return []configuration.Field{} +} + +func (t *OnAlertFiring) Setup(ctx core.TriggerContext) error { + return ctx.Integration.RequestWebhook(struct{}{}) +} + +func (t *OnAlertFiring) Actions() []core.Action { + return []core.Action{} +} + +func (t *OnAlertFiring) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + return nil, nil +} + +func (t *OnAlertFiring) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + if len(ctx.Body) == 0 { + return http.StatusBadRequest, fmt.Errorf("empty body") + } + + var payload map[string]any + if err := json.Unmarshal(ctx.Body, &payload); err != nil { + return http.StatusBadRequest, fmt.Errorf("error parsing request body: %v", err) + } + + if !isFiringAlert(payload) { + return http.StatusOK, nil + } + + if err := ctx.Events.Emit("grafana.alert.firing", payload); err != nil { + return http.StatusInternalServerError, fmt.Errorf("error emitting event: %v", err) + } + + return http.StatusOK, nil +} + +func (t *OnAlertFiring) Cleanup(ctx core.TriggerContext) error { + return nil +} + +func isFiringAlert(payload map[string]any) bool { + status := extractString(payload["status"]) + if strings.EqualFold(status, "firing") { + return true + } + + alerts, ok := payload["alerts"] + if !ok { + return false + } + + alertList, ok := alerts.([]any) + if !ok { + return false + } + + for _, item := range alertList { + alert, ok := item.(map[string]any) + if !ok { + continue + } + if strings.EqualFold(extractString(alert["status"]), "firing") { + return true + } + } + + 
return false +} + +func extractString(value any) string { + text, ok := value.(string) + if !ok { + return "" + } + return strings.TrimSpace(text) +} diff --git a/pkg/integrations/grafana/query_data_source.go b/pkg/integrations/grafana/query_data_source.go new file mode 100644 index 0000000000..c7f090674a --- /dev/null +++ b/pkg/integrations/grafana/query_data_source.go @@ -0,0 +1,246 @@ +package grafana + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "net/http" + "strings" + "time" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type QueryDataSource struct{} + +type QueryDataSourceSpec struct { + DataSourceUID string `json:"dataSourceUid"` + Query string `json:"query"` + TimeFrom *string `json:"timeFrom,omitempty"` + TimeTo *string `json:"timeTo,omitempty"` + Format *string `json:"format,omitempty"` +} + +type grafanaQueryRequest struct { + Queries []grafanaQuery `json:"queries"` + From string `json:"from,omitempty"` + To string `json:"to,omitempty"` +} + +type grafanaQuery struct { + RefID string `json:"refId"` + DatasourceUID string `json:"datasourceUid"` + Expr string `json:"expr,omitempty"` + Query string `json:"query,omitempty"` + Format string `json:"format,omitempty"` +} + +func (q *QueryDataSource) Name() string { + return "grafana.queryDataSource" +} + +func (q *QueryDataSource) Label() string { + return "Query Data Source" +} + +func (q *QueryDataSource) Description() string { + return "Execute a query against a Grafana data source and return the result" +} + +func (q *QueryDataSource) Documentation() string { + return `The Query Data Source component executes a query against a Grafana data source using the Grafana Query API. 
+ +## Use Cases + +- **Metrics investigation**: Run PromQL or other datasource queries from workflows +- **Alert validation**: Validate alert conditions before escalation +- **Incident context**: Pull current metrics into incident workflows + +## Configuration + +- **Data Source UID**: The Grafana datasource UID to query +- **Query**: The datasource query (PromQL, InfluxQL, etc.) +- **Time From / Time To**: Optional time range (relative like "now-5m" or absolute) +- **Format**: Optional query format (depends on the datasource) + +## Output + +Returns the Grafana query API response JSON. +` +} + +func (q *QueryDataSource) Icon() string { + return "database" +} + +func (q *QueryDataSource) Color() string { + return "blue" +} + +func (q *QueryDataSource) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (q *QueryDataSource) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "dataSourceUid", + Label: "Data Source UID", + Type: configuration.FieldTypeString, + Required: true, + Description: "The Grafana datasource UID to query", + Placeholder: "prometheus", + }, + { + Name: "query", + Label: "Query", + Type: configuration.FieldTypeText, + Required: true, + Description: "The datasource query (PromQL, InfluxQL, etc.)", + Placeholder: "sum(rate(http_requests_total[5m]))", + }, + { + Name: "timeFrom", + Label: "Time From", + Type: configuration.FieldTypeString, + Required: false, + Description: "Start time (e.g. now-5m or 2024-01-01T00:00:00Z)", + Placeholder: "now-5m", + }, + { + Name: "timeTo", + Label: "Time To", + Type: configuration.FieldTypeString, + Required: false, + Description: "End time (e.g. 
now or 2024-01-01T01:00:00Z)", + Placeholder: "now", + }, + { + Name: "format", + Label: "Format", + Type: configuration.FieldTypeString, + Required: false, + Description: "Optional format passed to the datasource query", + }, + } +} + +func (q *QueryDataSource) Setup(ctx core.SetupContext) error { + spec := QueryDataSourceSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("error decoding configuration: %v", err) + } + + if strings.TrimSpace(spec.DataSourceUID) == "" { + return errors.New("dataSourceUid is required") + } + if strings.TrimSpace(spec.Query) == "" { + return errors.New("query is required") + } + + return nil +} + +func (q *QueryDataSource) Execute(ctx core.ExecutionContext) error { + spec := QueryDataSourceSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("error decoding configuration: %v", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration, true) + if err != nil { + return fmt.Errorf("error creating client: %v", err) + } + + request := grafanaQueryRequest{ + Queries: []grafanaQuery{ + { + RefID: "A", + DatasourceUID: strings.TrimSpace(spec.DataSourceUID), + Expr: strings.TrimSpace(spec.Query), + Query: strings.TrimSpace(spec.Query), + }, + }, + } + + if spec.TimeFrom != nil && strings.TrimSpace(*spec.TimeFrom) != "" { + request.From = strings.TrimSpace(*spec.TimeFrom) + } + + if spec.TimeTo != nil && strings.TrimSpace(*spec.TimeTo) != "" { + request.To = strings.TrimSpace(*spec.TimeTo) + } + + if request.From == "" || request.To == "" { + from, to := defaultTimeRange() + if request.From == "" { + request.From = from + } + if request.To == "" { + request.To = to + } + } + + if spec.Format != nil && strings.TrimSpace(*spec.Format) != "" { + request.Queries[0].Format = strings.TrimSpace(*spec.Format) + } + + body, err := json.Marshal(request) + if err != nil { + return fmt.Errorf("error marshaling request: %v", err) + } + + responseBody, status, 
err := client.execRequest(http.MethodPost, "/api/ds/query", bytes.NewReader(body), "application/json") + if err != nil { + return fmt.Errorf("error querying data source: %v", err) + } + + if status < 200 || status >= 300 { + return fmt.Errorf("grafana query failed with status %d: %s", status, string(responseBody)) + } + + var response map[string]any + if err := json.Unmarshal(responseBody, &response); err != nil { + return fmt.Errorf("error parsing response: %v", err) + } + + return ctx.ExecutionState.Emit( + core.DefaultOutputChannel.Name, + "grafana.query.result", + []any{response}, + ) +} + +func (q *QueryDataSource) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (q *QueryDataSource) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (q *QueryDataSource) Actions() []core.Action { + return []core.Action{} +} + +func (q *QueryDataSource) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (q *QueryDataSource) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (q *QueryDataSource) Cleanup(ctx core.SetupContext) error { + return nil +} + +func defaultTimeRange() (string, string) { + now := time.Now().UTC() + from := now.Add(-5 * time.Minute) + return fmt.Sprintf("%d", from.UnixMilli()), fmt.Sprintf("%d", now.UnixMilli()) +} diff --git a/web_src/src/assets/icons/integrations/grafana.svg b/web_src/src/assets/icons/integrations/grafana.svg new file mode 100644 index 0000000000..9457f19c82 --- /dev/null +++ b/web_src/src/assets/icons/integrations/grafana.svg @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/web_src/src/pages/workflowv2/mappers/grafana/index.ts b/web_src/src/pages/workflowv2/mappers/grafana/index.ts new file mode 100644 index 0000000000..7e7b9a8699 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/grafana/index.ts @@ -0,0 +1,16 @@ +import { ComponentBaseMapper, TriggerRenderer, 
EventStateRegistry } from "../types"; +import { buildActionStateRegistry } from "../utils"; +import { onAlertFiringTriggerRenderer } from "./on_alert_firing"; +import { queryDataSourceMapper } from "./query_data_source"; + +export const componentMappers: Record = { + queryDataSource: queryDataSourceMapper, +}; + +export const triggerRenderers: Record = { + onAlertFiring: onAlertFiringTriggerRenderer, +}; + +export const eventStateRegistry: Record = { + queryDataSource: buildActionStateRegistry("queried"), +}; diff --git a/web_src/src/pages/workflowv2/mappers/grafana/on_alert_firing.ts b/web_src/src/pages/workflowv2/mappers/grafana/on_alert_firing.ts new file mode 100644 index 0000000000..d445f81ea3 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/grafana/on_alert_firing.ts @@ -0,0 +1,114 @@ +import { getBackgroundColorClass } from "@/utils/colors"; +import { formatTimeAgo } from "@/utils/date"; +import { TriggerEventContext, TriggerRenderer, TriggerRendererContext } from "../types"; +import { TriggerProps } from "@/ui/trigger"; +import grafanaIcon from "@/assets/icons/integrations/grafana.svg"; +import { OnAlertFiringEventData } from "./types"; + +/** + * Renderer for the "grafana.onAlertFiring" trigger + */ +export const onAlertFiringTriggerRenderer: TriggerRenderer = { + getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { + const eventData = context.event?.data as OnAlertFiringEventData | undefined; + const alertName = getAlertName(eventData); + const status = eventData?.status || "firing"; + const subtitle = buildSubtitle(status, context.event?.createdAt); + + return { + title: alertName || "Grafana alert firing", + subtitle, + }; + }, + + getRootEventValues: (context: TriggerEventContext): Record => { + const eventData = context.event?.data as OnAlertFiringEventData | undefined; + + return { + Status: stringOrDash(eventData?.status || "firing"), + "Alert Name": stringOrDash(getAlertName(eventData)), + "Rule 
UID": stringOrDash(eventData?.ruleUid), + "Rule ID": stringOrDash(eventData?.ruleId), + "Org ID": stringOrDash(eventData?.orgId), + "External URL": stringOrDash(eventData?.externalURL), + }; + }, + + getTriggerProps: (context: TriggerRendererContext) => { + const { node, definition, lastEvent } = context; + const metadataItems = []; + + if (lastEvent?.data) { + const eventData = lastEvent.data as OnAlertFiringEventData; + const alertName = getAlertName(eventData); + if (alertName) { + metadataItems.push({ + icon: "bell", + label: alertName, + }); + } + } + + const props: TriggerProps = { + title: node.name || definition.label || "Unnamed trigger", + iconSrc: grafanaIcon, + collapsedBackground: getBackgroundColorClass(definition.color), + metadata: metadataItems, + }; + + if (lastEvent) { + const eventData = lastEvent.data as OnAlertFiringEventData | undefined; + const status = eventData?.status || "firing"; + const alertName = getAlertName(eventData); + const subtitle = buildSubtitle(status, lastEvent.createdAt); + + props.lastEventData = { + title: alertName || "Grafana alert firing", + subtitle, + receivedAt: new Date(lastEvent.createdAt), + state: "triggered", + eventId: lastEvent.id, + }; + } + + return props; + }, +}; + +function getAlertName(eventData?: OnAlertFiringEventData): string | undefined { + if (!eventData) return undefined; + + if (eventData.title && eventData.title.trim() !== "") { + return eventData.title; + } + + const commonLabel = eventData.commonLabels?.alertname; + if (commonLabel && commonLabel.trim() !== "") { + return commonLabel; + } + + const firstAlert = eventData.alerts?.[0]; + const labelName = firstAlert?.labels?.alertname; + if (labelName && labelName.trim() !== "") { + return labelName; + } + + return undefined; +} + +function buildSubtitle(status: string, createdAt?: string): string { + const timeAgo = createdAt ? 
formatTimeAgo(new Date(createdAt)) : ""; + if (status && timeAgo) { + return `${status} · ${timeAgo}`; + } + + return status || timeAgo; +} + +function stringOrDash(value?: unknown): string { + if (value === undefined || value === null || value === "") { + return "-"; + } + + return String(value); +} diff --git a/web_src/src/pages/workflowv2/mappers/grafana/query_data_source.ts b/web_src/src/pages/workflowv2/mappers/grafana/query_data_source.ts new file mode 100644 index 0000000000..f0332fcfee --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/grafana/query_data_source.ts @@ -0,0 +1,101 @@ +import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { getState, getStateMap, getTriggerRenderer } from ".."; +import { + ComponentBaseMapper, + ExecutionDetailsContext, + ComponentBaseContext, + ExecutionInfo, + NodeInfo, + OutputPayload, + SubtitleContext, +} from "../types"; +import { MetadataItem } from "@/ui/metadataList"; +import grafanaIcon from "@/assets/icons/integrations/grafana.svg"; +import { QueryDataSourceConfiguration } from "./types"; +import { formatTimeAgo } from "@/utils/date"; + +export const queryDataSourceMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; + const componentName = context.componentDefinition.name || "unknown"; + + return { + iconSrc: grafanaIcon, + collapsedBackground: "bg-white", + collapsed: context.node.isCollapsed, + title: context.node.name || context.componentDefinition.label || "Unnamed component", + eventSections: lastExecution ? 
baseEventSections(context.nodes, lastExecution, componentName) : undefined, + metadata: metadataList(context.node), + includeEmptyState: !lastExecution, + eventStateMap: getStateMap(componentName), + }; + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + + if (!outputs || !outputs.default || outputs.default.length === 0) { + return { Response: "No data returned" }; + } + + const payload = outputs.default[0]; + const responseData = payload?.data as Record | undefined; + + if (!responseData) { + return { Response: "No data returned" }; + } + + const details: Record = {}; + if (payload?.timestamp) { + details["Queried At"] = new Date(payload.timestamp).toLocaleString(); + } + try { + details["Response Data"] = JSON.stringify(responseData, null, 2); + } catch (error) { + details["Response Data"] = String(responseData); + } + + return details; + }, + + subtitle(context: SubtitleContext): string { + if (!context.execution.createdAt) return ""; + return formatTimeAgo(new Date(context.execution.createdAt)); + }, +}; + +function metadataList(node: NodeInfo): MetadataItem[] { + const metadata: MetadataItem[] = []; + const configuration = node.configuration as QueryDataSourceConfiguration; + + if (configuration?.dataSourceUid) { + metadata.push({ icon: "database", label: `Datasource: ${configuration.dataSourceUid}` }); + } + + if (configuration?.query) { + const preview = configuration.query.length > 50 ? configuration.query.substring(0, 50) + "..." 
: configuration.query; + metadata.push({ icon: "code", label: preview }); + } + + if (configuration?.format) { + metadata.push({ icon: "funnel", label: `Format: ${configuration.format}` }); + } + + return metadata; +} + +function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle: formatTimeAgo(new Date(execution.createdAt!)), + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/grafana/types.ts b/web_src/src/pages/workflowv2/mappers/grafana/types.ts new file mode 100644 index 0000000000..9a3a69e2a3 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/grafana/types.ts @@ -0,0 +1,26 @@ +export interface OnAlertFiringEventData { + status?: string; + title?: string; + ruleUid?: string; + ruleId?: number; + orgId?: number; + externalURL?: string; + alerts?: Array<{ + status?: string; + labels?: Record; + annotations?: Record; + startsAt?: string; + endsAt?: string; + }>; + groupLabels?: Record; + commonLabels?: Record; + commonAnnotations?: Record; +} + +export interface QueryDataSourceConfiguration { + dataSourceUid: string; + query: string; + timeFrom?: string; + timeTo?: string; + format?: string; +} diff --git a/web_src/src/pages/workflowv2/mappers/index.ts b/web_src/src/pages/workflowv2/mappers/index.ts index 645e2cda38..208b01d1ee 100644 --- a/web_src/src/pages/workflowv2/mappers/index.ts +++ b/web_src/src/pages/workflowv2/mappers/index.ts @@ -91,7 +91,11 @@ import { triggerRenderers as claudeTriggerRenderers, eventStateRegistry as claudeEventStateRegistry, } 
from "./claude/index"; - +import { + componentMappers as grafanaComponentMappers, + triggerRenderers as grafanaTriggerRenderers, + eventStateRegistry as grafanaEventStateRegistry, +} from "./grafana/index"; import { filterMapper, FILTER_STATE_REGISTRY } from "./filter"; import { sshMapper, SSH_STATE_REGISTRY } from "./ssh"; import { waitCustomFieldRenderer, waitMapper, WAIT_STATE_REGISTRY } from "./wait"; @@ -139,6 +143,7 @@ const appMappers: Record> = { discord: discordComponentMappers, openai: openaiComponentMappers, claude: claudeComponentMappers, + grafana: grafanaComponentMappers, }; const appTriggerRenderers: Record> = { @@ -157,6 +162,7 @@ const appTriggerRenderers: Record> = { discord: discordTriggerRenderers, openai: openaiTriggerRenderers, claude: claudeTriggerRenderers, + grafana: grafanaTriggerRenderers, }; const appEventStateRegistries: Record> = { @@ -175,6 +181,7 @@ const appEventStateRegistries: Record openai: openaiEventStateRegistry, claude: claudeEventStateRegistry, aws: awsEventStateRegistry, + grafana: grafanaEventStateRegistry, }; const componentAdditionalDataBuilders: Record = { From 7681abacb76925a22fcc822dd4e877f9b07a1fde Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Fri, 6 Feb 2026 17:46:51 -0300 Subject: [PATCH 004/160] docs: update component-review.rules.md (#2917) Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- .cursor/commands/component-review.rules.md | 158 ++++++++------------- 1 file changed, 60 insertions(+), 98 deletions(-) diff --git a/.cursor/commands/component-review.rules.md b/.cursor/commands/component-review.rules.md index 39845e50a9..172daafc3a 100644 --- a/.cursor/commands/component-review.rules.md +++ b/.cursor/commands/component-review.rules.md @@ -14,22 +14,17 @@ For integrations: listed in the integration's `Components()` or `Triggers()` out ### Naming -The component name shown in the UI is non-empty and matches: - - Core Components: uses the same name as the registration name - - For 
integration: - - Starts with - - Followed by a dot - - Followed by the name od the component - - First letter of the component should be lowercase, e.g. github.getIssue vs github.GetIssue - - The name should be camel-case - - Should not contain spaces, or underscores - - If the component is a trigger, it should start with `on` +- For core components: uses the same name as the registration name +- For integration components: name should follow the format `.`, e.g. `github.getIssue` vs `github.GetIssue` +- A component name should always use camel-case, and should never contain spaces, or underscore +- If the component is a trigger, it should start with `on` +- Trigger names should use the resource names that they reference, not actions, unless the trigger itself is about an specific action. For example, `github.onIssueComment` instead of `github.onIssueCommented` ### Label -The component label shown in the UI is non-empty and human-readable - - Uses Title Case - - No raw slug casing +- The component label shown in the UI is non-empty and human-readable +- Uses Title Case +- No raw slug casing ### Description @@ -37,15 +32,15 @@ The component description shown in the UI is non-empty, user-facing. Short, clea ### Documentation -The component documentation shown in the UI is non-empty. - - It is valid markdown - - If it uses titles, the biggest level should be ## +- The component documentation shown in the UI is non-empty. 
+- It is valid markdown +- If it uses titles, the biggest level should be ## ### Icon -The component icon shown in the UI is non-empty and maps to a valid UI icon - - Either Lucide Icon slug - - Or an existing custom asset +- The component icon shown in the UI is non-empty and maps to a valid UI icon +- Either Lucide Icon slug +- Or an existing custom asset ### Color @@ -53,106 +48,73 @@ The component color shown in the UI is non-empty and consistent with existing co ### Example output -The example output shown in the UI embeds an example - JSON file (e.g., `example_output.json`). +- The example output shown in the UI embeds an example +- JSON file (e.g., `example_output.json`). +- Example JSON files are valid and match the emitted payload structure. ### Configuration fields -Every configuration field shown in the UI has a: - - `Name` - - `Label` - - `Type` - - `Description` - - Required fields are marked `Required: true`. - -### Configuration inputs - -When possible, configuration never asks users to enter IDs that aren't easily -available to them (e.g., requiring a Discord channel ID). - -When users can choose from existing resources (e.g., a GitHub repository), -prefer a dropdown selector over manual entry. - -### Predicate filters - -For trigger filters and component configuration that use equality, non-equality, or regex matching, prefer `configuration.FieldTypeAnyPredicateList` and match values via `configuration.MatchesAnyPredicate`. Avoid ad-hoc wildcard or comma parsing unless `any-predicate-list` cannot express the requirement (document the exception in the component/trigger file). +- Every configuration field shown in the UI has a: `Name`, `Label`, `Type`, and `Description` +- Required fields are marked `Required: true`. +- Required fields should always be placed before optional fields +- When possible, configuration never asks users to enter IDs that aren't easily available to them (e.g., requiring a Discord channel ID). 
+- When users can choose from existing resources (e.g., a GitHub repository), always use `FieldTypeIntegrationResource`. Example: instead of having a `channelId` field of type `FieldTypeString`, use a `FieldTypeIntegrationResource` configuration field. +- For trigger filters and component configuration that use equality, non-equality, or regex matching, prefer `configuration.FieldTypeAnyPredicateList` and match values via `configuration.MatchesAnyPredicate`. Avoid ad-hoc wildcard or comma parsing unless `any-predicate-list` cannot express the requirement. If `configuration.FieldTypeAnyPredicateList` cannot meet your requirements, look for ways to extend it before developing a specific implementation. +- if filters are part of the trigger, we should always have a default filter for the most common use case. That makes it easier for the user to configure it, and we don't produce unnecessary events to the system. For example, the default `github.onPush` refs filter is for commits on the main branch. ### Output channels -The output channels shown in the UI include at least one channel -(or rely on default), and channel names/labels are non-empty. - -### Setup validation - -Setup validation in the UI enforces required configuration and -shows clear errors for missing inputs. - -### Actions/webhooks - -If actions or webhooks are available in the UI, -they validate inputs and show meaningful errors. - -## Code quality - -### Early returns - -Logic favors early returns over nested `else` blocks where applicable. +The output channels shown in the UI include at least one channel (or rely on default), and channel names/labels are non-empty. -### Transaction safety +### Setup -Any DB access in transactional flows uses -`*InTransaction()` variants, never `database.Conn()`. 
+- If the component/trigger uses an `FieldTypeIntegrationResource` configuration field, `Setup()` must verify that the resource being referenced exists, and once verified, information about it must be stored in a struct in the component/trigger metadata. -### No implicit any +### Webhooks -TypeScript (if applicable) uses explicit types for -inline handler parameters. +- If the webhook is not configured through the integration, use `ctx.Webhook.Setup()`. If the webhook is configured through the integration, `ctx.Integration.RequestWebhook()` and implement the integration's `SetupWebhook`, `CleanupWebhook` +- We should always aim to share webhooks between components, if they use the same underlying event configuration. Use `CompareWebhookConfig` for that. For example, if we have two `github.onPush` triggers, one for main branch, and one for release branches, both of those triggers use the same webhook in GitHub. -### No `any` abuse +### Triggers -Avoid `as any` / `@ts-ignore` unless justified -(prefer narrow types or `@ts-expect-error` with comment). +- A trigger is always scoped to a (1) specific resource type, (2) specific resource, (3) some additional things. Examples: + - `semaphore.onPipelineDone`: we select the specific project we want to listen to + - `github.onPush`: we select the repository we want to listen to + - `pagerduty.onIncident`: we select the service -### Dependencies +## Code Quality -New imports are used; no dead code or unused helpers. +### Unit testing -### Constants +- Static methods like `Configuration()`, `Label()`, `Name()` do not need to be unit tested. +- Do not make dummy implementations of the `pkg/core` interfaces in unit tests. Use contexts already available in [test/support/contexts](https://github.com/superplanehq/superplane/blob/main/test/support/contexts/contexts.go) for that. +- Tests cover validation failures and error handling paths. 
+- For `Component` interface implementations, tests for `Setup()` and `Execute()` must be written. If the component has `Actions()`, they must be unit tested as well +- For `Trigger` interface implementations, tests for `Setup()` and `HandleWebhook()` must be written. If the component has `Actions()`, they must be unit tested as well +- For `Integration` interface implementations, tests for `Sync` must be written. If the component has `Actions()`, they must be unit tested as well -Reusable strings (names, payload types) are defined as constants when repeated. +### General principles -### JSON examples +- Favor early returns and the use of helper functions over nested `if/else` blocks. +- Reusable strings (names, payload types) are defined as constants when repeated. +- New imports are used; no dead code or unused helpers. -Example JSON files are valid and match the emitted payload structure. +### Golang -## Testing +- Prefer `any` over `interface{}` types +- When checking for the existence of an item on a list, use `slice.Contains` or `slice.ContainsFunc` +- When naming variables, avoid names like `*Str` or `*UUID`; Go is a typed language, we don't need types in the variables names +- When writing tests that require specific timestamps to be used, always use timestamps based off of `time.Now()`, instead of absolute times created with `time.Date` +- **Check transaction usage**: Any DB access in transactional flows uses `*InTransaction()` variants, never `database.Conn()` -### Unit tests +### TypeScript -Component logic has focused tests in `*_test.go` - -### Error paths - -Tests cover validation failures and error handling paths. - -### Setup/execute tests - -Tests cover `Setup()` and `Execute()` behavior where applicable. +- **No implicit any**: use explicit types for inline handler parameters +- **No `any` abuse**: avoid `as any` / `@ts-ignore` unless justified (prefer narrow types or `@ts-expect-error` with comment). 
## Copy -### User-facing text - -All user-facing strings are short, clear, and consise - -### Clarity - -Labels, descriptions, and docs avoid internal jargon and explain intent. - -### Consistency - -Terms used in docs match configuration labels and output fields. - -### Formatting - -Markdown in the component documentation renders cleanly (no malformed code fences or headings). +- **User-facing text**: all user-facing strings are short, clear, and consise +- **Clarity**: Labels, descriptions, and docs avoid internal jargon and explain intent. +- **Consistency**: Terms used in docs match configuration labels and output fields. +- **Formatting**: Markdown in the component documentation renders cleanly (no malformed code fences or headings). From d3b67d48c65eac6cb8f05dad3f2c902e3a1ec58a Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Fri, 6 Feb 2026 18:55:05 -0300 Subject: [PATCH 005/160] fix: enforce limit on HTTP responses (#2918) Related to https://github.com/superplanehq/superplane/issues/2915 This pull request adds a limit on the amount of data the HTTPContext accepts on responses, safeguarding the engine against components using too much memory on HTTP requests. Also, it indirectly puts a limit on the size of the events that components and triggers emit since the events emitted usually come from HTTP responses. An initial limit of 512KB is used, but might adjusted based on new integrations and components. 
Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- pkg/registry/http.go | 85 ++++++++++++++++++++++++++++++++++----- pkg/registry/http_test.go | 48 ++++++++++++++++++++++ pkg/server/server.go | 12 +++++- 3 files changed, 133 insertions(+), 12 deletions(-) diff --git a/pkg/registry/http.go b/pkg/registry/http.go index c8ba813da4..66cb087392 100644 --- a/pkg/registry/http.go +++ b/pkg/registry/http.go @@ -3,6 +3,7 @@ package registry import ( "context" "fmt" + "io" "net" "net/http" "net/url" @@ -12,21 +13,24 @@ import ( ) type HTTPContext struct { - client *http.Client - dialer *net.Dialer - blockedHosts []string - privateIPRanges []*net.IPNet + client *http.Client + dialer *net.Dialer + blockedHosts []string + privateIPRanges []*net.IPNet + maxResponseBytes int64 } type HTTPOptions struct { - BlockedHosts []string - PrivateIPRanges []string + BlockedHosts []string + PrivateIPRanges []string + MaxResponseBytes int64 } func NewHTTPContext(options HTTPOptions) (*HTTPContext, error) { httpCtx := &HTTPContext{ - blockedHosts: options.BlockedHosts, - privateIPRanges: make([]*net.IPNet, 0), + blockedHosts: options.BlockedHosts, + privateIPRanges: make([]*net.IPNet, 0), + maxResponseBytes: options.MaxResponseBytes, } for _, cidr := range options.PrivateIPRanges { @@ -94,7 +98,7 @@ func NewHTTPContext(options HTTPOptions) (*HTTPContext, error) { func (c *HTTPContext) Do(request *http.Request) (*http.Response, error) { if len(c.privateIPRanges) == 0 && len(c.blockedHosts) == 0 { - return c.client.Do(request) + return c.do(request) } err := c.validateURL(request.URL) @@ -102,7 +106,68 @@ func (c *HTTPContext) Do(request *http.Request) (*http.Response, error) { return nil, err } - return c.client.Do(request) + return c.do(request) +} + +func (c *HTTPContext) do(request *http.Request) (*http.Response, error) { + resp, err := c.client.Do(request) + if err != nil { + return nil, err + } + + if c.maxResponseBytes <= 0 { + return resp, nil + } + + // + // 
Content-Length is not truly reliable, + // but it's a good first check to enforce the maximum response size. + // + if resp.ContentLength > c.maxResponseBytes { + _ = resp.Body.Close() + return nil, fmt.Errorf("response too large: %d bytes exceeds maximum size of %d bytes", resp.ContentLength, c.maxResponseBytes) + } + + // + // We replace the body with a LimitedReadCloser that will return an error + // if the response body is larger than the maximum allowed size. + // + resp.Body = &LimitedReadCloser{ + reader: resp.Body, + remaining: c.maxResponseBytes, + maxResponseSize: c.maxResponseBytes, + } + + return resp, nil +} + +type LimitedReadCloser struct { + reader io.ReadCloser + remaining int64 + maxResponseSize int64 +} + +func (r *LimitedReadCloser) Read(p []byte) (int, error) { + if r.remaining <= 0 { + var buf [1]byte + n, err := r.reader.Read(buf[:]) + if n > 0 { + return 0, fmt.Errorf("response too large: exceeds maximum size of %d bytes", r.maxResponseSize) + } + return 0, err + } + + if int64(len(p)) > r.remaining { + p = p[:r.remaining] + } + + n, err := r.reader.Read(p) + r.remaining -= int64(n) + return n, err +} + +func (r *LimitedReadCloser) Close() error { + return r.reader.Close() } /* diff --git a/pkg/registry/http_test.go b/pkg/registry/http_test.go index 49a5613823..8a4ee862e0 100644 --- a/pkg/registry/http_test.go +++ b/pkg/registry/http_test.go @@ -1,6 +1,7 @@ package registry import ( + "io" "net" "net/http" "net/http/httptest" @@ -243,6 +244,53 @@ func Test__HTTPContext__Do__RedirectToBlockedHost(t *testing.T) { assert.Equal(t, int32(1), hits.Load()) } +func Test__HTTPContext__Do__ResponseTooLarge_ContentLength(t *testing.T) { + ctx, err := NewHTTPContext(HTTPOptions{ + MaxResponseBytes: 5, + }) + require.NoError(t, err) + + testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Length", "6") + _, _ = w.Write([]byte("123456")) + })) + t.Cleanup(testServer.Close) + + req, err 
:= http.NewRequest(http.MethodGet, testServer.URL, nil) + require.NoError(t, err) + + _, err = ctx.Do(req) + require.Error(t, err) + assert.Contains(t, err.Error(), "response too large") +} + +func Test__HTTPContext__Do__ResponseTooLarge_Streaming(t *testing.T) { + ctx, err := NewHTTPContext(HTTPOptions{ + MaxResponseBytes: 5, + }) + require.NoError(t, err) + + testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if flusher, ok := w.(http.Flusher); ok { + flusher.Flush() + } + _, _ = w.Write([]byte("123456")) + })) + t.Cleanup(testServer.Close) + + req, err := http.NewRequest(http.MethodGet, testServer.URL, nil) + require.NoError(t, err) + + resp, err := ctx.Do(req) + require.NoError(t, err) + t.Cleanup(func() { _ = resp.Body.Close() }) + + body, err := io.ReadAll(resp.Body) + require.Error(t, err) + assert.Contains(t, err.Error(), "response too large") + assert.Len(t, body, 5) +} + func Test__HTTPContext__ValidateIP__DefaultConfiguration(t *testing.T) { ctx, err := NewHTTPContext(defaultHTTPOptions()) require.NoError(t, err) diff --git a/pkg/server/server.go b/pkg/server/server.go index f8ae5cae63..40c4986339 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -340,8 +340,9 @@ func Start() { } registry, err := registry.NewRegistry(encryptorInstance, registry.HTTPOptions{ - BlockedHosts: getBlockedHTTPHosts(), - PrivateIPRanges: getPrivateIPRanges(), + BlockedHosts: getBlockedHTTPHosts(), + PrivateIPRanges: getPrivateIPRanges(), + MaxResponseBytes: DefaultMaxHTTPResponseBytes, }) if err != nil { @@ -378,6 +379,13 @@ func getWebhookBaseURL(baseURL string) string { return webhookBaseURL } +/* + * 512KB is the default maximum response size for HTTP responses. + * This prevents component/trigger implementations from using too much memory, + * and also from emitting large events. 
+ */ +var DefaultMaxHTTPResponseBytes int64 = 512 * 1024 + /* * Default blocked HTTP hosts include: * - Cloud metadata endpoints From 094365c474ad3e0dc93bb4e7234eaad635f5acb2 Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Fri, 6 Feb 2026 19:01:57 -0300 Subject: [PATCH 006/160] docs: include security considerations on component review rules (#2919) Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- .cursor/commands/component-review.rules.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.cursor/commands/component-review.rules.md b/.cursor/commands/component-review.rules.md index 172daafc3a..233993223b 100644 --- a/.cursor/commands/component-review.rules.md +++ b/.cursor/commands/component-review.rules.md @@ -82,6 +82,13 @@ The output channels shown in the UI include at least one channel (or rely on def - `github.onPush`: we select the repository we want to listen to - `pagerduty.onIncident`: we select the service +### Security + +- Components should always execute HTTP requests using the `HTTPContext` available to them, and never use `net/http` to do so +- Components should never import `pkg/models` and interact with database directly, only through methods provided through core interfaces +- HandleWebhook() implementations in components/triggers should always verify that the requests are authenticated using the secret in the webhook +- HandleRequest() implementations in integrations should always verify that the requests are authenticated using the secret in the webhook + ## Code Quality ### Unit testing From e0964cd700ab2fb61ab116ecf3a7418bd4ab2eea Mon Sep 17 00:00:00 2001 From: Muhammad Fuzail Zubari Date: Sat, 7 Feb 2026 16:16:08 +0500 Subject: [PATCH 007/160] fix for grafana visibility in integrations list. 
Signed-off-by: Muhammad Fuzail Zubari --- pkg/integrations/grafana/example.go | 28 +++++++++++++++++ .../grafana/example_data_on_alert_firing.json | 21 +++++++++++++ .../example_output_query_data_source.json | 30 +++++++++++++++++++ pkg/integrations/grafana/grafana.go | 4 +-- pkg/server/server.go | 1 + .../mappers/grafana/on_alert_firing.ts | 2 +- 6 files changed, 82 insertions(+), 4 deletions(-) create mode 100644 pkg/integrations/grafana/example.go create mode 100644 pkg/integrations/grafana/example_data_on_alert_firing.json create mode 100644 pkg/integrations/grafana/example_output_query_data_source.json diff --git a/pkg/integrations/grafana/example.go b/pkg/integrations/grafana/example.go new file mode 100644 index 0000000000..fdbe6c3ccf --- /dev/null +++ b/pkg/integrations/grafana/example.go @@ -0,0 +1,28 @@ +package grafana + +import ( + _ "embed" + "sync" + + "github.com/superplanehq/superplane/pkg/utils" +) + +//go:embed example_output_query_data_source.json +var exampleOutputQueryDataSourceBytes []byte + +//go:embed example_data_on_alert_firing.json +var exampleDataOnAlertFiringBytes []byte + +var exampleOutputQueryDataSourceOnce sync.Once +var exampleOutputQueryDataSource map[string]any + +var exampleDataOnAlertFiringOnce sync.Once +var exampleDataOnAlertFiring map[string]any + +func (q *QueryDataSource) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputQueryDataSourceOnce, exampleOutputQueryDataSourceBytes, &exampleOutputQueryDataSource) +} + +func (t *OnAlertFiring) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnAlertFiringOnce, exampleDataOnAlertFiringBytes, &exampleDataOnAlertFiring) +} diff --git a/pkg/integrations/grafana/example_data_on_alert_firing.json b/pkg/integrations/grafana/example_data_on_alert_firing.json new file mode 100644 index 0000000000..c64a1dcae8 --- /dev/null +++ b/pkg/integrations/grafana/example_data_on_alert_firing.json @@ -0,0 +1,21 @@ +{ + "status": 
"firing", + "title": "High error rate", + "ruleUid": "alert_rule_uid", + "alerts": [ + { + "status": "firing", + "labels": { + "alertname": "HighErrorRate", + "service": "api" + }, + "annotations": { + "summary": "Error rate above threshold" + } + } + ], + "commonLabels": { + "alertname": "HighErrorRate" + }, + "externalURL": "http://grafana.local" +} diff --git a/pkg/integrations/grafana/example_output_query_data_source.json b/pkg/integrations/grafana/example_output_query_data_source.json new file mode 100644 index 0000000000..b8b8b5bb73 --- /dev/null +++ b/pkg/integrations/grafana/example_output_query_data_source.json @@ -0,0 +1,30 @@ +{ + "data": { + "results": { + "A": { + "frames": [ + { + "schema": { + "fields": [ + { + "name": "time", + "type": "time" + }, + { + "name": "value", + "type": "number" + } + ] + }, + "data": { + "values": [ + ["2026-02-07T08:00:00Z", "2026-02-07T08:01:00Z"], + [1, 1] + ] + } + } + ] + } + } + } +} diff --git a/pkg/integrations/grafana/grafana.go b/pkg/integrations/grafana/grafana.go index cdbf957a6b..33d56fa12f 100644 --- a/pkg/integrations/grafana/grafana.go +++ b/pkg/integrations/grafana/grafana.go @@ -109,9 +109,7 @@ func (g *Grafana) Sync(ctx core.SyncContext) error { } metadata.BaseURL = strings.TrimSuffix(strings.TrimSpace(string(baseURL)), "/") - if err := ctx.Integration.SetMetadata(metadata); err != nil { - return fmt.Errorf("failed to set metadata: %v", err) - } + ctx.Integration.SetMetadata(metadata) ctx.Integration.Ready() return nil diff --git a/pkg/server/server.go b/pkg/server/server.go index 40c4986339..d295d856db 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -39,6 +39,7 @@ import ( _ "github.com/superplanehq/superplane/pkg/integrations/datadog" _ "github.com/superplanehq/superplane/pkg/integrations/daytona" _ "github.com/superplanehq/superplane/pkg/integrations/discord" + _ "github.com/superplanehq/superplane/pkg/integrations/grafana" _ 
"github.com/superplanehq/superplane/pkg/integrations/github" _ "github.com/superplanehq/superplane/pkg/integrations/jira" _ "github.com/superplanehq/superplane/pkg/integrations/openai" diff --git a/web_src/src/pages/workflowv2/mappers/grafana/on_alert_firing.ts b/web_src/src/pages/workflowv2/mappers/grafana/on_alert_firing.ts index d445f81ea3..07f238ae59 100644 --- a/web_src/src/pages/workflowv2/mappers/grafana/on_alert_firing.ts +++ b/web_src/src/pages/workflowv2/mappers/grafana/on_alert_firing.ts @@ -99,7 +99,7 @@ function getAlertName(eventData?: OnAlertFiringEventData): string | undefined { function buildSubtitle(status: string, createdAt?: string): string { const timeAgo = createdAt ? formatTimeAgo(new Date(createdAt)) : ""; if (status && timeAgo) { - return `${status} · ${timeAgo}`; + return `${status} - ${timeAgo}`; } return status || timeAgo; From 526a2fae6fc71491b52b554df2eea19d2bdfbae0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Sat, 7 Feb 2026 20:07:24 +0100 Subject: [PATCH 008/160] fix: Component docs sync (#2935) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit fixes https://github.com/superplanehq/docs/issues/46 fixes https://github.com/superplanehq/superplane/issues/1845 --------- Signed-off-by: Igor Šarčević Co-authored-by: Cursor Agent Signed-off-by: Muhammad Fuzail Zubari --- docs/components/AWS.mdx | 2 -- docs/components/Claude.mdx | 2 -- docs/components/Cloudflare.mdx | 2 -- docs/components/Core.mdx | 2 +- docs/components/Dash0.mdx | 2 -- docs/components/Datadog.mdx | 2 -- docs/components/Daytona.mdx | 2 -- docs/components/Discord.mdx | 2 -- docs/components/GitHub.mdx | 2 -- docs/components/Jira.mdx | 2 -- docs/components/OpenAI.mdx | 2 -- docs/components/PagerDuty.mdx | 2 -- docs/components/Rootly.mdx | 2 -- docs/components/SMTP.mdx | 2 -- docs/components/Semaphore.mdx | 2 -- docs/components/SendGrid.mdx | 2 -- docs/components/Slack.mdx | 2 -- pkg/components/ssh/ssh.go 
| 2 +- scripts/generate_components_docs.go | 22 ++++++++++++---------- 19 files changed, 14 insertions(+), 44 deletions(-) diff --git a/docs/components/AWS.mdx b/docs/components/AWS.mdx index 3d213125eb..2cabcb5fd5 100644 --- a/docs/components/AWS.mdx +++ b/docs/components/AWS.mdx @@ -1,7 +1,5 @@ --- title: "AWS" -sidebar: - order: 2 --- Manage resources and execute AWS commands in workflows diff --git a/docs/components/Claude.mdx b/docs/components/Claude.mdx index 064248912b..7a269fdd84 100644 --- a/docs/components/Claude.mdx +++ b/docs/components/Claude.mdx @@ -1,7 +1,5 @@ --- title: "Claude" -sidebar: - order: 3 --- Use Claude models in workflows diff --git a/docs/components/Cloudflare.mdx b/docs/components/Cloudflare.mdx index 6b42cd58e2..e5ee42044f 100644 --- a/docs/components/Cloudflare.mdx +++ b/docs/components/Cloudflare.mdx @@ -1,7 +1,5 @@ --- title: "Cloudflare" -sidebar: - order: 4 --- Manage Cloudflare zones, rules, and DNS diff --git a/docs/components/Core.mdx b/docs/components/Core.mdx index 0cab1c832d..0c0fae5f5d 100644 --- a/docs/components/Core.mdx +++ b/docs/components/Core.mdx @@ -513,7 +513,7 @@ Choose **SSH key** or **Password**, then select the organization Secret and the - **Host**, **Port** (default 22), **Username**: Connection details. - **Command**: The command to run (supports expressions). -- **Working directory**: Optional; runs "cd && ". +- **Working directory**: Optional; Changes to this directory before running the command. - **Timeout (seconds)**: How long the command may run (default 60). 
### Output diff --git a/docs/components/Dash0.mdx b/docs/components/Dash0.mdx index d17af3e98b..faacc53df3 100644 --- a/docs/components/Dash0.mdx +++ b/docs/components/Dash0.mdx @@ -1,7 +1,5 @@ --- title: "Dash0" -sidebar: - order: 5 --- Connect to Dash0 to query data using Prometheus API diff --git a/docs/components/Datadog.mdx b/docs/components/Datadog.mdx index ea8f7ee9ad..8369e4d954 100644 --- a/docs/components/Datadog.mdx +++ b/docs/components/Datadog.mdx @@ -1,7 +1,5 @@ --- title: "Datadog" -sidebar: - order: 6 --- Create events in Datadog diff --git a/docs/components/Daytona.mdx b/docs/components/Daytona.mdx index fa1189dc37..c89940cf73 100644 --- a/docs/components/Daytona.mdx +++ b/docs/components/Daytona.mdx @@ -1,7 +1,5 @@ --- title: "Daytona" -sidebar: - order: 7 --- Execute code in isolated sandbox environments diff --git a/docs/components/Discord.mdx b/docs/components/Discord.mdx index 5651752656..6104895626 100644 --- a/docs/components/Discord.mdx +++ b/docs/components/Discord.mdx @@ -1,7 +1,5 @@ --- title: "Discord" -sidebar: - order: 8 --- Send messages to Discord channels diff --git a/docs/components/GitHub.mdx b/docs/components/GitHub.mdx index 48a421cdb3..ea8c029e49 100644 --- a/docs/components/GitHub.mdx +++ b/docs/components/GitHub.mdx @@ -1,7 +1,5 @@ --- title: "GitHub" -sidebar: - order: 9 --- Manage and react to changes in your GitHub repositories diff --git a/docs/components/Jira.mdx b/docs/components/Jira.mdx index 649dd22ad5..449e0c4c5d 100644 --- a/docs/components/Jira.mdx +++ b/docs/components/Jira.mdx @@ -1,7 +1,5 @@ --- title: "Jira" -sidebar: - order: 10 --- Manage and react to issues in Jira diff --git a/docs/components/OpenAI.mdx b/docs/components/OpenAI.mdx index 6f8fff7856..341b25f546 100644 --- a/docs/components/OpenAI.mdx +++ b/docs/components/OpenAI.mdx @@ -1,7 +1,5 @@ --- title: "OpenAI" -sidebar: - order: 11 --- Generate text responses with OpenAI models diff --git a/docs/components/PagerDuty.mdx 
b/docs/components/PagerDuty.mdx index 2a9cb49b06..6a8c9e60d5 100644 --- a/docs/components/PagerDuty.mdx +++ b/docs/components/PagerDuty.mdx @@ -1,7 +1,5 @@ --- title: "PagerDuty" -sidebar: - order: 12 --- Manage and react to incidents in PagerDuty diff --git a/docs/components/Rootly.mdx b/docs/components/Rootly.mdx index 86eaa108bd..ba2ce32e62 100644 --- a/docs/components/Rootly.mdx +++ b/docs/components/Rootly.mdx @@ -1,7 +1,5 @@ --- title: "Rootly" -sidebar: - order: 13 --- Manage and react to incidents in Rootly diff --git a/docs/components/SMTP.mdx b/docs/components/SMTP.mdx index c0965c7207..357a35a352 100644 --- a/docs/components/SMTP.mdx +++ b/docs/components/SMTP.mdx @@ -1,7 +1,5 @@ --- title: "SMTP" -sidebar: - order: 14 --- Send emails via any SMTP server diff --git a/docs/components/Semaphore.mdx b/docs/components/Semaphore.mdx index 54e0ac2af8..0f054c97a3 100644 --- a/docs/components/Semaphore.mdx +++ b/docs/components/Semaphore.mdx @@ -1,7 +1,5 @@ --- title: "Semaphore" -sidebar: - order: 15 --- Run and react to your Semaphore workflows diff --git a/docs/components/SendGrid.mdx b/docs/components/SendGrid.mdx index 8cb7f547b4..f191b59141 100644 --- a/docs/components/SendGrid.mdx +++ b/docs/components/SendGrid.mdx @@ -1,7 +1,5 @@ --- title: "SendGrid" -sidebar: - order: 16 --- Send transactional and marketing email with SendGrid diff --git a/docs/components/Slack.mdx b/docs/components/Slack.mdx index 7891472e50..928d277077 100644 --- a/docs/components/Slack.mdx +++ b/docs/components/Slack.mdx @@ -1,7 +1,5 @@ --- title: "Slack" -sidebar: - order: 17 --- Send and react to Slack messages and interactions diff --git a/pkg/components/ssh/ssh.go b/pkg/components/ssh/ssh.go index 10b18a73fd..691bf342f1 100644 --- a/pkg/components/ssh/ssh.go +++ b/pkg/components/ssh/ssh.go @@ -74,7 +74,7 @@ Choose **SSH key** or **Password**, then select the organization Secret and the - **Host**, **Port** (default 22), **Username**: Connection details. 
- **Command**: The command to run (supports expressions). -- **Working directory**: Optional; runs "cd && ". +- **Working directory**: Optional; Changes to this directory before running the command. - **Timeout (seconds)**: How long the command may run (default 60). ## Output diff --git a/scripts/generate_components_docs.go b/scripts/generate_components_docs.go index 09813b68b9..483b53f1b1 100644 --- a/scripts/generate_components_docs.go +++ b/scripts/generate_components_docs.go @@ -41,8 +41,8 @@ func main() { exitWithError(err) } - for i, integration := range integrations { - if err := writeIntegrationDocs(integration, i+2); err != nil { + for _, integration := range integrations { + if err := writeIntegrationDocs(integration); err != nil { exitWithError(err) } } @@ -54,14 +54,14 @@ func createOutputDirectory() { } } -func writeIntegrationDocs(integration core.Integration, order int) error { +func writeIntegrationDocs(integration core.Integration) error { components := integration.Components() triggers := integration.Triggers() sort.Slice(components, func(i, j int) bool { return components[i].Name() < components[j].Name() }) sort.Slice(triggers, func(i, j int) bool { return triggers[i].Name() < triggers[j].Name() }) - return writeIntegrationIndex(filepath.Join(docsRoot, fmt.Sprintf("%s.mdx", integrationFilename(integration))), integration, components, triggers, order) + return writeIntegrationIndex(filepath.Join(docsRoot, fmt.Sprintf("%s.mdx", integrationFilename(integration))), integration, components, triggers) } func writeCoreComponentsDoc(components []core.Component, triggers []core.Trigger) error { @@ -73,7 +73,8 @@ func writeCoreComponentsDoc(components []core.Component, triggers []core.Trigger sort.Slice(triggers, func(i, j int) bool { return triggers[i].Name() < triggers[j].Name() }) var buf bytes.Buffer - writeFrontMatter(&buf, "Core", 1) + coreOrder := 1 + writeFrontMatter(&buf, "Core", &coreOrder) writeOverviewSection(&buf, "Built-in SuperPlane 
components.") writeCardGridTriggers(&buf, triggers) writeCardGridComponents(&buf, components) @@ -88,10 +89,9 @@ func writeIntegrationIndex( integration core.Integration, components []core.Component, triggers []core.Trigger, - order int, ) error { var buf bytes.Buffer - writeFrontMatter(&buf, integration.Label(), order) + writeFrontMatter(&buf, integration.Label(), nil) writeOverviewSection(&buf, integration.Description()) writeCardGridTriggers(&buf, triggers) @@ -109,11 +109,13 @@ func writeIntegrationIndex( return writeFile(path, buf.Bytes()) } -func writeFrontMatter(buf *bytes.Buffer, title string, order int) { +func writeFrontMatter(buf *bytes.Buffer, title string, order *int) { buf.WriteString("---\n") buf.WriteString(fmt.Sprintf("title: \"%s\"\n", escapeQuotes(title))) - buf.WriteString("sidebar:\n") - buf.WriteString(fmt.Sprintf(" order: %d\n", order)) + if order != nil { + buf.WriteString("sidebar:\n") + buf.WriteString(fmt.Sprintf(" order: %d\n", *order)) + } buf.WriteString("---\n\n") } From 5a962d49fc6cc39297bac9696bb9ea4d692b369e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Sun, 8 Feb 2026 12:18:00 +0100 Subject: [PATCH 009/160] chore: Rename claude.createMessage -> claude.textPrompt (#2948) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit For consistency with openai. No one is using this component yet, no need to write data migrations. 
--------- Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Claude.mdx | 8 +-- pkg/integrations/claude/claude.go | 2 +- pkg/integrations/claude/example.go | 15 +++--- .../claude/example_output_create_message.json | 31 ----------- .../claude/example_output_text_prompt.json | 31 +++++++++++ .../{create_message.go => text_prompt.go} | 51 ++++++++++--------- ...te_message_test.go => text_prompt_test.go} | 12 ++--- .../pages/workflowv2/mappers/claude/index.ts | 4 +- 8 files changed, 78 insertions(+), 76 deletions(-) delete mode 100644 pkg/integrations/claude/example_output_create_message.json create mode 100644 pkg/integrations/claude/example_output_text_prompt.json rename pkg/integrations/claude/{create_message.go => text_prompt.go} (81%) rename pkg/integrations/claude/{create_message_test.go => text_prompt_test.go} (97%) diff --git a/docs/components/Claude.mdx b/docs/components/Claude.mdx index 7a269fdd84..e15e71f41f 100644 --- a/docs/components/Claude.mdx +++ b/docs/components/Claude.mdx @@ -9,18 +9,18 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; ## Actions - + ## Instructions To get new Claude API key, go to [platform.claude.com](https://platform.claude.com). - + -## Create Message +## Text Prompt -The Create Message component uses Anthropic's Claude models to generate text responses. +The Text Prompt component uses Anthropic's Claude models to generate text responses. 
### Use Cases diff --git a/pkg/integrations/claude/claude.go b/pkg/integrations/claude/claude.go index 6120238d71..45c3045dae 100644 --- a/pkg/integrations/claude/claude.go +++ b/pkg/integrations/claude/claude.go @@ -50,7 +50,7 @@ func (i *Claude) Configuration() []configuration.Field { func (i *Claude) Components() []core.Component { return []core.Component{ - &CreateMessage{}, + &TextPrompt{}, } } diff --git a/pkg/integrations/claude/example.go b/pkg/integrations/claude/example.go index f3b8fafdcc..21d2fa4f8f 100644 --- a/pkg/integrations/claude/example.go +++ b/pkg/integrations/claude/example.go @@ -2,16 +2,17 @@ package claude import ( _ "embed" - "github.com/superplanehq/superplane/pkg/utils" "sync" + + "github.com/superplanehq/superplane/pkg/utils" ) -//go:embed example_output_create_message.json -var exampleOutputCreateMessageBytes []byte +//go:embed example_output_text_prompt.json +var exampleOutputTextPromptBytes []byte -var exampleOutputCreateMessageOnce sync.Once -var exampleOutputCreateMessage map[string]any +var exampleOutputTextPromptOnce sync.Once +var exampleOutputTextPrompt map[string]any -func (c *CreateMessage) ExampleOutput() map[string]any { - return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateMessageOnce, exampleOutputCreateMessageBytes, &exampleOutputCreateMessage) +func (c *TextPrompt) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputTextPromptOnce, exampleOutputTextPromptBytes, &exampleOutputTextPrompt) } diff --git a/pkg/integrations/claude/example_output_create_message.json b/pkg/integrations/claude/example_output_create_message.json deleted file mode 100644 index 2fd3c0e729..0000000000 --- a/pkg/integrations/claude/example_output_create_message.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "type": "claude.message", - "data": { - "id": "msg_01X9JGt5...123456", - "model": "claude-3-5-sonnet-latest", - "text": "Here is the summary of the deployment logs you requested...", - "stopReason": "end_turn", - 
"usage": { - "input_tokens": 45, - "output_tokens": 120 - }, - "response": { - "id": "msg_01X9JGt5...123456", - "type": "message", - "role": "assistant", - "model": "claude-3-5-sonnet-latest", - "content": [ - { - "type": "text", - "text": "Here is the summary of the deployment logs you requested..." - } - ], - "stop_reason": "end_turn", - "usage": { - "input_tokens": 45, - "output_tokens": 120 - } - } - }, - "timestamp": "2026-02-06T12:00:00Z" -} \ No newline at end of file diff --git a/pkg/integrations/claude/example_output_text_prompt.json b/pkg/integrations/claude/example_output_text_prompt.json new file mode 100644 index 0000000000..5cf0a01ec7 --- /dev/null +++ b/pkg/integrations/claude/example_output_text_prompt.json @@ -0,0 +1,31 @@ +{ + "data": { + "id": "msg_01X9JGt5...123456", + "model": "claude-3-5-sonnet-latest", + "response": { + "content": [ + { + "text": "Here is the summary of the deployment logs you requested...", + "type": "text" + } + ], + "id": "msg_01X9JGt5...123456", + "model": "claude-3-5-sonnet-latest", + "role": "assistant", + "stop_reason": "end_turn", + "type": "message", + "usage": { + "input_tokens": 45, + "output_tokens": 120 + } + }, + "stopReason": "end_turn", + "text": "Here is the summary of the deployment logs you requested...", + "usage": { + "input_tokens": 45, + "output_tokens": 120 + } + }, + "timestamp": "2026-02-06T12:00:00Z", + "type": "claude.message" +} diff --git a/pkg/integrations/claude/create_message.go b/pkg/integrations/claude/text_prompt.go similarity index 81% rename from pkg/integrations/claude/create_message.go rename to pkg/integrations/claude/text_prompt.go index fa2f74004e..869d5f35e4 100644 --- a/pkg/integrations/claude/create_message.go +++ b/pkg/integrations/claude/text_prompt.go @@ -2,19 +2,20 @@ package claude import ( "fmt" + "net/http" + "strings" + "github.com/google/uuid" "github.com/mitchellh/mapstructure" "github.com/superplanehq/superplane/pkg/configuration" 
"github.com/superplanehq/superplane/pkg/core" - "net/http" - "strings" ) const MessagePayloadType = "claude.message" -type CreateMessage struct{} +type TextPrompt struct{} -type CreateMessageSpec struct { +type TextPromptSpec struct { Model string `json:"model"` Prompt string `json:"prompt"` SystemMessage string `json:"systemMessage"` @@ -31,20 +32,20 @@ type MessagePayload struct { Response *CreateMessageResponse `json:"response"` } -func (c *CreateMessage) Name() string { - return "claude.createMessage" +func (c *TextPrompt) Name() string { + return "claude.textPrompt" } -func (c *CreateMessage) Label() string { - return "Create Message" +func (c *TextPrompt) Label() string { + return "Text Prompt" } -func (c *CreateMessage) Description() string { +func (c *TextPrompt) Description() string { return "Generate a response using Anthropic's Claude models via the Messages API" } -func (c *CreateMessage) Documentation() string { - return `The Create Message component uses Anthropic's Claude models to generate text responses. +func (c *TextPrompt) Documentation() string { + return `The Text Prompt component uses Anthropic's Claude models to generate text responses. 
## Use Cases @@ -76,19 +77,19 @@ Returns a payload containing: ` } -func (c *CreateMessage) Icon() string { +func (c *TextPrompt) Icon() string { return "message-square" } -func (c *CreateMessage) Color() string { +func (c *TextPrompt) Color() string { return "orange" } -func (c *CreateMessage) OutputChannels(configuration any) []core.OutputChannel { +func (c *TextPrompt) OutputChannels(configuration any) []core.OutputChannel { return []core.OutputChannel{core.DefaultOutputChannel} } -func (c *CreateMessage) Configuration() []configuration.Field { +func (c *TextPrompt) Configuration() []configuration.Field { return []configuration.Field{ { Name: "model", @@ -138,8 +139,8 @@ func (c *CreateMessage) Configuration() []configuration.Field { } } -func (c *CreateMessage) Setup(ctx core.SetupContext) error { - spec := CreateMessageSpec{} +func (c *TextPrompt) Setup(ctx core.SetupContext) error { + spec := TextPromptSpec{} if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { return fmt.Errorf("failed to decode configuration: %v", err) } @@ -155,8 +156,8 @@ func (c *CreateMessage) Setup(ctx core.SetupContext) error { return nil } -func (c *CreateMessage) Execute(ctx core.ExecutionContext) error { - spec := CreateMessageSpec{} +func (c *TextPrompt) Execute(ctx core.ExecutionContext) error { + spec := TextPromptSpec{} if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { return fmt.Errorf("failed to decode configuration: %v", err) } @@ -220,27 +221,27 @@ func (c *CreateMessage) Execute(ctx core.ExecutionContext) error { ) } -func (c *CreateMessage) Cancel(ctx core.ExecutionContext) error { +func (c *TextPrompt) Cancel(ctx core.ExecutionContext) error { return nil } -func (c *CreateMessage) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { +func (c *TextPrompt) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { return ctx.DefaultProcessing() } -func (c *CreateMessage) Actions() []core.Action { +func (c 
*TextPrompt) Actions() []core.Action { return []core.Action{} } -func (c *CreateMessage) HandleAction(ctx core.ActionContext) error { +func (c *TextPrompt) HandleAction(ctx core.ActionContext) error { return nil } -func (c *CreateMessage) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { +func (c *TextPrompt) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { return http.StatusOK, nil } -func (c *CreateMessage) Cleanup(ctx core.SetupContext) error { +func (c *TextPrompt) Cleanup(ctx core.SetupContext) error { return nil } diff --git a/pkg/integrations/claude/create_message_test.go b/pkg/integrations/claude/text_prompt_test.go similarity index 97% rename from pkg/integrations/claude/create_message_test.go rename to pkg/integrations/claude/text_prompt_test.go index 5c78764850..d8263b22ea 100644 --- a/pkg/integrations/claude/create_message_test.go +++ b/pkg/integrations/claude/text_prompt_test.go @@ -48,8 +48,8 @@ func (m *mockExecutionState) Fail(reason, message string) error { // --- Tests --- -func TestCreateMessage_Configuration(t *testing.T) { - c := &CreateMessage{} +func TestTextPrompt_Configuration(t *testing.T) { + c := &TextPrompt{} config := c.Configuration() expectedFields := map[string]struct { @@ -78,8 +78,8 @@ func TestCreateMessage_Configuration(t *testing.T) { } } -func TestCreateMessage_Setup(t *testing.T) { - c := &CreateMessage{} +func TestTextPrompt_Setup(t *testing.T) { + c := &TextPrompt{} tests := []struct { name string @@ -126,8 +126,8 @@ func TestCreateMessage_Setup(t *testing.T) { } } -func TestCreateMessage_Execute(t *testing.T) { - c := &CreateMessage{} +func TestTextPrompt_Execute(t *testing.T) { + c := &TextPrompt{} // Helper to create a valid response JSON validResponseJSON := `{ diff --git a/web_src/src/pages/workflowv2/mappers/claude/index.ts b/web_src/src/pages/workflowv2/mappers/claude/index.ts index 5fa26c1f52..d7f30fda9c 100644 --- a/web_src/src/pages/workflowv2/mappers/claude/index.ts +++ 
b/web_src/src/pages/workflowv2/mappers/claude/index.ts @@ -3,11 +3,11 @@ import { baseMapper } from "./base"; import { buildActionStateRegistry } from "../utils"; export const componentMappers: Record = { - createMessage: baseMapper, + textPrompt: baseMapper, }; export const triggerRenderers: Record = {}; export const eventStateRegistry: Record = { - createMessage: buildActionStateRegistry("completed"), + textPrompt: buildActionStateRegistry("completed"), }; From 46d84c2ef22cc4ce51730b1707e452dd093708c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Sun, 8 Feb 2026 15:06:23 +0100 Subject: [PATCH 010/160] chore: Add docs how to submit PRs for integrations (#2949) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- CONTRIBUTING.md | 1 + docs/contributing/integration-prs.md | 152 +++++++++++++++++++++++++++ 2 files changed, 153 insertions(+) create mode 100644 docs/contributing/integration-prs.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5c1378b7c8..b9c559ec5e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -81,6 +81,7 @@ When the process completes, you can access the SuperPlane at [http://localhost:8 ### Adding new integrations to SuperPlane +- **[Opening PRs for integrations](docs/contributing/integration-prs.md)** — Checklist for opening and submitting integration PRs (title, description, video, frontend/backend/docs, CI, DCO) - **[Integrations](docs/contributing/integrations.md)** — Instructions for adding new third-party integrations to SuperPlane - **[Component Implementation](docs/contributing/component-implementations.md)** — Step-by-step instructions for creating new components or triggers - **[Component Customization](docs/contributing/component-customization.md)** — Guide for customizing existing components or building behaviors diff --git a/docs/contributing/integration-prs.md 
b/docs/contributing/integration-prs.md new file mode 100644 index 0000000000..639fb02c52 --- /dev/null +++ b/docs/contributing/integration-prs.md @@ -0,0 +1,152 @@ +# Opening PRs for Integrations + +How to open a pull request for a new or updated integration. For general PR +workflow (fork, branch, push), see [Pull Requests](pull-requests.md). + +## Table of contents + +- [Title](#title) +- [Description](#description) + - [Start with link to the issue](#start-with-link-to-the-issue) + - [Describe the implementation](#describe-the-implementation) + - [Include a video demo](#include-a-video-demo) +- [Backend Implementation](#backend-implementation) +- [Frontend Implementation](#frontend-implementation) +- [Docs](#docs) +- [Tests](#tests) +- [CI and BugBot](#ci-and-bugbot) +- [BugBot](#bugbot) +- [DCO](#dco) + +## Title + +Use the semantic format: `feat: Add ` or `feat: Add `. + +Examples of what to do: + +- ✅ `feat: Add Rootly integration` +- ✅ `feat: Add Slack Send Message action` + +Examples of what not to do: + +- ❌ `Add Rootly integration` (missing `feat:`) +- ❌ `feat(Rootly): Add integration` (wrong format) +- ❌ `[Rootly] Add integration` (wrong format) +- ❌ `Add Rootly` (missing `feat:`) + +See [title rules](pull-requests.md#title-format-rules). + +## Description + +### Start with link to the issue + +Start with a link to the issue. e.g. `Implements #1234`. + +### Describe the implementation + +Say what was implemented, why, and how (e.g. which API, which endpoints, etc.). +If there are any limitations or things to note, include those as well. + +e.g. + +``` +This PR implements the Rootly integration, which allows users to create incidents in Rootly. + +Authorization is via API key, which users can generate in their Rootly account by +going to Settings > API Keys. +``` + +### Include a video demo + +Include a link to a short demo video. + +What to do: + +- ✅ Show how to set up the integration (e.g. 
where to find the API key in Rootly, how to enter it in SuperPlane, etc.). +- ✅ Show the workflow in action, e.g. creating an incident in Rootly triggers a workflow in SuperPlane. +- ✅ Show how to configure the component. +- ✅ Keep it short (1-2 minutes max). + +What not to do: + +- ❌ Don't just show the canvas without showing the integration in action. +- ❌ Don't make it too long or include unnecessary details. +- ❌ Don't show the code or implementation details in the video. +- ❌ Don't show unit tests or CI checks in the video. + +## Backend Implementation + +The backend implementation should include the integration code in `pkg/integrations//`. +e.g. for a Rootly integration, the code would be in `pkg/integrations/rootly/`. + +What to do: + +- ✅ Follow the existing structure and patterns in the codebase for integrations. +- ✅ Write clean, modular, and well-documented code. +- ✅ Add examples output for the components. + +What not to do: + +- ❌ Don't create a new structure or pattern for your integration, unless there's a good reason to do so. +- ❌ Don't include unrelated code or changes in the PR. +- ❌ Don't make breaking changes to existing code without a good reason and without documenting them. +- ❌ Don't make changes in the core workflow engine or other unrelated parts of the codebase unless necessary for the integration. + +## Frontend Implementation + +The frontend implementation should include mappers in `web_src/src/pages/workflowv2/mappers//`. +e.g. for a Rootly integration, the mappers would be in `web_src/src/pages/workflowv2/mappers/rootly/`. + +What to do: + +- ✅ Follow the existing structure and patterns in the codebase for integrations. +- ✅ Write clean, modular, and well-documented code. + +What not to do: + +- ❌ Don't create a new structure or pattern for your integration, unless there's a good reason to do so. +- ❌ Don't include unrelated code or changes in the PR. 
+- ❌ Don't make breaking changes to existing code without a good reason and without documenting them. +- ❌ Don't make changes in UI components or other unrelated parts of the codebase unless necessary for the integration. + +## Docs + +Documentation is generated based on the code from the `pkg/integrations/`. +Run `make gen.components.docs` to generate the docs after implementing the backend code. +This will create a doc in `docs/components/` (e.g. `Rootly.mdx`). + +What to do: + +- ✅ Write documentation in `pkg/integrations//` that is clear and comprehensive. +- ✅ Include instructions on how to set up the integration, how to use it, and any limitations or things to note. +- ✅ Follow the existing structure and patterns in the codebase for integration docs. + +What not to do: + +- ❌ Don't write documentation in `docs/components/` directly. It should be generated with `make gen.components.docs`. + +## Tests + +Write unit tests for the backend code in `pkg/integrations//`. + +What to do: + +- ✅ Write tests that cover the main functionality of the integration, including edge cases and error handling. + +- ✅ Make sure the tests are deterministic and can be run in any order. + +What not to do: + +- ❌ Don't write tests for static content, e.g. the Name or the Label of the component. + +## CI and BugBot + +Every PR must pass all CI checks, including unit tests and linting. + +## BugBot + +BugBot will automatically comment on the PR with any issues found in the code, such as linting errors, +test failures, or other issues. Make sure to address any comments from BugBot. + +## DCO + +Every commit must be signed off (`git commit -s`). See [Commit Sign-off](commit_sign-off.md). 
From cac883e7ee1e472d3d3a298380aa63cfa87c6a1f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 8 Feb 2026 11:25:06 -0300 Subject: [PATCH 011/160] chore(deps): bump tar and @hey-api/openapi-ts in /web_src (#2950) Signed-off-by: Muhammad Fuzail Zubari --- web_src/package-lock.json | 686 ++++++++++++++++++++++---------------- web_src/package.json | 2 +- 2 files changed, 391 insertions(+), 297 deletions(-) diff --git a/web_src/package-lock.json b/web_src/package-lock.json index e75bfa7da7..f7845e8b70 100644 --- a/web_src/package-lock.json +++ b/web_src/package-lock.json @@ -74,7 +74,7 @@ "devDependencies": { "@chromatic-com/storybook": "^4.1.1", "@eslint/js": "^9.25.0", - "@hey-api/openapi-ts": "^0.67.4", + "@hey-api/openapi-ts": "^0.92.3", "@storybook/addon-a11y": "^9.1.7", "@storybook/addon-docs": "^9.1.7", "@storybook/addon-onboarding": "^9.1.7", @@ -1331,16 +1331,37 @@ "@hey-api/openapi-ts": "< 2" } }, + "node_modules/@hey-api/codegen-core": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/@hey-api/codegen-core/-/codegen-core-0.7.0.tgz", + "integrity": "sha512-HglL4B4QwpzocE+c8qDU6XK8zMf8W8Pcv0RpFDYxHuYALWLTnpDUuEsglC7NQ4vC1maoXsBpMbmwpco0N4QviA==", + "license": "MIT", + "dependencies": { + "@hey-api/types": "0.1.3", + "ansi-colors": "4.1.3", + "c12": "3.3.3", + "color-support": "1.1.3" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/hey-api" + }, + "peerDependencies": { + "typescript": ">=5.5.3" + } + }, "node_modules/@hey-api/json-schema-ref-parser": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@hey-api/json-schema-ref-parser/-/json-schema-ref-parser-1.0.6.tgz", - "integrity": "sha512-yktiFZoWPtEW8QKS65eqKwA5MTKp88CyiL8q72WynrBs/73SAaxlSWlA2zW/DZlywZ5hX1OYzrCC0wFdvO9c2w==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@hey-api/json-schema-ref-parser/-/json-schema-ref-parser-1.2.4.tgz", + 
"integrity": "sha512-uuOaZ6tStUgRJFUqnX3Xdbs792++ezxOLI5NMxuikVklpbFWk2wcvIZbeX+qTWDv6kiS1Ik2EVKQgeQFWHML4A==", "license": "MIT", "dependencies": { "@jsdevtools/ono": "^7.1.3", "@types/json-schema": "^7.0.15", - "js-yaml": "^4.1.0", - "lodash": "^4.17.21" + "js-yaml": "^4.1.1", + "lodash": "^4.17.23" }, "engines": { "node": ">= 16" @@ -1350,36 +1371,107 @@ } }, "node_modules/@hey-api/openapi-ts": { - "version": "0.67.4", - "resolved": "https://registry.npmjs.org/@hey-api/openapi-ts/-/openapi-ts-0.67.4.tgz", - "integrity": "sha512-uI66X7twx1J1BSOzApVVxpNhJZs6AOt/u1N3WKqrFbAonCejMQm/vsySgfZVCPetAv40KyHYclk09vRlrffjMQ==", + "version": "0.92.3", + "resolved": "https://registry.npmjs.org/@hey-api/openapi-ts/-/openapi-ts-0.92.3.tgz", + "integrity": "sha512-D+2ySL+PXvp1iZtS+1gTEeGChwjHT3d/a6o9IwAaNdGJVsI1lPqESZx7vxqjoUtE/DruovGZC2/jPc/kA5IQPg==", "license": "MIT", "dependencies": { - "@hey-api/json-schema-ref-parser": "1.0.6", - "c12": "2.0.1", - "commander": "13.0.0", - "handlebars": "4.7.8" + "@hey-api/codegen-core": "0.7.0", + "@hey-api/json-schema-ref-parser": "1.2.4", + "@hey-api/shared": "0.1.2", + "@hey-api/types": "0.1.3", + "ansi-colors": "4.1.3", + "color-support": "1.1.3", + "commander": "14.0.3" }, "bin": { - "openapi-ts": "bin/index.cjs" + "openapi-ts": "bin/run.js" }, "engines": { - "node": "^18.18.0 || ^20.9.0 || >=22.10.0" + "node": ">=20.19.0" }, "funding": { "url": "https://github.com/sponsors/hey-api" }, "peerDependencies": { - "typescript": "^5.5.3" + "typescript": ">=5.5.3" } }, - "node_modules/@hey-api/openapi-ts/node_modules/commander": { - "version": "13.0.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-13.0.0.tgz", - "integrity": "sha512-oPYleIY8wmTVzkvQq10AEok6YcTC4sRUBl8F9gVuwchGVUCTbl/vhLTaQqutuuySYOsu8YTgV+OxKc/8Yvx+mQ==", + "node_modules/@hey-api/shared": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@hey-api/shared/-/shared-0.1.2.tgz", + "integrity": 
"sha512-dcldulfNI1xiXl/zhdXKDlNX2bvY0TOBWRRyFXNtcfPddMEFcrlXGmi/wk6LN4fPyDO8lM7FAM9aEpkEdUo92A==", "license": "MIT", + "dependencies": { + "@hey-api/codegen-core": "0.7.0", + "@hey-api/json-schema-ref-parser": "1.2.4", + "@hey-api/types": "0.1.3", + "ansi-colors": "4.1.3", + "cross-spawn": "7.0.6", + "open": "11.0.0", + "semver": "7.7.3" + }, "engines": { - "node": ">=18" + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/hey-api" + }, + "peerDependencies": { + "typescript": ">=5.5.3" + } + }, + "node_modules/@hey-api/shared/node_modules/define-lazy-prop": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", + "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@hey-api/shared/node_modules/open": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/open/-/open-11.0.0.tgz", + "integrity": "sha512-smsWv2LzFjP03xmvFoJ331ss6h+jixfA4UUV/Bsiyuu4YJPfN+FIQGOIiv4w9/+MoHkfkJ22UIaQWRVFRfH6Vw==", + "license": "MIT", + "dependencies": { + "default-browser": "^5.4.0", + "define-lazy-prop": "^3.0.0", + "is-in-ssh": "^1.0.0", + "is-inside-container": "^1.0.0", + "powershell-utils": "^0.1.0", + "wsl-utils": "^0.3.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@hey-api/shared/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@hey-api/types": { + "version": "0.1.3", + "resolved": 
"https://registry.npmjs.org/@hey-api/types/-/types-0.1.3.tgz", + "integrity": "sha512-mZaiPOWH761yD4GjDQvtjS2ZYLu5o5pI1TVSvV/u7cmbybv51/FVtinFBeaE1kFQCKZ8OQpn2ezjLBJrKsGATw==", + "license": "MIT", + "peerDependencies": { + "typescript": ">=5.5.3" } }, "node_modules/@humanfs/core": { @@ -5044,6 +5136,7 @@ "version": "8.14.1", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "dev": true, "bin": { "acorn": "bin/acorn" }, @@ -5086,6 +5179,15 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "node_modules/ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", @@ -5296,6 +5398,21 @@ "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" } }, + "node_modules/bundle-name": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-4.1.0.tgz", + "integrity": "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==", + "license": "MIT", + "dependencies": { + "run-applescript": "^7.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", @@ -5306,26 +5423,26 @@ } }, "node_modules/c12": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/c12/-/c12-2.0.1.tgz", - "integrity": "sha512-Z4JgsKXHG37C6PYUtIxCfLJZvo6FyhHJoClwwb9ftUkLpPSkuYqn6Tr+vnaN8hymm0kIbcg6Ey3kv/Q71k5w/A==", + "version": "3.3.3", + "resolved": 
"https://registry.npmjs.org/c12/-/c12-3.3.3.tgz", + "integrity": "sha512-750hTRvgBy5kcMNPdh95Qo+XUBeGo8C7nsKSmedDmaQI+E0r82DwHeM6vBewDe4rGFbnxoa4V9pw+sPh5+Iz8Q==", "license": "MIT", "dependencies": { - "chokidar": "^4.0.1", - "confbox": "^0.1.7", + "chokidar": "^5.0.0", + "confbox": "^0.2.2", "defu": "^6.1.4", - "dotenv": "^16.4.5", - "giget": "^1.2.3", - "jiti": "^2.3.0", - "mlly": "^1.7.1", - "ohash": "^1.1.4", - "pathe": "^1.1.2", - "perfect-debounce": "^1.0.0", - "pkg-types": "^1.2.0", + "dotenv": "^17.2.3", + "exsolve": "^1.0.8", + "giget": "^2.0.0", + "jiti": "^2.6.1", + "ohash": "^2.0.11", + "pathe": "^2.0.3", + "perfect-debounce": "^2.0.0", + "pkg-types": "^2.3.0", "rc9": "^2.1.2" }, "peerDependencies": { - "magicast": "^0.3.5" + "magicast": "*" }, "peerDependenciesMeta": { "magicast": { @@ -5333,6 +5450,12 @@ } } }, + "node_modules/c12/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "license": "MIT" + }, "node_modules/cac": { "version": "6.7.14", "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", @@ -5494,15 +5617,15 @@ } }, "node_modules/chokidar": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", - "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-5.0.0.tgz", + "integrity": "sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw==", "license": "MIT", "dependencies": { - "readdirp": "^4.0.1" + "readdirp": "^5.0.0" }, "engines": { - "node": ">= 14.16.0" + "node": ">= 20.19.0" }, "funding": { "url": "https://paulmillr.com/funding/" @@ -5610,6 +5733,15 @@ "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "license": "ISC", + "bin": { + "color-support": "bin.js" + } + }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -5633,6 +5765,15 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/commander": { + "version": "14.0.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-14.0.3.tgz", + "integrity": "sha512-H+y0Jo/T1RZ9qPP4Eh1pkcQcLRglraJaSLoyOtHxu6AapkjWVCy2Sit1QQ4x3Dng8qDlSsZEet7g5Pq06MvTgw==", + "license": "MIT", + "engines": { + "node": ">=20" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -5640,9 +5781,9 @@ "dev": true }, "node_modules/confbox": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", - "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==", + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.2.4.tgz", + "integrity": "sha512-ysOGlgTFbN2/Y6Cg3Iye8YKulHw+R2fNXHrgSmXISQdMnomY6eNDprVdW9R5xBguEqI954+S6709UyiO7B+6OQ==", "license": "MIT" }, "node_modules/consola": { @@ -5741,7 +5882,6 @@ "version": "7.0.6", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "dev": true, "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -5971,6 +6111,34 @@ "integrity": 
"sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "dev": true }, + "node_modules/default-browser": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-5.5.0.tgz", + "integrity": "sha512-H9LMLr5zwIbSxrmvikGuI/5KGhZ8E2zH3stkMgM5LpOWDutGM2JZaj460Udnf1a+946zc7YBgrqEWwbk7zHvGw==", + "license": "MIT", + "dependencies": { + "bundle-name": "^4.1.0", + "default-browser-id": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser-id": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-5.0.1.tgz", + "integrity": "sha512-x1VCxdX4t+8wVfd1so/9w+vQ4vx7lKd2Qp5tDRutErwmR85OgmfX7RlLRMWafRMY7hbEiXIbudNrjOAPa/hL8Q==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/define-lazy-prop": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", @@ -6070,9 +6238,9 @@ "peer": true }, "node_modules/dotenv": { - "version": "16.5.0", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.5.0.tgz", - "integrity": "sha512-m/C+AwOAr9/W1UOIZUo232ejMNnJAJtYQjUbHoNTBNTJSvqzzDh7vnrei3o3r3m9blf6ZoDkvcw0VmozNRFJxg==", + "version": "17.2.4", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.4.tgz", + "integrity": "sha512-mudtfb4zRB4bVvdj0xRo+e6duH1csJRM8IukBqfTRvHotn9+LBXB8ynAidP9zHqoRC/fsllXgk4kCKlR21fIhw==", "license": "BSD-2-Clause", "engines": { "node": ">=12" @@ -6647,6 +6815,12 @@ "express": "^4.11 || 5 || ^5.0.0-beta.1" } }, + "node_modules/exsolve": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/exsolve/-/exsolve-1.0.8.tgz", + "integrity": "sha512-LmDxfWXwcTArk8fUEnOfSZpHOJ6zOMUJKOtFLFqJLoKJetuQG874Uc7/Kki7zFLzYybmZhp1M7+98pfMqeX8yA==", + "license": "MIT" + }, 
"node_modules/extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", @@ -6882,36 +7056,6 @@ "node": ">= 0.8" } }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/fs-minipass/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "license": "ISC" - }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -6989,107 +7133,28 @@ } }, "node_modules/giget": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/giget/-/giget-1.2.5.tgz", - "integrity": "sha512-r1ekGw/Bgpi3HLV3h1MRBIlSAdHoIMklpaQ3OQLFcRw9PwAj2rqigvIbg+dBUI51OxVI2jsEtDywDBjSiuf7Ug==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/giget/-/giget-2.0.0.tgz", + "integrity": "sha512-L5bGsVkxJbJgdnwyuheIunkGatUF/zssUoxxjACCseZYAVbaqdh9Tsmmlkl8vYan09H7sbvKt4pS8GqKLBrEzA==", "license": "MIT", "dependencies": { "citty": "^0.1.6", "consola": "^3.4.0", "defu": "^6.1.4", "node-fetch-native": "^1.6.6", - "nypm": "^0.5.4", - "pathe": "^2.0.3", - "tar": "^6.2.1" + "nypm": "^0.6.0", + "pathe": "^2.0.3" }, "bin": { "giget": 
"dist/cli.mjs" } }, - "node_modules/giget/node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/giget/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "license": "ISC", - "engines": { - "node": ">=8" - } - }, - "node_modules/giget/node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "license": "MIT", - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/giget/node_modules/minizlib/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/giget/node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "license": "MIT", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/giget/node_modules/pathe": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", "integrity": 
"sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", "license": "MIT" }, - "node_modules/giget/node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "license": "ISC", - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/giget/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "license": "ISC" - }, "node_modules/glob": { "version": "10.4.5", "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", @@ -7184,27 +7249,6 @@ "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", "dev": true }, - "node_modules/handlebars": { - "version": "4.7.8", - "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", - "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", - "license": "MIT", - "dependencies": { - "minimist": "^1.2.5", - "neo-async": "^2.6.2", - "source-map": "^0.6.1", - "wordwrap": "^1.0.0" - }, - "bin": { - "handlebars": "bin/handlebars" - }, - "engines": { - "node": ">=0.4.7" - }, - "optionalDependencies": { - "uglify-js": "^3.1.4" - } - }, "node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -7569,6 +7613,51 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/is-in-ssh": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-in-ssh/-/is-in-ssh-1.0.0.tgz", + "integrity": 
"sha512-jYa6Q9rH90kR1vKB6NM7qqd1mge3Fx4Dhw5TVlK1MUBqhEOuCagrEHMevNuCcbECmXZ0ThXkRm+Ymr51HwEPAw==", + "license": "MIT", + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-inside-container": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", + "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", + "license": "MIT", + "dependencies": { + "is-docker": "^3.0.0" + }, + "bin": { + "is-inside-container": "cli.js" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-inside-container/node_modules/is-docker": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", + "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -7619,8 +7708,7 @@ "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" }, "node_modules/jackspeak": { "version": "3.4.3", @@ -7639,9 +7727,10 @@ } }, "node_modules/jiti": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz", - "integrity": 
"sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", + "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "license": "MIT", "bin": { "jiti": "lib/jiti-cli.mjs" } @@ -7652,9 +7741,9 @@ "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" }, "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", "license": "MIT", "dependencies": { "argparse": "^2.0.1" @@ -8026,9 +8115,9 @@ } }, "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", "license": "MIT" }, "node_modules/lodash.debounce": { @@ -8791,6 +8880,7 @@ "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" @@ -8805,9 +8895,10 @@ } }, "node_modules/minizlib": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz", - 
"integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", + "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", + "license": "MIT", "dependencies": { "minipass": "^7.1.2" }, @@ -8815,38 +8906,6 @@ "node": ">= 18" } }, - "node_modules/mkdirp": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", - "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", - "bin": { - "mkdirp": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/mlly": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.7.4.tgz", - "integrity": "sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw==", - "license": "MIT", - "dependencies": { - "acorn": "^8.14.0", - "pathe": "^2.0.1", - "pkg-types": "^1.3.0", - "ufo": "^1.5.4" - } - }, - "node_modules/mlly/node_modules/pathe": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", - "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "license": "MIT" - }, "node_modules/monaco-editor": { "version": "0.52.2", "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.52.2.tgz", @@ -8901,16 +8960,10 @@ "node": ">= 0.6" } }, - "node_modules/neo-async": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", - "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", - "license": "MIT" - }, "node_modules/node-fetch-native": { - "version": "1.6.6", - "resolved": 
"https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.6.tgz", - "integrity": "sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ==", + "version": "1.6.7", + "resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.7.tgz", + "integrity": "sha512-g9yhqoedzIUm0nTnTqAQvueMPVOuIY16bqgAJJC8XOOubYFNwz6IER9qs0Gq2Xd0+CecCKFjtdDTMA4u4xG06Q==", "license": "MIT" }, "node_modules/node-releases": { @@ -8927,31 +8980,43 @@ "license": "MIT" }, "node_modules/nypm": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/nypm/-/nypm-0.5.4.tgz", - "integrity": "sha512-X0SNNrZiGU8/e/zAB7sCTtdxWTMSIO73q+xuKgglm2Yvzwlo8UoC5FNySQFCvl84uPaeADkqHUZUkWy4aH4xOA==", + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/nypm/-/nypm-0.6.5.tgz", + "integrity": "sha512-K6AJy1GMVyfyMXRVB88700BJqNUkByijGJM8kEHpLdcAt+vSQAVfkWWHYzuRXHSY6xA2sNc5RjTj0p9rE2izVQ==", "license": "MIT", "dependencies": { - "citty": "^0.1.6", - "consola": "^3.4.0", + "citty": "^0.2.0", "pathe": "^2.0.3", - "pkg-types": "^1.3.1", - "tinyexec": "^0.3.2", - "ufo": "^1.5.4" + "tinyexec": "^1.0.2" }, "bin": { "nypm": "dist/cli.mjs" }, "engines": { - "node": "^14.16.0 || >=16.10.0" + "node": ">=18" } }, + "node_modules/nypm/node_modules/citty": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/citty/-/citty-0.2.0.tgz", + "integrity": "sha512-8csy5IBFI2ex2hTVpaHN2j+LNE199AgiI7y4dMintrr8i0lQiFn+0AWMZrWdHKIgMOer65f8IThysYhoReqjWA==", + "license": "MIT" + }, "node_modules/nypm/node_modules/pathe": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", "license": "MIT" }, + "node_modules/nypm/node_modules/tinyexec": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz", + "integrity": 
"sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", + "license": "MIT", + "engines": { + "node": ">=18" + } + }, "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", @@ -8974,9 +9039,9 @@ } }, "node_modules/ohash": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/ohash/-/ohash-1.1.6.tgz", - "integrity": "sha512-TBu7PtV8YkAZn0tSxobKY2n2aAQva936lhRrj6957aDaCf9IEtqsKbgMzXE/F/sjqYOwmrukeORHNLe5glk7Cg==", + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/ohash/-/ohash-2.0.11.tgz", + "integrity": "sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ==", "license": "MIT" }, "node_modules/on-finished": { @@ -9161,7 +9226,6 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, "engines": { "node": ">=8" } @@ -9218,6 +9282,7 @@ "version": "1.1.2", "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", + "dev": true, "license": "MIT" }, "node_modules/pathval": { @@ -9231,9 +9296,9 @@ } }, "node_modules/perfect-debounce": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-1.0.0.tgz", - "integrity": "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-2.1.0.tgz", + "integrity": "sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==", "license": "MIT" }, "node_modules/picocolors": { @@ -9263,14 +9328,14 @@ } }, "node_modules/pkg-types": { - "version": "1.3.1", - 
"resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz", - "integrity": "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-2.3.0.tgz", + "integrity": "sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig==", "license": "MIT", "dependencies": { - "confbox": "^0.1.8", - "mlly": "^1.7.4", - "pathe": "^2.0.1" + "confbox": "^0.2.2", + "exsolve": "^1.0.7", + "pathe": "^2.0.3" } }, "node_modules/pkg-types/node_modules/pathe": { @@ -9306,6 +9371,18 @@ "node": "^10 || ^12 || >=14" } }, + "node_modules/powershell-utils": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/powershell-utils/-/powershell-utils-0.1.0.tgz", + "integrity": "sha512-dM0jVuXJPsDN6DvRpea484tCUaMiXWjuCn++HGTqUWzGDjv5tZkEZldAJ/UMlqRYGFrD/etByo4/xOuC/snX2A==", + "license": "MIT", + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -9725,12 +9802,12 @@ "license": "MIT" }, "node_modules/readdirp": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", - "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-5.0.0.tgz", + "integrity": "sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ==", "license": "MIT", "engines": { - "node": ">= 14.18.0" + "node": ">= 20.19.0" }, "funding": { "type": "individual", @@ -9900,6 +9977,18 @@ "dev": true, "license": "MIT" }, + "node_modules/run-applescript": { + "version": "7.1.0", + "resolved": 
"https://registry.npmjs.org/run-applescript/-/run-applescript-7.1.0.tgz", + "integrity": "sha512-DPe5pVFaAsinSaV6QjQ6gdiedWDcRCbUuiQfQa2wmWV7+xC9bGulGI8+TdRmoFkAPaBXk8CrAbnlY2ISniJ47Q==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -10029,7 +10118,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, "dependencies": { "shebang-regex": "^3.0.0" }, @@ -10041,7 +10129,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, "engines": { "node": ">=8" } @@ -10167,6 +10254,7 @@ "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, "license": "BSD-3-Clause", "engines": { "node": ">=0.10.0" @@ -10586,15 +10674,15 @@ } }, "node_modules/tar": { - "version": "7.4.3", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", - "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", + "version": "7.5.7", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.7.tgz", + "integrity": "sha512-fov56fJiRuThVFXD6o6/Q354S7pnWMJIVlDBYijsTNx6jKSE4pvrDTs6lUnmGvNyfJwFQQwWy3owKz1ucIhveQ==", + "license": "BlueOak-1.0.0", "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", - "minizlib": "^3.0.1", - "mkdirp": "^3.0.1", + "minizlib": "^3.1.0", 
"yallist": "^5.0.0" }, "engines": { @@ -10627,6 +10715,7 @@ "version": "0.3.2", "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", + "dev": true, "license": "MIT" }, "node_modules/tinyglobby": { @@ -10917,25 +11006,6 @@ "typescript": ">=4.8.4 <5.9.0" } }, - "node_modules/ufo": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.1.tgz", - "integrity": "sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==", - "license": "MIT" - }, - "node_modules/uglify-js": { - "version": "3.19.3", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", - "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", - "license": "BSD-2-Clause", - "optional": true, - "bin": { - "uglifyjs": "bin/uglifyjs" - }, - "engines": { - "node": ">=0.8.0" - } - }, "node_modules/undici-types": { "version": "6.21.0", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", @@ -12493,7 +12563,6 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, "dependencies": { "isexe": "^2.0.0" }, @@ -12530,12 +12599,6 @@ "node": ">=0.10.0" } }, - "node_modules/wordwrap": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", - "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", - "license": "MIT" - }, "node_modules/wrap-ansi": { "version": "8.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", @@ -12649,6 +12712,37 @@ } } }, + "node_modules/wsl-utils": { + "version": "0.3.1", + "resolved": 
"https://registry.npmjs.org/wsl-utils/-/wsl-utils-0.3.1.tgz", + "integrity": "sha512-g/eziiSUNBSsdDJtCLB8bdYEUMj4jR7AGeUo96p/3dTafgjHhpF4RiCFPiRILwjQoDXx5MqkBr4fwWtR3Ky4Wg==", + "license": "MIT", + "dependencies": { + "is-wsl": "^3.1.0", + "powershell-utils": "^0.1.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/wsl-utils/node_modules/is-wsl": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-3.1.0.tgz", + "integrity": "sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw==", + "license": "MIT", + "dependencies": { + "is-inside-container": "^1.0.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/xml-name-validator": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz", diff --git a/web_src/package.json b/web_src/package.json index f104e6895c..9379a1def3 100644 --- a/web_src/package.json +++ b/web_src/package.json @@ -85,7 +85,7 @@ "devDependencies": { "@chromatic-com/storybook": "^4.1.1", "@eslint/js": "^9.25.0", - "@hey-api/openapi-ts": "^0.67.4", + "@hey-api/openapi-ts": "^0.92.3", "@storybook/addon-a11y": "^9.1.7", "@storybook/addon-docs": "^9.1.7", "@storybook/addon-onboarding": "^9.1.7", From b5b036d5fdb6c7051ebffe8b0807c0cad579856c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 8 Feb 2026 11:34:21 -0300 Subject: [PATCH 012/160] chore(deps): bump react-router and react-router-dom in /web_src (#2951) Signed-off-by: Muhammad Fuzail Zubari --- web_src/package-lock.json | 32 ++++++++++++++++++-------------- web_src/package.json | 2 +- 2 files changed, 19 insertions(+), 15 deletions(-) diff --git a/web_src/package-lock.json b/web_src/package-lock.json index f7845e8b70..19368c0a20 100644 --- 
a/web_src/package-lock.json +++ b/web_src/package-lock.json @@ -61,7 +61,7 @@ "react-dom": "^19.1.0", "react-markdown": "^10.1.0", "react-resizable-panels": "^3.0.6", - "react-router-dom": "^7.6.0", + "react-router-dom": "^7.13.0", "react-use-websocket": "^4.0.0", "sonner": "^2.0.7", "tailwind-merge": "^3.3.1", @@ -9727,9 +9727,9 @@ } }, "node_modules/react-router": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.6.0.tgz", - "integrity": "sha512-GGufuHIVCJDbnIAXP3P9Sxzq3UUsddG3rrI3ut1q6m0FI6vxVBF3JoPQ38+W/blslLH4a5Yutp8drkEpXoddGQ==", + "version": "7.13.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.13.0.tgz", + "integrity": "sha512-PZgus8ETambRT17BUm/LL8lX3Of+oiLaPuVTRH3l1eLvSPpKO3AvhAEb5N7ihAFZQrYDqkvvWfFh9p0z9VsjLw==", "license": "MIT", "dependencies": { "cookie": "^1.0.1", @@ -9749,12 +9749,12 @@ } }, "node_modules/react-router-dom": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.6.0.tgz", - "integrity": "sha512-DYgm6RDEuKdopSyGOWZGtDfSm7Aofb8CCzgkliTjtu/eDuB0gcsv6qdFhhi8HdtmA+KHkt5MfZ5K2PdzjugYsA==", + "version": "7.13.0", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.13.0.tgz", + "integrity": "sha512-5CO/l5Yahi2SKC6rGZ+HDEjpjkGaG/ncEP7eWFTvFxbHP8yeeI0PxTDjimtpXYlR3b3i9/WIL4VJttPrESIf2g==", "license": "MIT", "dependencies": { - "react-router": "7.6.0" + "react-router": "7.13.0" }, "engines": { "node": ">=20.0.0" @@ -9765,12 +9765,16 @@ } }, "node_modules/react-router/node_modules/cookie": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.0.2.tgz", - "integrity": "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", + "integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==", 
"license": "MIT", "engines": { "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/react-style-singleton": { @@ -10103,9 +10107,9 @@ } }, "node_modules/set-cookie-parser": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz", - "integrity": "sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ==", + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz", + "integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==", "license": "MIT" }, "node_modules/setprototypeof": { diff --git a/web_src/package.json b/web_src/package.json index 9379a1def3..e8e17ec780 100644 --- a/web_src/package.json +++ b/web_src/package.json @@ -72,7 +72,7 @@ "react-dom": "^19.1.0", "react-markdown": "^10.1.0", "react-resizable-panels": "^3.0.6", - "react-router-dom": "^7.6.0", + "react-router-dom": "^7.13.0", "react-use-websocket": "^4.0.0", "sonner": "^2.0.7", "tailwind-merge": "^3.3.1", From 29f72037aee65147bd4ad2a555d71ad849b1b9fc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 8 Feb 2026 11:41:52 -0300 Subject: [PATCH 013/160] chore(deps): bump vite in /web_src (#2952) Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) to 6.4.1 and updates ancestor dependency . These dependencies need to be updated together. Updates `vite` from 6.3.5 to 6.4.1
Release notes

Sourced from vite's releases.

create-vite@6.4.1

Please refer to CHANGELOG.md for details.

v6.4.1

Please refer to CHANGELOG.md for details.

create-vite@6.4.0

Please refer to CHANGELOG.md for details.

v6.4.0

Please refer to CHANGELOG.md for details.

v6.3.7

Please refer to CHANGELOG.md for details.

v6.3.6

Please refer to CHANGELOG.md for details.

Changelog

Sourced from vite's changelog.

8.0.0-beta.13 (2026-02-05)

Features

Bug Fixes

Miscellaneous Chores

Code Refactoring

Tests

8.0.0-beta.12 (2026-02-03)

Features

  • manifest: add assets field for standalone CSS entry points (#21015) (f289b9b)

Bug Fixes

  • avoid registering customization hook for import meta resolver multiple times (#21518) (8bb3203)
  • config: avoid watching rolldown runtime virtual module (#21545) (d18b139)
  • deps: update all non-major dependencies (#21540) (9ebaeaa)
  • populate originalFileNames when resolving CSS asset paths (#21542) (8b47ff7)

Miscellaneous Chores

  • deps: update dependency rolldown-plugin-dts to ^0.21.8 (#21539) (33881cb)

8.0.0-beta.11 (2026-01-29)

Features

Bug Fixes

  • deps: update all non-major dependencies (#21488) (2b32ca2)
  • disable tsconfig option when loading config (#21517) (5025c35)
  • optimizer: map relative new URL paths to correct relative file location (#21434) (ca96cbc)

... (truncated)

Commits
  • 0a0c50a refactor: simplify pluginFilter implementation (#19828)
  • 59d0b35 perf(css): avoid constructing renderedModules (#19775)
  • 175a839 fix: reject requests with # in request-target (#19830)
  • e2e11b1 fix(module-runner): allow already resolved id as entry (#19768)
  • 7200dee fix: correct the behavior when multiple transform filter options are specifie...
  • b125172 fix(css): remove empty chunk imports correctly when chunk file name contained...
  • 8fe3538 test: tweak generateCodeFrame test (#19812)
  • 36935b5 fix(types): remove the keepProcessEnv from the DefaultEnvironmentOptions ...
  • a0e1a04 docs(vite): fix description of transformIndexHtml hook (#19799)
  • 71227be fix: unbundle fdir to fix commonjsOptions.dynamicRequireTargets (#19791)
  • Additional commits viewable in compare view

Updates `vite` from 5.4.19 to 5.4.21
Release notes

Sourced from vite's releases.

create-vite@6.4.1

Please refer to CHANGELOG.md for details.

v6.4.1

Please refer to CHANGELOG.md for details.

create-vite@6.4.0

Please refer to CHANGELOG.md for details.

v6.4.0

Please refer to CHANGELOG.md for details.

v6.3.7

Please refer to CHANGELOG.md for details.

v6.3.6

Please refer to CHANGELOG.md for details.

Changelog

Sourced from vite's changelog.

8.0.0-beta.13 (2026-02-05)

Features

Bug Fixes

Miscellaneous Chores

Code Refactoring

Tests

8.0.0-beta.12 (2026-02-03)

Features

  • manifest: add assets field for standalone CSS entry points (#21015) (f289b9b)

Bug Fixes

  • avoid registering customization hook for import meta resolver multiple times (#21518) (8bb3203)
  • config: avoid watching rolldown runtime virtual module (#21545) (d18b139)
  • deps: update all non-major dependencies (#21540) (9ebaeaa)
  • populate originalFileNames when resolving CSS asset paths (#21542) (8b47ff7)

Miscellaneous Chores

  • deps: update dependency rolldown-plugin-dts to ^0.21.8 (#21539) (33881cb)

8.0.0-beta.11 (2026-01-29)

Features

Bug Fixes

  • deps: update all non-major dependencies (#21488) (2b32ca2)
  • disable tsconfig option when loading config (#21517) (5025c35)
  • optimizer: map relative new URL paths to correct relative file location (#21434) (ca96cbc)

... (truncated)

Commits
  • 0a0c50a refactor: simplify pluginFilter implementation (#19828)
  • 59d0b35 perf(css): avoid constructing renderedModules (#19775)
  • 175a839 fix: reject requests with # in request-target (#19830)
  • e2e11b1 fix(module-runner): allow already resolved id as entry (#19768)
  • 7200dee fix: correct the behavior when multiple transform filter options are specifie...
  • b125172 fix(css): remove empty chunk imports correctly when chunk file name contained...
  • 8fe3538 test: tweak generateCodeFrame test (#19812)
  • 36935b5 fix(types): remove the keepProcessEnv from the DefaultEnvironmentOptions ...
  • a0e1a04 docs(vite): fix description of transformIndexHtml hook (#19799)
  • 71227be fix: unbundle fdir to fix commonjsOptions.dynamicRequireTargets (#19791)
  • Additional commits viewable in compare view

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/superplanehq/superplane/network/alerts).
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Signed-off-by: Muhammad Fuzail Zubari --- web_src/package-lock.json | 21 +++++++++++---------- web_src/package.json | 2 +- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/web_src/package-lock.json b/web_src/package-lock.json index 19368c0a20..8f4349ad53 100644 --- a/web_src/package-lock.json +++ b/web_src/package-lock.json @@ -98,7 +98,7 @@ "tw-animate-css": "^1.4.0", "typescript": "~5.8.3", "typescript-eslint": "^8.30.1", - "vite": "^6.3.5", + "vite": "^6.4.1", "vitest": "^2.1.8", "web-worker": "^1.5.0" } @@ -11291,9 +11291,10 @@ } }, "node_modules/vite": { - "version": "6.3.5", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.5.tgz", - "integrity": "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ==", + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.4.1.tgz", + "integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==", + "license": "MIT", "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", @@ -11817,9 +11818,9 @@ } }, "node_modules/vite-node/node_modules/vite": { - "version": "5.4.19", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.19.tgz", - "integrity": "sha512-qO3aKv3HoQC8QKiNSTuUM1l9o/XX3+c+VTgLHbJWHZGeTPVAg2XwazI9UWzoxjIJCGCV2zU60uqMzjeLZuULqA==", + "version": "5.4.21", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", "dev": true, "license": "MIT", "dependencies": { @@ -12424,9 +12425,9 @@ } }, "node_modules/vitest/node_modules/vite": { - "version": "5.4.19", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.19.tgz", - "integrity": "sha512-qO3aKv3HoQC8QKiNSTuUM1l9o/XX3+c+VTgLHbJWHZGeTPVAg2XwazI9UWzoxjIJCGCV2zU60uqMzjeLZuULqA==", + "version": 
"5.4.21", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", "dev": true, "license": "MIT", "dependencies": { diff --git a/web_src/package.json b/web_src/package.json index e8e17ec780..d080cdc204 100644 --- a/web_src/package.json +++ b/web_src/package.json @@ -109,7 +109,7 @@ "tw-animate-css": "^1.4.0", "typescript": "~5.8.3", "typescript-eslint": "^8.30.1", - "vite": "^6.3.5", + "vite": "^6.4.1", "vitest": "^2.1.8", "web-worker": "^1.5.0" }, From 46f438a6a14cd8bd67e08237e1b5bce069573a62 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 8 Feb 2026 11:49:25 -0300 Subject: [PATCH 014/160] chore(deps-dev): bump @modelcontextprotocol/sdk from 1.11.2 to 1.26.0 in /web_src (#2953) Signed-off-by: Muhammad Fuzail Zubari --- web_src/package-lock.json | 489 +++++++++++++++++++++++++++++--------- 1 file changed, 378 insertions(+), 111 deletions(-) diff --git a/web_src/package-lock.json b/web_src/package-lock.json index 8f4349ad53..0789c3e1aa 100644 --- a/web_src/package-lock.json +++ b/web_src/package-lock.json @@ -1474,6 +1474,19 @@ "typescript": ">=5.5.3" } }, + "node_modules/@hono/node-server": { + "version": "1.19.9", + "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.9.tgz", + "integrity": "sha512-vHL6w3ecZsky+8P5MD+eFfaGTyCeOHUIFYMGpQGbrBTSmNNoxv0if69rEZ5giu36weC5saFuznL411gRX7bJDw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.14.1" + }, + "peerDependencies": { + "hono": "^4" + } + }, "node_modules/@humanfs/core": { "version": "0.19.1", "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", @@ -1654,24 +1667,88 @@ } }, "node_modules/@modelcontextprotocol/sdk": { - "version": "1.11.2", - "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.11.2.tgz", - "integrity": 
"sha512-H9vwztj5OAqHg9GockCQC06k1natgcxWQSRpQcPJf6i5+MWBzfKkRtxGbjQf0X2ihii0ffLZCRGbYV2f2bjNCQ==", + "version": "1.26.0", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.26.0.tgz", + "integrity": "sha512-Y5RmPncpiDtTXDbLKswIJzTqu2hyBKxTNsgKqKclDbhIgg1wgtf1fRuvxgTnRfcnxtvvgbIEcqUOzZrJ6iSReg==", "dev": true, + "license": "MIT", "dependencies": { + "@hono/node-server": "^1.19.9", + "ajv": "^8.17.1", + "ajv-formats": "^3.0.1", "content-type": "^1.0.5", "cors": "^2.8.5", - "cross-spawn": "^7.0.3", + "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", - "express": "^5.0.1", - "express-rate-limit": "^7.5.0", + "eventsource-parser": "^3.0.0", + "express": "^5.2.1", + "express-rate-limit": "^8.2.1", + "hono": "^4.11.4", + "jose": "^6.1.3", + "json-schema-typed": "^8.0.2", "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", - "zod": "^3.23.8", - "zod-to-json-schema": "^3.24.1" + "zod": "^3.25 || ^4.0", + "zod-to-json-schema": "^3.25.1" }, "engines": { "node": ">=18" + }, + "peerDependencies": { + "@cfworker/json-schema": "^4.1.1", + "zod": "^3.25 || ^4.0" + }, + "peerDependenciesMeta": { + "@cfworker/json-schema": { + "optional": true + }, + "zod": { + "optional": false + } + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": 
"sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/zod": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz", + "integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/zod-to-json-schema": { + "version": "3.25.1", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.25.1.tgz", + "integrity": "sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA==", + "dev": true, + "license": "ISC", + "peerDependencies": { + "zod": "^3.25 || ^4" } }, "node_modules/@monaco-editor/loader": { @@ -5124,6 +5201,7 @@ "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", "dev": true, + "license": "MIT", "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" @@ -5179,6 +5257,48 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "node_modules/ajv-formats": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz", + "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/ajv-formats/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": 
"sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-formats/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, "node_modules/ansi-colors": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", @@ -5325,23 +5445,45 @@ } }, "node_modules/body-parser": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", - "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.2.tgz", + "integrity": "sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==", "dev": true, + "license": "MIT", "dependencies": { "bytes": "^3.1.2", "content-type": "^1.0.5", - "debug": "^4.4.0", + "debug": "^4.4.3", "http-errors": "^2.0.0", - "iconv-lite": "^0.6.3", + "iconv-lite": "^0.7.0", "on-finished": "^2.4.1", - "qs": "^6.14.0", - "raw-body": "^3.0.0", - "type-is": "^2.0.0" + "qs": "^6.14.1", + "raw-body": "^3.0.1", + "type-is": "^2.0.1" }, "engines": { "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/body-parser/node_modules/iconv-lite": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.2.tgz", + 
"integrity": "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/brace-expansion": { @@ -5418,6 +5560,7 @@ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8" } @@ -5484,6 +5627,7 @@ "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", "dev": true, + "license": "MIT", "dependencies": { "call-bind-apply-helpers": "^1.0.2", "get-intrinsic": "^1.3.0" @@ -5796,15 +5940,17 @@ } }, "node_modules/content-disposition": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", - "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz", + "integrity": "sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==", "dev": true, - "dependencies": { - "safe-buffer": "5.2.1" - }, + "license": "MIT", "engines": { - "node": ">= 0.6" + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/content-type": { @@ -5812,6 +5958,7 @@ "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", "dev": true, + "license": 
"MIT", "engines": { "node": ">= 0.6" } @@ -5827,6 +5974,7 @@ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -5836,6 +5984,7 @@ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.6.0" } @@ -6060,9 +6209,10 @@ "license": "MIT" }, "node_modules/debug": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", - "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", "dependencies": { "ms": "^2.1.3" }, @@ -6170,6 +6320,7 @@ "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8" } @@ -6274,7 +6425,8 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/electron-to-chromium": { "version": "1.5.152", @@ -6328,6 +6480,7 @@ "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8" } 
@@ -6484,7 +6637,8 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/escape-string-regexp": { "version": "4.0.0", @@ -6723,6 +6877,7 @@ "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -6759,18 +6914,20 @@ } }, "node_modules/express": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", - "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/express/-/express-5.2.1.tgz", + "integrity": "sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==", "dev": true, + "license": "MIT", "dependencies": { "accepts": "^2.0.0", - "body-parser": "^2.2.0", + "body-parser": "^2.2.1", "content-disposition": "^1.0.0", "content-type": "^1.0.5", "cookie": "^0.7.1", "cookie-signature": "^1.2.1", "debug": "^4.4.0", + "depd": "^2.0.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", @@ -6801,10 +6958,14 @@ } }, "node_modules/express-rate-limit": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.0.tgz", - "integrity": "sha512-eB5zbQh5h+VenMPM3fh+nw1YExi5nMr6HUCR62ELSP11huvxm/Uir1H1QEyTkk5QX6A58pX6NmaTMceKZ0Eodg==", + "version": "8.2.1", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-8.2.1.tgz", + "integrity": "sha512-PCZEIEIxqwhzw4KF0n7QF4QqruVTcF73O5kFKUnGOyjbCCgizBBiFaYpd/fnBLUMPw/BWw9OsiN7GgrNYr7j6g==", "dev": true, + "license": "MIT", + "dependencies": { + 
"ip-address": "10.0.1" + }, "engines": { "node": ">= 16" }, @@ -6812,7 +6973,7 @@ "url": "https://github.com/sponsors/express-rate-limit" }, "peerDependencies": { - "express": "^4.11 || 5 || ^5.0.0-beta.1" + "express": ">= 4.11" } }, "node_modules/exsolve": { @@ -6873,6 +7034,23 @@ "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", "dev": true }, + "node_modules/fast-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", + "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, "node_modules/fastq": { "version": "1.19.1", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", @@ -6924,10 +7102,11 @@ } }, "node_modules/finalhandler": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", - "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.1.tgz", + "integrity": "sha512-S8KoZgRZN+a5rNwqTxlZZePjT/4cnm0ROV70LedRHZ0p8u9fRID0hJUZQpkKLzro8LfmC8sx23bY6tVNxv8pQA==", "dev": true, + "license": "MIT", "dependencies": { "debug": "^4.4.0", "encodeurl": "^2.0.0", @@ -6937,7 +7116,11 @@ "statuses": "^2.0.1" }, "engines": { - "node": ">= 0.8" + "node": ">= 18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/find-root": { @@ -7043,6 +7226,7 @@ "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", "integrity": 
"sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -7052,6 +7236,7 @@ "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8" } @@ -7352,6 +7537,16 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", "license": "MIT" }, + "node_modules/hono": { + "version": "4.11.9", + "resolved": "https://registry.npmjs.org/hono/-/hono-4.11.9.tgz", + "integrity": "sha512-Eaw2YTGM6WOxA6CXbckaEvslr2Ne4NFsKrvc0v97JD5awbmeBLO5w9Ho9L9kmKonrwF9RJlW6BxT1PVv/agBHQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16.9.0" + } + }, "node_modules/html-encoding-sniffer": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz", @@ -7382,19 +7577,24 @@ } }, "node_modules/http-errors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==", "dev": true, + "license": "MIT", "dependencies": { - "depd": "2.0.0", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" + "depd": "~2.0.0", + "inherits": "~2.0.4", + "setprototypeof": "~1.2.0", + "statuses": "~2.0.2", + "toidentifier": "~1.0.1" }, "engines": { "node": ">= 0.8" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, 
"node_modules/http-proxy-agent": { @@ -7484,7 +7684,8 @@ "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/inline-style-parser": { "version": "0.2.7", @@ -7492,11 +7693,22 @@ "integrity": "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA==", "license": "MIT" }, + "node_modules/ip-address": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.0.1.tgz", + "integrity": "sha512-NWv9YLW4PoW2B7xtzaS3NCot75m6nK7Icdv0o3lfMceJVRfSoQwqD4wEH5rLwoKJwUiZ/rfpiVBhnaF0FK4HoA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, "node_modules/ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.10" } @@ -7690,7 +7902,8 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/is-wsl": { "version": "2.2.0", @@ -7735,6 +7948,16 @@ "jiti": "lib/jiti-cli.mjs" } }, + "node_modules/jose": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/jose/-/jose-6.1.3.tgz", + "integrity": "sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/panva" + } + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -7823,6 
+8046,13 @@ "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, + "node_modules/json-schema-typed": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/json-schema-typed/-/json-schema-typed-8.0.2.tgz", + "integrity": "sha512-fQhoXdcvc3V28x7C7BMs4P5+kNlgUURe2jmUT1T//oBRMDrqy1QPelJimwZGo7Hg9VPV3EQV5Bnq4hbFy2vetA==", + "dev": true, + "license": "BSD-2-Clause" + }, "node_modules/json-stable-stringify-without-jsonify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", @@ -8353,6 +8583,7 @@ "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8" } @@ -8362,6 +8593,7 @@ "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", "dev": true, + "license": "MIT", "engines": { "node": ">=18" }, @@ -8838,20 +9070,26 @@ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/mime-types": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", - "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz", + "integrity": "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==", "dev": true, + "license": "MIT", 
"dependencies": { "mime-db": "^1.54.0" }, "engines": { - "node": ">= 0.6" + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/min-indent": { @@ -8956,6 +9194,7 @@ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -9031,6 +9270,7 @@ "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -9049,6 +9289,7 @@ "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", "dev": true, + "license": "MIT", "dependencies": { "ee-first": "1.1.1" }, @@ -9061,6 +9302,7 @@ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "dev": true, + "license": "ISC", "dependencies": { "wrappy": "1" } @@ -9209,6 +9451,7 @@ "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8" } @@ -9261,12 +9504,14 @@ "license": "ISC" }, "node_modules/path-to-regexp": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", - "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", + "version": "8.3.0", + "resolved": 
"https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.3.0.tgz", + "integrity": "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==", "dev": true, - "engines": { - "node": ">=16" + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/path-type": { @@ -9461,6 +9706,7 @@ "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", "dev": true, + "license": "MIT", "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" @@ -9519,23 +9765,42 @@ "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/raw-body": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", - "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.2.tgz", + "integrity": "sha512-K5zQjDllxWkf7Z5xJdV0/B0WTNqx6vxG70zJE4N0kBs4LovmEYWJzQGxC9bS9RAKu3bgM40lrd5zoLJ12MQ5BA==", "dev": true, + "license": "MIT", "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.6.3", - "unpipe": "1.0.0" + "bytes": "~3.1.2", + "http-errors": "~2.0.1", + "iconv-lite": "~0.7.0", + "unpipe": "~1.0.0" }, "engines": { - "node": ">= 0.8" + "node": ">= 0.10" + } + }, + "node_modules/raw-body/node_modules/iconv-lite": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.2.tgz", + "integrity": "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==", + "dev": true, + 
"license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/rc9": { @@ -9882,6 +10147,16 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/resolve": { "version": "1.22.10", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", @@ -9963,6 +10238,7 @@ "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", "dev": true, + "license": "MIT", "dependencies": { "debug": "^4.4.0", "depd": "^2.0.0", @@ -10016,26 +10292,6 @@ "queue-microtask": "^1.2.2" } }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", @@ -10070,32 +10326,38 @@ } }, "node_modules/send": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", - "integrity": 
"sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.1.tgz", + "integrity": "sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==", "dev": true, + "license": "MIT", "dependencies": { - "debug": "^4.3.5", + "debug": "^4.4.3", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "fresh": "^2.0.0", - "http-errors": "^2.0.0", - "mime-types": "^3.0.1", + "http-errors": "^2.0.1", + "mime-types": "^3.0.2", "ms": "^2.1.3", "on-finished": "^2.4.1", "range-parser": "^1.2.1", - "statuses": "^2.0.1" + "statuses": "^2.0.2" }, "engines": { "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/serve-static": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", - "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.1.tgz", + "integrity": "sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==", "dev": true, + "license": "MIT", "dependencies": { "encodeurl": "^2.0.0", "escape-html": "^1.0.3", @@ -10104,6 +10366,10 @@ }, "engines": { "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/set-cookie-parser": { @@ -10116,7 +10382,8 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/shebang-command": { "version": "2.0.0", @@ -10142,6 +10409,7 @@ "resolved": 
"https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", "dev": true, + "license": "MIT", "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", @@ -10161,6 +10429,7 @@ "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", "dev": true, + "license": "MIT", "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3" @@ -10177,6 +10446,7 @@ "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", "dev": true, + "license": "MIT", "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", @@ -10195,6 +10465,7 @@ "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", "dev": true, + "license": "MIT", "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", @@ -10296,10 +10567,11 @@ "license": "MIT" }, "node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8" } @@ -10837,6 +11109,7 @@ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", "integrity": 
"sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.6" } @@ -10967,6 +11240,7 @@ "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", "dev": true, + "license": "MIT", "dependencies": { "content-type": "^1.0.5", "media-typer": "^1.1.0", @@ -11131,6 +11405,7 @@ "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8" } @@ -12693,7 +12968,8 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/ws": { "version": "8.18.3", @@ -12792,15 +13068,6 @@ "url": "https://github.com/sponsors/colinhacks" } }, - "node_modules/zod-to-json-schema": { - "version": "3.24.5", - "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz", - "integrity": "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==", - "dev": true, - "peerDependencies": { - "zod": "^3.24.1" - } - }, "node_modules/zustand": { "version": "5.0.4", "resolved": "https://registry.npmjs.org/zustand/-/zustand-5.0.4.tgz", From 67ec33daaeff465404ea82867365c3cdc0cd3370 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 8 Feb 2026 11:56:40 -0300 Subject: [PATCH 015/160] chore(deps): bump glob from 10.4.5 to 10.5.0 in /web_src (#2955) Signed-off-by: Muhammad Fuzail Zubari --- web_src/package-lock.json | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff 
--git a/web_src/package-lock.json b/web_src/package-lock.json index 0789c3e1aa..4838f12442 100644 --- a/web_src/package-lock.json +++ b/web_src/package-lock.json @@ -7341,9 +7341,10 @@ "license": "MIT" }, "node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "version": "10.5.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz", + "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", "dev": true, "license": "ISC", "dependencies": { From 6eb447e8cea68ecf728233c3e1c454bb0dd95d92 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 8 Feb 2026 12:04:26 -0300 Subject: [PATCH 016/160] chore(deps-dev): bump storybook from 9.1.7 to 9.1.17 in /web_src (#2956) Signed-off-by: Muhammad Fuzail Zubari --- web_src/package-lock.json | 8 ++++---- web_src/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/web_src/package-lock.json b/web_src/package-lock.json index 4838f12442..d4e0f6c7e9 100644 --- a/web_src/package-lock.json +++ b/web_src/package-lock.json @@ -94,7 +94,7 @@ "globals": "^16.0.0", "jsdom": "^25.0.1", "prettier": "^3.6.2", - "storybook": "^9.1.7", + "storybook": "^9.1.17", "tw-animate-css": "^1.4.0", "typescript": "~5.8.3", "typescript-eslint": "^8.30.1", @@ -10585,9 +10585,9 @@ "license": "MIT" }, "node_modules/storybook": { - "version": "9.1.7", - "resolved": "https://registry.npmjs.org/storybook/-/storybook-9.1.7.tgz", - "integrity": 
"sha512-X8YSQMNuqV9DklQLZH6mLKpDn15Z5tuUUTAIYsiGqx5BwsjtXnv5K04fXgl3jqTZyUauzV/ii8KdT04NVLtMwQ==", + "version": "9.1.17", + "resolved": "https://registry.npmjs.org/storybook/-/storybook-9.1.17.tgz", + "integrity": "sha512-kfr6kxQAjA96ADlH6FMALJwJ+eM80UqXy106yVHNgdsAP/CdzkkicglRAhZAvUycXK9AeadF6KZ00CWLtVMN4w==", "dev": true, "license": "MIT", "dependencies": { diff --git a/web_src/package.json b/web_src/package.json index d080cdc204..48d3ffdb9e 100644 --- a/web_src/package.json +++ b/web_src/package.json @@ -105,7 +105,7 @@ "globals": "^16.0.0", "jsdom": "^25.0.1", "prettier": "^3.6.2", - "storybook": "^9.1.7", + "storybook": "^9.1.17", "tw-animate-css": "^1.4.0", "typescript": "~5.8.3", "typescript-eslint": "^8.30.1", From ba1d52ede05a29369804352834b9490ab7cddc7e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 8 Feb 2026 12:12:11 -0300 Subject: [PATCH 017/160] chore(deps): bump golang.org/x/crypto from 0.37.0 to 0.45.0 (#2958) Signed-off-by: Muhammad Fuzail Zubari --- go.mod | 10 +++++----- go.sum | 22 ++++++++++++---------- 2 files changed, 17 insertions(+), 15 deletions(-) diff --git a/go.mod b/go.mod index 4801f5904b..674228acec 100644 --- a/go.mod +++ b/go.mod @@ -32,7 +32,7 @@ require ( go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux v0.63.0 go.opentelemetry.io/otel/metric v1.38.0 go.opentelemetry.io/otel/trace v1.38.0 - golang.org/x/sync v0.14.0 + golang.org/x/sync v0.18.0 google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e google.golang.org/grpc v1.71.1 google.golang.org/protobuf v1.36.6 @@ -101,10 +101,10 @@ require ( go.opentelemetry.io/otel v1.38.0 go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.35.0 go.opentelemetry.io/otel/sdk/metric v1.38.0 - golang.org/x/crypto v0.37.0 - golang.org/x/net v0.39.0 // indirect - golang.org/x/sys v0.35.0 // indirect - golang.org/x/text v0.24.0 // indirect + golang.org/x/crypto v0.45.0 + 
golang.org/x/net v0.47.0 // indirect + golang.org/x/sys v0.38.0 // indirect + golang.org/x/text v0.31.0 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect diff --git a/go.sum b/go.sum index 3d49a8d982..1b4ce6e425 100644 --- a/go.sum +++ b/go.sum @@ -577,8 +577,8 @@ golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= -golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -668,8 +668,8 @@ golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= -golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= -golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod 
h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -702,8 +702,8 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ= -golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -784,8 +784,8 @@ golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= -golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys 
v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= @@ -797,6 +797,8 @@ golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -813,8 +815,8 @@ golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= -golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod 
h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= From 00da81d501df445fa4267dc3594b30f428c2886b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 8 Feb 2026 12:20:12 -0300 Subject: [PATCH 018/160] chore(deps): bump esbuild, @vitest/ui and vitest in /web_src (#2960) Signed-off-by: Muhammad Fuzail Zubari --- web_src/package-lock.json | 1486 ++++++------------------------------- web_src/package.json | 4 +- 2 files changed, 242 insertions(+), 1248 deletions(-) diff --git a/web_src/package-lock.json b/web_src/package-lock.json index d4e0f6c7e9..0fa64952fa 100644 --- a/web_src/package-lock.json +++ b/web_src/package-lock.json @@ -86,7 +86,7 @@ "@types/react": "^19.1.2", "@types/react-dom": "^19.1.2", "@vitejs/plugin-react": "^4.4.1", - "@vitest/ui": "^2.1.8", + "@vitest/ui": "^4.0.18", "eslint": "^9.25.0", "eslint-plugin-react-hooks": "^5.2.0", "eslint-plugin-react-refresh": "^0.4.19", @@ -99,7 +99,7 @@ "typescript": "~5.8.3", "typescript-eslint": "^8.30.1", "vite": "^6.4.1", - "vitest": "^2.1.8", + "vitest": "^4.0.18", "web-worker": "^1.5.0" } }, @@ -1628,9 +1628,10 @@ } }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", - "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==" + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { "version": "0.3.31", @@ -3704,6 +3705,13 @@ "react": "^16.14.0 || 17.x || 18.x || 19.x" } }, + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + 
"resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "dev": true, + "license": "MIT" + }, "node_modules/@storybook/addon-a11y": { "version": "9.1.7", "resolved": "https://registry.npmjs.org/@storybook/addon-a11y/-/addon-a11y-9.1.7.tgz", @@ -4979,21 +4987,33 @@ } }, "node_modules/@vitest/expect": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-2.1.9.tgz", - "integrity": "sha512-UJCIkTBenHeKT1TTlKMJWy1laZewsRIzYighyYiJKZreqtdxSos/S1t+ktRMQWu2CKqaarrkeszJx1cgC5tGZw==", + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.0.18.tgz", + "integrity": "sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "2.1.9", - "@vitest/utils": "2.1.9", - "chai": "^5.1.2", - "tinyrainbow": "^1.2.0" + "@standard-schema/spec": "^1.0.0", + "@types/chai": "^5.2.2", + "@vitest/spy": "4.0.18", + "@vitest/utils": "4.0.18", + "chai": "^6.2.1", + "tinyrainbow": "^3.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, + "node_modules/@vitest/expect/node_modules/chai": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz", + "integrity": "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, "node_modules/@vitest/mocker": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", @@ -5045,92 +5065,88 @@ } }, "node_modules/@vitest/pretty-format": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.1.9.tgz", - "integrity": 
"sha512-KhRIdGV2U9HOUzxfiHmY8IFHTdqtOhIzCpd8WRdJiE7D/HUcZVD0EgQCVjm+Q9gkUXWgBvMmTtZgIG48wq7sOQ==", + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.18.tgz", + "integrity": "sha512-P24GK3GulZWC5tz87ux0m8OADrQIUVDPIjjj65vBXYG17ZeU3qD7r+MNZ1RNv4l8CGU2vtTRqixrOi9fYk/yKw==", "dev": true, "license": "MIT", "dependencies": { - "tinyrainbow": "^1.2.0" + "tinyrainbow": "^3.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/runner": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-2.1.9.tgz", - "integrity": "sha512-ZXSSqTFIrzduD63btIfEyOmNcBmQvgOVsPNPe0jYtESiXkhd8u2erDLnMxmGrDCwHCCHE7hxwRDCT3pt0esT4g==", + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.0.18.tgz", + "integrity": "sha512-rpk9y12PGa22Jg6g5M3UVVnTS7+zycIGk9ZNGN+m6tZHKQb7jrP7/77WfZy13Y/EUDd52NDsLRQhYKtv7XfPQw==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/utils": "2.1.9", - "pathe": "^1.1.2" + "@vitest/utils": "4.0.18", + "pathe": "^2.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/snapshot": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-2.1.9.tgz", - "integrity": "sha512-oBO82rEjsxLNJincVhLhaxxZdEtV0EFHMK5Kmx5sJ6H9L183dHECjiefOAdnqpIgT5eZwT04PoggUnW88vOBNQ==", + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.0.18.tgz", + "integrity": "sha512-PCiV0rcl7jKQjbgYqjtakly6T1uwv/5BQ9SwBLekVg/EaYeQFPiXcgrC2Y7vDMA8dM1SUEAEV82kgSQIlXNMvA==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "2.1.9", - "magic-string": "^0.30.12", - "pathe": "^1.1.2" + "@vitest/pretty-format": "4.0.18", + "magic-string": "^0.30.21", + "pathe": "^2.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/spy": { - "version": "2.1.9", - "resolved": 
"https://registry.npmjs.org/@vitest/spy/-/spy-2.1.9.tgz", - "integrity": "sha512-E1B35FwzXXTs9FHNK6bDszs7mtydNi5MIfUWpceJ8Xbfb1gBMscAnwLbEu+B44ed6W3XjL9/ehLPHR1fkf1KLQ==", + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.0.18.tgz", + "integrity": "sha512-cbQt3PTSD7P2OARdVW3qWER5EGq7PHlvE+QfzSC0lbwO+xnt7+XH06ZzFjFRgzUX//JmpxrCu92VdwvEPlWSNw==", "dev": true, "license": "MIT", - "dependencies": { - "tinyspy": "^3.0.2" - }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/ui": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/ui/-/ui-2.1.9.tgz", - "integrity": "sha512-izzd2zmnk8Nl5ECYkW27328RbQ1nKvkm6Bb5DAaz1Gk59EbLkiCMa6OLT0NoaAYTjOFS6N+SMYW1nh4/9ljPiw==", + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/ui/-/ui-4.0.18.tgz", + "integrity": "sha512-CGJ25bc8fRi8Lod/3GHSvXRKi7nBo3kxh0ApW4yCjmrWmRmlT53B5E08XRSZRliygG0aVNxLrBEqPYdz/KcCtQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/utils": "2.1.9", + "@vitest/utils": "4.0.18", "fflate": "^0.8.2", - "flatted": "^3.3.1", - "pathe": "^1.1.2", - "sirv": "^3.0.0", - "tinyglobby": "^0.2.10", - "tinyrainbow": "^1.2.0" + "flatted": "^3.3.3", + "pathe": "^2.0.3", + "sirv": "^3.0.2", + "tinyglobby": "^0.2.15", + "tinyrainbow": "^3.0.3" }, "funding": { "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "vitest": "2.1.9" + "vitest": "4.0.18" } }, "node_modules/@vitest/utils": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.1.9.tgz", - "integrity": "sha512-v0psaMSkNJ3A2NMrUEHFRzJtDPFn+/VWZ5WxImB21T9fjucJRmS7xCS3ppEnARb9y11OAzaD+P2Ps+b+BGX5iQ==", + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.0.18.tgz", + "integrity": "sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "2.1.9", - 
"loupe": "^3.1.2", - "tinyrainbow": "^1.2.0" + "@vitest/pretty-format": "4.0.18", + "tinyrainbow": "^3.0.3" }, "funding": { "url": "https://opencollective.com/vitest" @@ -5593,22 +5609,6 @@ } } }, - "node_modules/c12/node_modules/pathe": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", - "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "license": "MIT" - }, - "node_modules/cac": { - "version": "6.7.14", - "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", - "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/call-bind-apply-helpers": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", @@ -6904,9 +6904,9 @@ } }, "node_modules/expect-type": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.2.tgz", - "integrity": "sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -7334,12 +7334,6 @@ "giget": "dist/cli.mjs" } }, - "node_modules/giget/node_modules/pathe": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", - "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "license": "MIT" - }, "node_modules/glob": { "version": "10.5.0", "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz", @@ -8410,11 +8404,12 @@ } }, "node_modules/magic-string": { - "version": "0.30.17", - 
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", - "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "license": "MIT", "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0" + "@jridgewell/sourcemap-codec": "^1.5.5" } }, "node_modules/math-intrinsics": { @@ -9242,21 +9237,6 @@ "integrity": "sha512-8csy5IBFI2ex2hTVpaHN2j+LNE199AgiI7y4dMintrr8i0lQiFn+0AWMZrWdHKIgMOer65f8IThysYhoReqjWA==", "license": "MIT" }, - "node_modules/nypm/node_modules/pathe": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", - "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "license": "MIT" - }, - "node_modules/nypm/node_modules/tinyexec": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz", - "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", - "license": "MIT", - "engines": { - "node": ">=18" - } - }, "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", @@ -9279,6 +9259,17 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/obug": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/obug/-/obug-2.1.1.tgz", + "integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/sxzz", + "https://opencollective.com/debug" + ], + "license": "MIT" + }, "node_modules/ohash": { "version": "2.0.11", "resolved": 
"https://registry.npmjs.org/ohash/-/ohash-2.0.11.tgz", @@ -9525,10 +9516,9 @@ } }, "node_modules/pathe": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", - "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", - "dev": true, + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", "license": "MIT" }, "node_modules/pathval": { @@ -9584,12 +9574,6 @@ "pathe": "^2.0.3" } }, - "node_modules/pkg-types/node_modules/pathe": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", - "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "license": "MIT" - }, "node_modules/postcss": { "version": "8.5.3", "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz", @@ -10502,9 +10486,9 @@ } }, "node_modules/sirv": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.1.tgz", - "integrity": "sha512-FoqMu0NCGBLCcAkS1qA+XJIQTR6/JHfQXl+uGteNCQ76T91DMUjPa9xfmeqMY3z80nLSg9yQmNjK0Px6RWsH/A==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.2.tgz", + "integrity": "sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==", "dev": true, "license": "MIT", "dependencies": { @@ -10578,9 +10562,9 @@ } }, "node_modules/std-env": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz", - "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz", + "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", "dev": 
true, "license": "MIT" }, @@ -10989,19 +10973,22 @@ "license": "MIT" }, "node_modules/tinyexec": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", - "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", - "dev": true, - "license": "MIT" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz", + "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", + "license": "MIT", + "engines": { + "node": ">=18" + } }, "node_modules/tinyglobby": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz", - "integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==", + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "license": "MIT", "dependencies": { - "fdir": "^6.4.4", - "picomatch": "^4.0.2" + "fdir": "^6.5.0", + "picomatch": "^4.0.3" }, "engines": { "node": ">=12.0.0" @@ -11011,9 +10998,13 @@ } }, "node_modules/tinyglobby/node_modules/fdir": { - "version": "6.4.4", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz", - "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, "peerDependencies": { "picomatch": "^3 || ^4" }, @@ -11024,9 +11015,10 @@ } }, "node_modules/tinyglobby/node_modules/picomatch": { - "version": "4.0.2", - "resolved": 
"https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", - "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "license": "MIT", "engines": { "node": ">=12" }, @@ -11034,30 +11026,10 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/tinypool": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", - "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.0.0 || >=20.0.0" - } - }, "node_modules/tinyrainbow": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-1.2.0.tgz", - "integrity": "sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/tinyspy": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz", - "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.0.3.tgz", + "integrity": "sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==", "dev": true, "license": "MIT", "engines": { @@ -11640,1124 +11612,146 @@ } } }, - "node_modules/vite-node": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-2.1.9.tgz", - "integrity": "sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==", - "dev": true, - "license": "MIT", - 
"dependencies": { - "cac": "^6.7.14", - "debug": "^4.3.7", - "es-module-lexer": "^1.5.4", - "pathe": "^1.1.2", - "vite": "^5.0.0" - }, - "bin": { - "vite-node": "vite-node.mjs" - }, - "engines": { - "node": "^18.0.0 || >=20.0.0" + "node_modules/vite/node_modules/fdir": { + "version": "6.4.4", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz", + "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==", + "peerDependencies": { + "picomatch": "^3 || ^4" }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/vite-node/node_modules/@esbuild/aix-ppc64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", - "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=12" + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } } }, - "node_modules/vite-node/node_modules/@esbuild/android-arm": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", - "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], + "node_modules/vite/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", "engines": { "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/vite-node/node_modules/@esbuild/android-arm64": { - "version": "0.21.5", - "resolved": 
"https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", - "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], + "node_modules/vitest": { + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.18.tgz", + "integrity": "sha512-hOQuK7h0FGKgBAas7v0mSAsnvrIgAvWmRFjmzpJ7SwFHH3g1k2u37JtYwOwmEKhK6ZO3v9ggDBBm0La1LCK4uQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/expect": "4.0.18", + "@vitest/mocker": "4.0.18", + "@vitest/pretty-format": "4.0.18", + "@vitest/runner": "4.0.18", + "@vitest/snapshot": "4.0.18", + "@vitest/spy": "4.0.18", + "@vitest/utils": "4.0.18", + "es-module-lexer": "^1.7.0", + "expect-type": "^1.2.2", + "magic-string": "^0.30.21", + "obug": "^2.1.1", + "pathe": "^2.0.3", + "picomatch": "^4.0.3", + "std-env": "^3.10.0", + "tinybench": "^2.9.0", + "tinyexec": "^1.0.2", + "tinyglobby": "^0.2.15", + "tinyrainbow": "^3.0.3", + "vite": "^6.0.0 || ^7.0.0", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, "engines": { - "node": ">=12" + "node": "^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@opentelemetry/api": "^1.9.0", + "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", + "@vitest/browser-playwright": "4.0.18", + "@vitest/browser-preview": "4.0.18", + "@vitest/browser-webdriverio": "4.0.18", + "@vitest/ui": "4.0.18", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@opentelemetry/api": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser-playwright": { + "optional": true + }, + "@vitest/browser-preview": { + "optional": true + }, + "@vitest/browser-webdriverio": { + "optional": 
true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } } }, - "node_modules/vite-node/node_modules/@esbuild/android-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", - "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", - "cpu": [ - "x64" - ], + "node_modules/vitest/node_modules/@vitest/mocker": { + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.0.18.tgz", + "integrity": "sha512-HhVd0MDnzzsgevnOWCBj5Otnzobjy5wLBe4EdeeFGv8luMsGcYqDuFRMcttKWZA5vVO8RFjexVovXvAM4JoJDQ==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" + "dependencies": { + "@vitest/spy": "4.0.18", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.21" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } } }, - "node_modules/vite-node/node_modules/@esbuild/darwin-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", - "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", - "cpu": [ - "arm64" - ], + "node_modules/vitest/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], "engines": { "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/darwin-x64": { - "version": "0.21.5", - "resolved": 
"https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", - "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/freebsd-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", - "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/freebsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", - "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/linux-arm": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", - "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/linux-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", - "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", 
- "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/linux-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", - "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/linux-loong64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", - "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/linux-mips64el": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", - "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", - "cpu": [ - "mips64el" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/linux-ppc64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", - "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/linux-riscv64": { - "version": "0.21.5", - "resolved": 
"https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", - "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/linux-s390x": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", - "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/linux-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", - "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/netbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", - "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/openbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", - "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - 
"optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/sunos-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", - "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/win32-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", - "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/win32-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", - "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/@esbuild/win32-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", - "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite-node/node_modules/esbuild": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", - "integrity": 
"sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=12" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.21.5", - "@esbuild/android-arm": "0.21.5", - "@esbuild/android-arm64": "0.21.5", - "@esbuild/android-x64": "0.21.5", - "@esbuild/darwin-arm64": "0.21.5", - "@esbuild/darwin-x64": "0.21.5", - "@esbuild/freebsd-arm64": "0.21.5", - "@esbuild/freebsd-x64": "0.21.5", - "@esbuild/linux-arm": "0.21.5", - "@esbuild/linux-arm64": "0.21.5", - "@esbuild/linux-ia32": "0.21.5", - "@esbuild/linux-loong64": "0.21.5", - "@esbuild/linux-mips64el": "0.21.5", - "@esbuild/linux-ppc64": "0.21.5", - "@esbuild/linux-riscv64": "0.21.5", - "@esbuild/linux-s390x": "0.21.5", - "@esbuild/linux-x64": "0.21.5", - "@esbuild/netbsd-x64": "0.21.5", - "@esbuild/openbsd-x64": "0.21.5", - "@esbuild/sunos-x64": "0.21.5", - "@esbuild/win32-arm64": "0.21.5", - "@esbuild/win32-ia32": "0.21.5", - "@esbuild/win32-x64": "0.21.5" - } - }, - "node_modules/vite-node/node_modules/vite": { - "version": "5.4.21", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", - "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", - "dev": true, - "license": "MIT", - "dependencies": { - "esbuild": "^0.21.3", - "postcss": "^8.4.43", - "rollup": "^4.20.0" - }, - "bin": { - "vite": "bin/vite.js" - }, - "engines": { - "node": "^18.0.0 || >=20.0.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^18.0.0 || >=20.0.0", - "less": "*", - "lightningcss": "^1.21.0", - "sass": "*", - "sass-embedded": "*", - "stylus": "*", - "sugarss": "*", - "terser": "^5.4.0" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "less": { - 
"optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - } - } - }, - "node_modules/vite/node_modules/fdir": { - "version": "6.4.4", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz", - "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==", - "peerDependencies": { - "picomatch": "^3 || ^4" - }, - "peerDependenciesMeta": { - "picomatch": { - "optional": true - } - } - }, - "node_modules/vite/node_modules/picomatch": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", - "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/vitest": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-2.1.9.tgz", - "integrity": "sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/expect": "2.1.9", - "@vitest/mocker": "2.1.9", - "@vitest/pretty-format": "^2.1.9", - "@vitest/runner": "2.1.9", - "@vitest/snapshot": "2.1.9", - "@vitest/spy": "2.1.9", - "@vitest/utils": "2.1.9", - "chai": "^5.1.2", - "debug": "^4.3.7", - "expect-type": "^1.1.0", - "magic-string": "^0.30.12", - "pathe": "^1.1.2", - "std-env": "^3.8.0", - "tinybench": "^2.9.0", - "tinyexec": "^0.3.1", - "tinypool": "^1.0.1", - "tinyrainbow": "^1.2.0", - "vite": "^5.0.0", - "vite-node": "2.1.9", - "why-is-node-running": "^2.3.0" - }, - "bin": { - "vitest": "vitest.mjs" - }, - "engines": { - "node": "^18.0.0 || >=20.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - 
}, - "peerDependencies": { - "@edge-runtime/vm": "*", - "@types/node": "^18.0.0 || >=20.0.0", - "@vitest/browser": "2.1.9", - "@vitest/ui": "2.1.9", - "happy-dom": "*", - "jsdom": "*" - }, - "peerDependenciesMeta": { - "@edge-runtime/vm": { - "optional": true - }, - "@types/node": { - "optional": true - }, - "@vitest/browser": { - "optional": true - }, - "@vitest/ui": { - "optional": true - }, - "happy-dom": { - "optional": true - }, - "jsdom": { - "optional": true - } - } - }, - "node_modules/vitest/node_modules/@esbuild/aix-ppc64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", - "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/android-arm": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", - "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/android-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", - "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/android-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", - "integrity": 
"sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/darwin-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", - "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/darwin-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", - "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/freebsd-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", - "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/freebsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", - "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - 
"node_modules/vitest/node_modules/@esbuild/linux-arm": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", - "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/linux-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", - "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/linux-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", - "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/linux-loong64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", - "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/linux-mips64el": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", - "integrity": 
"sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", - "cpu": [ - "mips64el" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/linux-ppc64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", - "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/linux-riscv64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", - "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/linux-s390x": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", - "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/linux-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", - "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - 
"node_modules/vitest/node_modules/@esbuild/netbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", - "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/openbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", - "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/sunos-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", - "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/win32-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", - "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/win32-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", - "integrity": 
"sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@esbuild/win32-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", - "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vitest/node_modules/@vitest/mocker": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-2.1.9.tgz", - "integrity": "sha512-tVL6uJgoUdi6icpxmdrn5YNo3g3Dxv+IHJBr0GXHaEdTcw3F+cPKnsXFhli6nO+f/6SDKPHEK1UN+k+TQv0Ehg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/spy": "2.1.9", - "estree-walker": "^3.0.3", - "magic-string": "^0.30.12" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "msw": "^2.4.9", - "vite": "^5.0.0" - }, - "peerDependenciesMeta": { - "msw": { - "optional": true - }, - "vite": { - "optional": true - } - } - }, - "node_modules/vitest/node_modules/esbuild": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", - "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=12" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.21.5", - "@esbuild/android-arm": "0.21.5", - "@esbuild/android-arm64": "0.21.5", - "@esbuild/android-x64": "0.21.5", - "@esbuild/darwin-arm64": "0.21.5", - "@esbuild/darwin-x64": "0.21.5", - "@esbuild/freebsd-arm64": 
"0.21.5", - "@esbuild/freebsd-x64": "0.21.5", - "@esbuild/linux-arm": "0.21.5", - "@esbuild/linux-arm64": "0.21.5", - "@esbuild/linux-ia32": "0.21.5", - "@esbuild/linux-loong64": "0.21.5", - "@esbuild/linux-mips64el": "0.21.5", - "@esbuild/linux-ppc64": "0.21.5", - "@esbuild/linux-riscv64": "0.21.5", - "@esbuild/linux-s390x": "0.21.5", - "@esbuild/linux-x64": "0.21.5", - "@esbuild/netbsd-x64": "0.21.5", - "@esbuild/openbsd-x64": "0.21.5", - "@esbuild/sunos-x64": "0.21.5", - "@esbuild/win32-arm64": "0.21.5", - "@esbuild/win32-ia32": "0.21.5", - "@esbuild/win32-x64": "0.21.5" - } - }, - "node_modules/vitest/node_modules/vite": { - "version": "5.4.21", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", - "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", - "dev": true, - "license": "MIT", - "dependencies": { - "esbuild": "^0.21.3", - "postcss": "^8.4.43", - "rollup": "^4.20.0" - }, - "bin": { - "vite": "bin/vite.js" - }, - "engines": { - "node": "^18.0.0 || >=20.0.0" }, "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^18.0.0 || >=20.0.0", - "less": "*", - "lightningcss": "^1.21.0", - "sass": "*", - "sass-embedded": "*", - "stylus": "*", - "sugarss": "*", - "terser": "^5.4.0" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - } + "url": "https://github.com/sponsors/jonschlinkert" } }, "node_modules/w3c-xmlserializer": { diff --git a/web_src/package.json b/web_src/package.json index 48d3ffdb9e..1a26a8ea28 100644 --- a/web_src/package.json +++ b/web_src/package.json @@ -97,7 +97,7 @@ 
"@types/react": "^19.1.2", "@types/react-dom": "^19.1.2", "@vitejs/plugin-react": "^4.4.1", - "@vitest/ui": "^2.1.8", + "@vitest/ui": "^4.0.18", "eslint": "^9.25.0", "eslint-plugin-react-hooks": "^5.2.0", "eslint-plugin-react-refresh": "^0.4.19", @@ -110,7 +110,7 @@ "typescript": "~5.8.3", "typescript-eslint": "^8.30.1", "vite": "^6.4.1", - "vitest": "^2.1.8", + "vitest": "^4.0.18", "web-worker": "^1.5.0" }, "prettier": { From 74baea10848e8d66b650a16cb11be26259be420e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 8 Feb 2026 12:21:49 -0300 Subject: [PATCH 019/160] chore(deps): bump github.com/go-chi/chi/v5 from 5.1.0 to 5.2.2 (#2962) Signed-off-by: Muhammad Fuzail Zubari --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 674228acec..044290866c 100644 --- a/go.mod +++ b/go.mod @@ -65,7 +65,7 @@ require ( github.com/fsnotify/fsnotify v1.5.1 // indirect github.com/glebarez/go-sqlite v1.20.3 // indirect github.com/glebarez/sqlite v1.7.0 // indirect - github.com/go-chi/chi/v5 v5.1.0 // indirect + github.com/go-chi/chi/v5 v5.2.2 // indirect github.com/go-jose/go-jose/v3 v3.0.4 // indirect github.com/go-sql-driver/mysql v1.9.2 // indirect github.com/go-stack/stack v1.8.1 // indirect diff --git a/go.sum b/go.sum index 1b4ce6e425..a52c8806c8 100644 --- a/go.sum +++ b/go.sum @@ -162,8 +162,8 @@ github.com/glebarez/go-sqlite v1.20.3 h1:89BkqGOXR9oRmG58ZrzgoY/Fhy5x0M+/WV48U5z github.com/glebarez/go-sqlite v1.20.3/go.mod h1:u3N6D/wftiAzIOJtZl6BmedqxmmkDfH3q+ihjqxC9u0= github.com/glebarez/sqlite v1.7.0 h1:A7Xj/KN2Lvie4Z4rrgQHY8MsbebX3NyWsL3n2i82MVI= github.com/glebarez/sqlite v1.7.0/go.mod h1:PkeevrRlF/1BhQBCnzcMWzgrIk7IOop+qS2jUYLfHhk= -github.com/go-chi/chi/v5 v5.1.0 h1:acVI1TYaD+hhedDJ3r54HyA6sExp3HfXq7QWEEY/xMw= -github.com/go-chi/chi/v5 v5.1.0/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-chi/chi/v5 v5.2.2 
h1:CMwsvRVTbXVytCk1Wd72Zy1LAsAh9GxMmSNWLHCG618= +github.com/go-chi/chi/v5 v5.2.2/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops= github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= From 9940b0d1438cc1390e878b43c4efd430c6a76e9d Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Sun, 8 Feb 2026 13:38:08 -0300 Subject: [PATCH 020/160] chore: add WebhookHandler interface (#2937) The `Integration` interface was starting to get full of webhook-related methods, but not all integrations make use of those. In fact, from our current integrations, 11/16 do not use it, so more than half do not use it. Due to that, we move that logic into a more specialized `WebhookHandler` interface. That keeps the Integration interface clean, and makes it a lot more explicit when an integration makes use of it. 
--------- Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- .cursor/commands/component-review.rules.md | 4 +- docs/contributing/integrations.md | 26 ++- pkg/core/integration.go | 37 ++-- pkg/integrations/aws/aws.go | 17 -- pkg/integrations/claude/claude.go | 12 -- pkg/integrations/claude/claude_test.go | 12 -- pkg/integrations/cloudflare/cloudflare.go | 12 -- pkg/integrations/dash0/dash0.go | 12 -- pkg/integrations/datadog/datadog.go | 16 -- pkg/integrations/daytona/daytona.go | 12 -- pkg/integrations/discord/discord.go | 12 -- pkg/integrations/github/github.go | 139 +------------- pkg/integrations/github/github_test.go | 112 ----------- pkg/integrations/github/webhook_handler.go | 149 +++++++++++++++ .../github/webhook_handler_test.go | 120 ++++++++++++ pkg/integrations/jira/jira.go | 12 -- pkg/integrations/openai/openai.go | 12 -- pkg/integrations/pagerduty/pagerduty.go | 139 +------------- pkg/integrations/pagerduty/pagerduty_test.go | 140 -------------- pkg/integrations/pagerduty/webhook_handler.go | 128 +++++++++++++ .../pagerduty/webhook_handler_test.go | 148 +++++++++++++++ pkg/integrations/rootly/rootly.go | 82 +------- pkg/integrations/rootly/rootly_test.go | 100 ---------- pkg/integrations/rootly/webhook_handler.go | 90 +++++++++ .../rootly/webhook_handler_test.go | 108 +++++++++++ pkg/integrations/semaphore/semaphore.go | 168 +---------------- pkg/integrations/semaphore/semaphore_test.go | 78 -------- pkg/integrations/semaphore/webhook_handler.go | 176 ++++++++++++++++++ .../semaphore/webhook_handler_test.go | 86 +++++++++ pkg/integrations/sendgrid/sendgrid.go | 84 +-------- pkg/integrations/sendgrid/webhook_handler.go | 91 +++++++++ ...ebhook_test.go => webhook_handler_test.go} | 24 +-- pkg/integrations/slack/slack.go | 18 -- pkg/integrations/smtp/smtp.go | 12 -- pkg/registry/integration.go | 31 --- pkg/registry/integration_test.go | 43 ----- pkg/registry/registry.go | 63 +++++-- pkg/registry/webhook_handler.go | 43 +++++ 
pkg/registry/webhook_handler_test.go | 49 +++++ pkg/workers/contexts/integration_context.go | 10 +- .../contexts/integration_context_test.go | 5 +- pkg/workers/webhook_cleanup_worker.go | 8 +- pkg/workers/webhook_provisioner.go | 10 +- pkg/workers/webhook_provisioner_test.go | 10 +- test/support/application.go | 71 ++++--- 45 files changed, 1355 insertions(+), 1376 deletions(-) create mode 100644 pkg/integrations/github/webhook_handler.go create mode 100644 pkg/integrations/github/webhook_handler_test.go create mode 100644 pkg/integrations/pagerduty/webhook_handler.go create mode 100644 pkg/integrations/pagerduty/webhook_handler_test.go create mode 100644 pkg/integrations/rootly/webhook_handler.go create mode 100644 pkg/integrations/rootly/webhook_handler_test.go create mode 100644 pkg/integrations/semaphore/webhook_handler.go create mode 100644 pkg/integrations/semaphore/webhook_handler_test.go create mode 100644 pkg/integrations/sendgrid/webhook_handler.go rename pkg/integrations/sendgrid/{sendgrid_webhook_test.go => webhook_handler_test.go} (92%) create mode 100644 pkg/registry/webhook_handler.go create mode 100644 pkg/registry/webhook_handler_test.go diff --git a/.cursor/commands/component-review.rules.md b/.cursor/commands/component-review.rules.md index 233993223b..5e66cc0965 100644 --- a/.cursor/commands/component-review.rules.md +++ b/.cursor/commands/component-review.rules.md @@ -72,8 +72,8 @@ The output channels shown in the UI include at least one channel (or rely on def ### Webhooks -- If the webhook is not configured through the integration, use `ctx.Webhook.Setup()`. If the webhook is configured through the integration, `ctx.Integration.RequestWebhook()` and implement the integration's `SetupWebhook`, `CleanupWebhook` -- We should always aim to share webhooks between components, if they use the same underlying event configuration. Use `CompareWebhookConfig` for that. 
For example, if we have two `github.onPush` triggers, one for main branch, and one for release branches, both of those triggers use the same webhook in GitHub. +- If the webhook is not configured through the integration, use `ctx.Webhook.Setup()`. If the webhook is configured through the integration, use `ctx.Integration.RequestWebhook()` and implement a `core.WebhookHandler` with `Setup`, `Cleanup`, and `CompareConfig`, registered via `registry.RegisterIntegrationWithWebhookHandler`. +- We should always aim to share webhooks between components if they use the same underlying event configuration. Use `CompareConfig` for that. For example, if we have two `github.onPush` triggers, one for main branch, and one for release branches, both of those triggers use the same webhook in GitHub. ### Triggers diff --git a/docs/contributing/integrations.md b/docs/contributing/integrations.md index 587b2a1d14..3ebe6b14c3 100644 --- a/docs/contributing/integrations.md +++ b/docs/contributing/integrations.md @@ -143,6 +143,14 @@ func (i *MyIntegration) HandleRequest(ctx core.HTTPRequestContext) { 3. **Register the integration** in the `init()` function (shown above) +If the integration manages webhooks, register a `core.WebhookHandler` along with the integration: + +```go +func init() { + registry.RegisterIntegrationWithWebhookHandler("myintegration", &MyIntegration{}, &MyIntegrationWebhookHandler{}) +} +``` + ## Adding Triggers Triggers listen to external events and start workflow executions. Here's how to add a new trigger: @@ -335,7 +343,7 @@ func (a *MyApp) Triggers() []core.Trigger { ### 3. 
Implement Webhook Setup (if needed) -If your triggers or components require webhooks, implement the webhook setup methods in your main integration file: +If your triggers or components require webhooks, implement a `core.WebhookHandler` and register it with the integration: ```go type WebhookConfiguration struct { @@ -343,9 +351,11 @@ type WebhookConfiguration struct { Resource string `json:"resource"` } -// CompareWebhookConfig defines when two webhook configurations are equal. +type MyIntegrationWebhookHandler struct{} + +// CompareConfig defines when two webhook configurations are equal. // This is used to determine if an existing webhook can be reused. -func (i *MyIntegration) CompareWebhookConfig(a, b any) (bool, error) { +func (h *MyIntegrationWebhookHandler) CompareConfig(a, b any) (bool, error) { configA := WebhookConfiguration{} if err := mapstructure.Decode(a, &configA); err != nil { return false, err @@ -361,17 +371,17 @@ func (i *MyIntegration) CompareWebhookConfig(a, b any) (bool, error) { return configA.Resource == configB.Resource && configA.EventType == configB.EventType, nil } -// SetupWebhook creates a webhook in the external service. +// Setup creates a webhook in the external service. // This is called by the webhook provisioner for pending webhook records. -func (i *MyIntegration) SetupWebhook(ctx core.IntegrationContext, options core.WebhookOptions) (any, error) { +func (h *MyIntegrationWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { // Create webhook in the external service // Return metadata about the created webhook (e.g., webhook ID) return nil, nil } -// CleanupWebhook deletes a webhook from the external service. +// Cleanup deletes a webhook from the external service. // This is called by the webhook cleanup worker for deleted webhook records. 
-func (i *MyIntegration) CleanupWebhook(ctx core.IntegrationContext, options core.WebhookOptions) error { +func (h *MyIntegrationWebhookHandler) Cleanup(ctx core.WebhookHandlerContext) error { // Delete webhook from the external service using the metadata return nil } @@ -382,7 +392,7 @@ func (i *MyIntegration) CleanupWebhook(ctx core.IntegrationContext, options core The webhook management logic is centralized in `Integration.RequestWebhook()`. When a trigger or component requests a webhook: 1. The context lists all existing webhooks for the integration -2. For each existing webhook, it calls your integration's `CompareWebhookConfig()` to check if configurations match +2. For each existing webhook, it calls your webhook handler's `CompareConfig()` to check if configurations match 3. If a match is found, the node is associated with the existing webhook 4. If no match is found, a new webhook is created diff --git a/pkg/core/integration.go b/pkg/core/integration.go index efa58b8605..9c61833dfd 100644 --- a/pkg/core/integration.go +++ b/pkg/core/integration.go @@ -81,25 +81,33 @@ type Integration interface { * HTTP request handler */ HandleRequest(ctx HTTPRequestContext) +} - /* - * Used to compare webhook configurations. - * If the configuration is the same, - * the system will reuse the existing webhook. - */ - CompareWebhookConfig(a, b any) (bool, error) +type WebhookHandler interface { /* * Set up webhooks through the integration, in the external system. * This is called by the webhook provisioner, for pending webhook records. */ - SetupWebhook(ctx SetupWebhookContext) (any, error) + Setup(ctx WebhookHandlerContext) (any, error) /* * Delete webhooks through the integration, in the external system. * This is called by the webhook cleanup worker, for webhook records that were deleted. */ - CleanupWebhook(ctx CleanupWebhookContext) error + Cleanup(ctx WebhookHandlerContext) error + + /* + * Compare two webhook configurations to see if they are the same. 
+ */ + CompareConfig(a, b any) (bool, error) +} + +type WebhookHandlerContext struct { + Logger *logrus.Entry + HTTP HTTPContext + Integration IntegrationContext + Webhook WebhookContext } type IntegrationComponent interface { @@ -147,19 +155,6 @@ type ListResourcesContext struct { Parameters map[string]string } -type SetupWebhookContext struct { - HTTP HTTPContext - Webhook WebhookContext - Logger *logrus.Entry - Integration IntegrationContext -} - -type CleanupWebhookContext struct { - HTTP HTTPContext - Webhook WebhookContext - Integration IntegrationContext -} - type WebhookOptions struct { ID string URL string diff --git a/pkg/integrations/aws/aws.go b/pkg/integrations/aws/aws.go index c6a90ca2a1..510a69430f 100644 --- a/pkg/integrations/aws/aws.go +++ b/pkg/integrations/aws/aws.go @@ -708,10 +708,6 @@ func (a *AWS) subscriptionApplies(subscription core.IntegrationSubscriptionConte return true } -func (a *AWS) CompareWebhookConfig(aConfig, bConfig any) (bool, error) { - return false, nil -} - func (a *AWS) Actions() []core.Action { return []core.Action{ { @@ -937,16 +933,3 @@ func (a *AWS) createRule( return nil } - -/* - * No additional webhook endpoints are used for AWS triggers. - * Events from AWS are received through the API destinations configured - * in the integration itself, using the integration HTTP URL. 
- */ -func (a *AWS) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - return nil, nil -} - -func (a *AWS) CleanupWebhook(ctx core.CleanupWebhookContext) error { - return nil -} diff --git a/pkg/integrations/claude/claude.go b/pkg/integrations/claude/claude.go index 45c3045dae..b078fac610 100644 --- a/pkg/integrations/claude/claude.go +++ b/pkg/integrations/claude/claude.go @@ -92,10 +92,6 @@ func (i *Claude) Sync(ctx core.SyncContext) error { func (i *Claude) HandleRequest(ctx core.HTTPRequestContext) { } -func (i *Claude) CompareWebhookConfig(a, b any) (bool, error) { - return true, nil -} - func (i *Claude) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { if resourceType != "model" { return []core.IntegrationResource{}, nil @@ -127,14 +123,6 @@ func (i *Claude) ListResources(resourceType string, ctx core.ListResourcesContex return resources, nil } -func (i *Claude) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - return nil, nil -} - -func (i *Claude) CleanupWebhook(ctx core.CleanupWebhookContext) error { - return nil -} - func (i *Claude) Actions() []core.Action { return []core.Action{} } diff --git a/pkg/integrations/claude/claude_test.go b/pkg/integrations/claude/claude_test.go index 5a7f0eb51b..eeaf96d40b 100644 --- a/pkg/integrations/claude/claude_test.go +++ b/pkg/integrations/claude/claude_test.go @@ -290,15 +290,3 @@ func TestClaude_ListResources(t *testing.T) { }) } } - -func TestClaude_CompareWebhookConfig(t *testing.T) { - i := &Claude{} - // Should always return true, nil based on implementation - ok, err := i.CompareWebhookConfig(nil, nil) - if err != nil { - t.Errorf("unexpected error: %v", err) - } - if !ok { - t.Error("expected CompareWebhookConfig to return true") - } -} diff --git a/pkg/integrations/cloudflare/cloudflare.go b/pkg/integrations/cloudflare/cloudflare.go index 7f1fff148b..af6172a2da 100644 --- a/pkg/integrations/cloudflare/cloudflare.go +++ 
b/pkg/integrations/cloudflare/cloudflare.go @@ -198,18 +198,6 @@ func (c *Cloudflare) HandleRequest(ctx core.HTTPRequestContext) { // no-op } -func (c *Cloudflare) CompareWebhookConfig(a, b any) (bool, error) { - return false, nil -} - -func (c *Cloudflare) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - return nil, nil -} - -func (c *Cloudflare) CleanupWebhook(ctx core.CleanupWebhookContext) error { - return nil -} - func (c *Cloudflare) Actions() []core.Action { return []core.Action{} } diff --git a/pkg/integrations/dash0/dash0.go b/pkg/integrations/dash0/dash0.go index 326dc0f73c..a5f96699e3 100644 --- a/pkg/integrations/dash0/dash0.go +++ b/pkg/integrations/dash0/dash0.go @@ -145,18 +145,6 @@ func (d *Dash0) HandleRequest(ctx core.HTTPRequestContext) { // no-op } -func (d *Dash0) CompareWebhookConfig(a, b any) (bool, error) { - return false, nil -} - -func (d *Dash0) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - return nil, nil -} - -func (d *Dash0) CleanupWebhook(ctx core.CleanupWebhookContext) error { - return nil -} - func (d *Dash0) Actions() []core.Action { return []core.Action{} } diff --git a/pkg/integrations/datadog/datadog.go b/pkg/integrations/datadog/datadog.go index c5385420b9..ed85d57dd6 100644 --- a/pkg/integrations/datadog/datadog.go +++ b/pkg/integrations/datadog/datadog.go @@ -140,26 +140,10 @@ func (d *Datadog) HandleRequest(ctx core.HTTPRequestContext) { // no-op - webhooks are handled by triggers } -func (d *Datadog) CleanupWebhook(ctx core.CleanupWebhookContext) error { - // no-op - Datadog webhooks are manually configured by users - return nil -} - -func (d *Datadog) CompareWebhookConfig(a, b any) (bool, error) { - // Datadog webhooks are manually configured, so we don't compare configurations - return true, nil -} - func (d *Datadog) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { return []core.IntegrationResource{}, nil } -func (d *Datadog) SetupWebhook(ctx 
core.SetupWebhookContext) (any, error) { - // Datadog webhooks are manually configured by users in the Datadog UI - // No automatic provisioning is supported - return nil, nil -} - func (d *Datadog) Actions() []core.Action { return []core.Action{} } diff --git a/pkg/integrations/daytona/daytona.go b/pkg/integrations/daytona/daytona.go index 2ce5010a3f..68ac7eb4c3 100644 --- a/pkg/integrations/daytona/daytona.go +++ b/pkg/integrations/daytona/daytona.go @@ -109,10 +109,6 @@ func (d *Daytona) HandleRequest(ctx core.HTTPRequestContext) { // no-op - Daytona does not emit external events } -func (d *Daytona) CompareWebhookConfig(a, b any) (bool, error) { - return true, nil -} - func (d *Daytona) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { if resourceType != "snapshot" { return []core.IntegrationResource{}, nil @@ -140,14 +136,6 @@ func (d *Daytona) ListResources(resourceType string, ctx core.ListResourcesConte return resources, nil } -func (d *Daytona) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - return nil, nil -} - -func (d *Daytona) CleanupWebhook(ctx core.CleanupWebhookContext) error { - return nil -} - func (d *Daytona) Actions() []core.Action { return []core.Action{} } diff --git a/pkg/integrations/discord/discord.go b/pkg/integrations/discord/discord.go index 4ee3e07928..3257d5fa50 100644 --- a/pkg/integrations/discord/discord.go +++ b/pkg/integrations/discord/discord.go @@ -114,10 +114,6 @@ func (d *Discord) HandleRequest(ctx core.HTTPRequestContext) { // no-op: Discord bot integration doesn't receive incoming HTTP requests } -func (d *Discord) CompareWebhookConfig(a, b any) (bool, error) { - return true, nil -} - func (d *Discord) Cleanup(ctx core.IntegrationCleanupContext) error { return nil } @@ -181,14 +177,6 @@ func (d *Discord) ListResources(resourceType string, ctx core.ListResourcesConte return resources, nil } -func (d *Discord) SetupWebhook(ctx core.SetupWebhookContext) (any, 
error) { - return nil, nil -} - -func (d *Discord) CleanupWebhook(ctx core.CleanupWebhookContext) error { - return nil -} - func (d *Discord) Actions() []core.Action { return []core.Action{} } diff --git a/pkg/integrations/github/github.go b/pkg/integrations/github/github.go index 0d3d3e3c87..523886ba5e 100644 --- a/pkg/integrations/github/github.go +++ b/pkg/integrations/github/github.go @@ -36,7 +36,7 @@ To complete the GitHub app setup: ) func init() { - registry.RegisterIntegration("github", &GitHub{}) + registry.RegisterIntegrationWithWebhookHandler("github", &GitHub{}, &GitHubWebhookHandler{}) } type GitHub struct { @@ -312,143 +312,6 @@ func (g *GitHub) handleInstallationRepositoriesEvent(ctx core.HTTPRequestContext ctx.Integration.SetMetadata(metadata) } -type WebhookConfiguration struct { - EventType string `json:"eventType"` - EventTypes []string `json:"eventTypes"` // Multiple event types (takes precedence over EventType if set) - Repository string `json:"repository"` -} - -func (g *GitHub) CompareWebhookConfig(a, b any) (bool, error) { - configA := WebhookConfiguration{} - configB := WebhookConfiguration{} - - err := mapstructure.Decode(a, &configA) - if err != nil { - return false, err - } - - err = mapstructure.Decode(b, &configB) - if err != nil { - return false, err - } - - if configA.Repository != configB.Repository { - return false, nil - } - - // Compare event types - normalize to slices for comparison - eventsA := configA.EventTypes - if len(eventsA) == 0 && configA.EventType != "" { - eventsA = []string{configA.EventType} - } - - eventsB := configB.EventTypes - if len(eventsB) == 0 && configB.EventType != "" { - eventsB = []string{configB.EventType} - } - - if len(eventsA) != len(eventsB) { - return false, nil - } - - // Create a map to compare events regardless of order - eventMap := make(map[string]bool) - for _, e := range eventsA { - eventMap[e] = true - } - for _, e := range eventsB { - if !eventMap[e] { - return false, nil - } - } - - 
return true, nil -} - -type Webhook struct { - ID int64 `json:"id"` - WebhookName string `json:"name"` -} - -func (g *GitHub) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - metadata := Metadata{} - err := mapstructure.Decode(ctx.Integration.GetMetadata(), &metadata) - if err != nil { - return nil, err - } - - client, err := NewClient(ctx.Integration, metadata.GitHubApp.ID, metadata.InstallationID) - if err != nil { - return nil, err - } - - config := WebhookConfiguration{} - err = mapstructure.Decode(ctx.Webhook.GetConfiguration(), &config) - if err != nil { - return nil, err - } - - secret, err := ctx.Webhook.GetSecret() - if err != nil { - return nil, fmt.Errorf("error getting webhook secret: %v", err) - } - - // Use EventTypes if set, otherwise fall back to single EventType - events := config.EventTypes - if len(events) == 0 && config.EventType != "" { - events = []string{config.EventType} - } - - hook := &github.Hook{ - Active: github.Ptr(true), - Events: events, - Config: &github.HookConfig{ - URL: github.Ptr(ctx.Webhook.GetURL()), - Secret: github.Ptr(string(secret)), - ContentType: github.Ptr("json"), - }, - } - - createdHook, _, err := client.Repositories.CreateHook(context.Background(), metadata.Owner, config.Repository, hook) - if err != nil { - return nil, fmt.Errorf("error creating webhook: %v", err) - } - - return &Webhook{ID: createdHook.GetID(), WebhookName: *createdHook.Name}, nil -} - -func (g *GitHub) CleanupWebhook(ctx core.CleanupWebhookContext) error { - metadata := Metadata{} - err := mapstructure.Decode(ctx.Integration.GetMetadata(), &metadata) - if err != nil { - return err - } - - client, err := NewClient(ctx.Integration, metadata.GitHubApp.ID, metadata.InstallationID) - if err != nil { - return err - } - - webhook := Webhook{} - err = mapstructure.Decode(ctx.Webhook.GetMetadata(), &webhook) - if err != nil { - return err - } - - configuration := WebhookConfiguration{} - err = mapstructure.Decode(ctx.Webhook.GetConfiguration(), 
&configuration) - if err != nil { - return err - } - - _, err = client.Repositories.DeleteHook(context.Background(), metadata.Owner, configuration.Repository, webhook.ID) - if err != nil { - return fmt.Errorf("error deleting webhook: %v", err) - } - - return nil -} - func (g *GitHub) afterAppCreation(ctx core.HTTPRequestContext, metadata Metadata) { code := ctx.Request.URL.Query().Get("code") state := ctx.Request.URL.Query().Get("state") diff --git a/pkg/integrations/github/github_test.go b/pkg/integrations/github/github_test.go index 11312bebf4..421e901333 100644 --- a/pkg/integrations/github/github_test.go +++ b/pkg/integrations/github/github_test.go @@ -57,115 +57,3 @@ func Test__GitHub__Setup(t *testing.T) { assert.NotEmpty(t, metadata.State) }) } - -func Test__GitHub__CompareWebhookConfig(t *testing.T) { - g := &GitHub{} - - testCases := []struct { - name string - configA any - configB any - expectEqual bool - expectError bool - }{ - { - name: "identical configurations", - configA: WebhookConfiguration{ - EventType: "push", - Repository: "superplane", - }, - configB: WebhookConfiguration{ - EventType: "push", - Repository: "superplane", - }, - expectEqual: true, - expectError: false, - }, - { - name: "different event types", - configA: WebhookConfiguration{ - EventType: "push", - Repository: "superplane", - }, - configB: WebhookConfiguration{ - EventType: "pull_request", - Repository: "superplane", - }, - expectEqual: false, - expectError: false, - }, - { - name: "different repositories", - configA: WebhookConfiguration{ - EventType: "push", - Repository: "superplane", - }, - configB: WebhookConfiguration{ - EventType: "push", - Repository: "other-repo", - }, - expectEqual: false, - expectError: false, - }, - { - name: "both fields different", - configA: WebhookConfiguration{ - EventType: "push", - Repository: "superplane", - }, - configB: WebhookConfiguration{ - EventType: "issues", - Repository: "other-repo", - }, - expectEqual: false, - expectError: false, 
- }, - { - name: "comparing map representations", - configA: map[string]any{ - "eventType": "push", - "repository": "superplane", - }, - configB: map[string]any{ - "eventType": "push", - "repository": "superplane", - }, - expectEqual: true, - expectError: false, - }, - { - name: "invalid first configuration", - configA: "invalid", - configB: WebhookConfiguration{ - EventType: "push", - Repository: "superplane", - }, - expectEqual: false, - expectError: true, - }, - { - name: "invalid second configuration", - configA: WebhookConfiguration{ - EventType: "push", - Repository: "superplane", - }, - configB: "invalid", - expectEqual: false, - expectError: true, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - equal, err := g.CompareWebhookConfig(tc.configA, tc.configB) - - if tc.expectError { - assert.Error(t, err) - } else { - require.NoError(t, err) - } - - assert.Equal(t, tc.expectEqual, equal) - }) - } -} diff --git a/pkg/integrations/github/webhook_handler.go b/pkg/integrations/github/webhook_handler.go new file mode 100644 index 0000000000..c3c7d0c06f --- /dev/null +++ b/pkg/integrations/github/webhook_handler.go @@ -0,0 +1,149 @@ +package github + +import ( + "context" + "fmt" + + "github.com/google/go-github/v74/github" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/core" +) + +type WebhookConfiguration struct { + EventType string `json:"eventType"` + EventTypes []string `json:"eventTypes"` // Multiple event types (takes precedence over EventType if set) + Repository string `json:"repository"` +} + +type Webhook struct { + ID int64 `json:"id"` + WebhookName string `json:"name"` +} + +type GitHubWebhookHandler struct{} + +func (h *GitHubWebhookHandler) CompareConfig(a, b any) (bool, error) { + configA := WebhookConfiguration{} + configB := WebhookConfiguration{} + + err := mapstructure.Decode(a, &configA) + if err != nil { + return false, err + } + + err = mapstructure.Decode(b, &configB) + if err 
!= nil { + return false, err + } + + if configA.Repository != configB.Repository { + return false, nil + } + + // Compare event types - normalize to slices for comparison + eventsA := configA.EventTypes + if len(eventsA) == 0 && configA.EventType != "" { + eventsA = []string{configA.EventType} + } + + eventsB := configB.EventTypes + if len(eventsB) == 0 && configB.EventType != "" { + eventsB = []string{configB.EventType} + } + + if len(eventsA) != len(eventsB) { + return false, nil + } + + // Create a map to compare events regardless of order + eventMap := make(map[string]bool) + for _, e := range eventsA { + eventMap[e] = true + } + for _, e := range eventsB { + if !eventMap[e] { + return false, nil + } + } + + return true, nil +} + +func (h *GitHubWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { + metadata := Metadata{} + err := mapstructure.Decode(ctx.Integration.GetMetadata(), &metadata) + if err != nil { + return nil, err + } + + client, err := NewClient(ctx.Integration, metadata.GitHubApp.ID, metadata.InstallationID) + if err != nil { + return nil, err + } + + config := WebhookConfiguration{} + err = mapstructure.Decode(ctx.Webhook.GetConfiguration(), &config) + if err != nil { + return nil, err + } + + secret, err := ctx.Webhook.GetSecret() + if err != nil { + return nil, fmt.Errorf("error getting webhook secret: %v", err) + } + + // Use EventTypes if set, otherwise fall back to single EventType + events := config.EventTypes + if len(events) == 0 && config.EventType != "" { + events = []string{config.EventType} + } + + hook := &github.Hook{ + Active: github.Ptr(true), + Events: events, + Config: &github.HookConfig{ + URL: github.Ptr(ctx.Webhook.GetURL()), + Secret: github.Ptr(string(secret)), + ContentType: github.Ptr("json"), + }, + } + + createdHook, _, err := client.Repositories.CreateHook(context.Background(), metadata.Owner, config.Repository, hook) + if err != nil { + return nil, fmt.Errorf("error creating webhook: %v", err) + } + + 
return &Webhook{ID: createdHook.GetID(), WebhookName: *createdHook.Name}, nil +} + +func (h *GitHubWebhookHandler) Cleanup(ctx core.WebhookHandlerContext) error { + metadata := Metadata{} + err := mapstructure.Decode(ctx.Integration.GetMetadata(), &metadata) + if err != nil { + return err + } + + client, err := NewClient(ctx.Integration, metadata.GitHubApp.ID, metadata.InstallationID) + if err != nil { + return err + } + + webhook := Webhook{} + err = mapstructure.Decode(ctx.Webhook.GetMetadata(), &webhook) + if err != nil { + return err + } + + configuration := WebhookConfiguration{} + err = mapstructure.Decode(ctx.Webhook.GetConfiguration(), &configuration) + if err != nil { + return err + } + + _, err = client.Repositories.DeleteHook(context.Background(), metadata.Owner, configuration.Repository, webhook.ID) + if err != nil { + return fmt.Errorf("error deleting webhook: %v", err) + } + + return nil +} diff --git a/pkg/integrations/github/webhook_handler_test.go b/pkg/integrations/github/webhook_handler_test.go new file mode 100644 index 0000000000..de4ea8f730 --- /dev/null +++ b/pkg/integrations/github/webhook_handler_test.go @@ -0,0 +1,120 @@ +package github + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test__GitHubWebhookHandler__CompareConfig(t *testing.T) { + handler := &GitHubWebhookHandler{} + + testCases := []struct { + name string + configA any + configB any + expectEqual bool + expectError bool + }{ + { + name: "identical configurations", + configA: WebhookConfiguration{ + EventType: "push", + Repository: "superplane", + }, + configB: WebhookConfiguration{ + EventType: "push", + Repository: "superplane", + }, + expectEqual: true, + expectError: false, + }, + { + name: "different event types", + configA: WebhookConfiguration{ + EventType: "push", + Repository: "superplane", + }, + configB: WebhookConfiguration{ + EventType: "pull_request", + Repository: "superplane", + }, + 
expectEqual: false, + expectError: false, + }, + { + name: "different repositories", + configA: WebhookConfiguration{ + EventType: "push", + Repository: "superplane", + }, + configB: WebhookConfiguration{ + EventType: "push", + Repository: "other-repo", + }, + expectEqual: false, + expectError: false, + }, + { + name: "both fields different", + configA: WebhookConfiguration{ + EventType: "push", + Repository: "superplane", + }, + configB: WebhookConfiguration{ + EventType: "issues", + Repository: "other-repo", + }, + expectEqual: false, + expectError: false, + }, + { + name: "comparing map representations", + configA: map[string]any{ + "eventType": "push", + "repository": "superplane", + }, + configB: map[string]any{ + "eventType": "push", + "repository": "superplane", + }, + expectEqual: true, + expectError: false, + }, + { + name: "invalid first configuration", + configA: "invalid", + configB: WebhookConfiguration{ + EventType: "push", + Repository: "superplane", + }, + expectEqual: false, + expectError: true, + }, + { + name: "invalid second configuration", + configA: WebhookConfiguration{ + EventType: "push", + Repository: "superplane", + }, + configB: "invalid", + expectEqual: false, + expectError: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + equal, err := handler.CompareConfig(tc.configA, tc.configB) + + if tc.expectError { + assert.Error(t, err) + } else { + require.NoError(t, err) + } + + assert.Equal(t, tc.expectEqual, equal) + }) + } +} diff --git a/pkg/integrations/jira/jira.go b/pkg/integrations/jira/jira.go index c3d4329efc..374a506271 100644 --- a/pkg/integrations/jira/jira.go +++ b/pkg/integrations/jira/jira.go @@ -129,18 +129,6 @@ func (j *Jira) HandleRequest(ctx core.HTTPRequestContext) { // no-op } -func (j *Jira) CompareWebhookConfig(a, b any) (bool, error) { - return false, nil -} - -func (j *Jira) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - return nil, nil -} - -func (j *Jira) 
CleanupWebhook(ctx core.CleanupWebhookContext) error { - return nil -} - func (j *Jira) Actions() []core.Action { return []core.Action{} } diff --git a/pkg/integrations/openai/openai.go b/pkg/integrations/openai/openai.go index d2b66e795f..2388d41d6e 100644 --- a/pkg/integrations/openai/openai.go +++ b/pkg/integrations/openai/openai.go @@ -93,10 +93,6 @@ func (o *OpenAI) HandleRequest(ctx core.HTTPRequestContext) { // no-op } -func (o *OpenAI) CompareWebhookConfig(a, b any) (bool, error) { - return true, nil -} - func (o *OpenAI) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { if resourceType != "model" { return []core.IntegrationResource{}, nil @@ -128,14 +124,6 @@ func (o *OpenAI) ListResources(resourceType string, ctx core.ListResourcesContex return resources, nil } -func (o *OpenAI) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - return nil, nil -} - -func (o *OpenAI) CleanupWebhook(ctx core.CleanupWebhookContext) error { - return nil -} - func (o *OpenAI) Actions() []core.Action { return []core.Action{} } diff --git a/pkg/integrations/pagerduty/pagerduty.go b/pkg/integrations/pagerduty/pagerduty.go index fda7050496..a474f7baf7 100644 --- a/pkg/integrations/pagerduty/pagerduty.go +++ b/pkg/integrations/pagerduty/pagerduty.go @@ -6,7 +6,6 @@ import ( "io" "net/http" "net/url" - "slices" "strings" "time" @@ -16,27 +15,8 @@ import ( "github.com/superplanehq/superplane/pkg/registry" ) -/* - * 1. Integrations > App Registration > New App - * 2. Set the name and description for new app - * 3. Functionality -> select "OAuth 2.0" only - * 4. Authorization -> "Scoped Auth" - * 5. 
Permission Scope -> - * - incidents.read, incidents.write - * - webhook_subscriptions.read, webhook_subscriptions.write - * - users.read - * - teams.read - * - services.read - * - schedules.read - * - priorities.read - * - oncalls.read - * - incident_types.read - * - escalation_policies.read - * - custom_fields.read - */ - func init() { - registry.RegisterIntegration("pagerduty", &PagerDuty{}) + registry.RegisterIntegrationWithWebhookHandler("pagerduty", &PagerDuty{}, &PagerDutyWebhookHandler{}) } type PagerDuty struct{} @@ -336,123 +316,6 @@ func (r *TokenResponse) GetExpiration() time.Duration { return time.Hour } -type WebhookConfiguration struct { - - // - // Specific event types, e.g., ["incident.resolved", "incident.triggered"] - // - Events []string `json:"events"` - - // - // Filter for webhook. - // - Filter WebhookFilter `json:"filter"` -} - -type WebhookFilter struct { - // - // Type of filter for event subscription: - // - account_reference: webhook is created on account level - // - team_reference: events will be sent only for events related to the specified team - // - service_reference: events will be sent only for events related ot the specified service. - // - Type string `json:"type"` - - // - // If team_reference is used, this must be the ID of a team. - // If service_reference is used, this must be the ID of a service. 
- // - ID string `json:"id"` -} - -type WebhookMetadata struct { - SubscriptionID string `json:"subscriptionId"` -} - -func (p *PagerDuty) CompareWebhookConfig(a, b any) (bool, error) { - configA := WebhookConfiguration{} - configB := WebhookConfiguration{} - - err := mapstructure.Decode(a, &configA) - if err != nil { - return false, err - } - - err = mapstructure.Decode(b, &configB) - if err != nil { - return false, err - } - - // - // The event subscription filter on the webhook must match exactly - // - if configA.Filter.Type != configB.Filter.Type || configA.Filter.ID != configB.Filter.ID { - return false, nil - } - - // Check if A contains all events from B (A is superset of B) - // This allows webhook sharing when existing webhook has more events than needed - for _, eventB := range configB.Events { - if !slices.Contains(configA.Events, eventB) { - return false, nil - } - } - - return true, nil -} - -func (p *PagerDuty) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - client, err := NewClient(ctx.HTTP, ctx.Integration) - if err != nil { - return nil, err - } - - configuration := WebhookConfiguration{} - err = mapstructure.Decode(ctx.Webhook.GetConfiguration(), &configuration) - if err != nil { - return nil, fmt.Errorf("error decoding webhook configuration: %v", err) - } - - // - // Create webhook subscription. - // NOTE: PagerDuty returns the secret used for signing webhooks - // on the subscription response, so we need to update the webhook secret on our end. 
- // - subscription, err := client.CreateWebhookSubscription(ctx.Webhook.GetURL(), configuration.Events, configuration.Filter) - if err != nil { - return nil, fmt.Errorf("error creating webhook subscription: %v", err) - } - - err = ctx.Webhook.SetSecret([]byte(subscription.DeliveryMethod.Secret)) - if err != nil { - return nil, fmt.Errorf("error updating webhook secret: %v", err) - } - - return WebhookMetadata{ - SubscriptionID: subscription.ID, - }, nil -} - -func (p *PagerDuty) CleanupWebhook(ctx core.CleanupWebhookContext) error { - metadata := WebhookMetadata{} - err := mapstructure.Decode(ctx.Webhook.GetMetadata(), &metadata) - if err != nil { - return fmt.Errorf("error decoding webhook metadata: %v", err) - } - - client, err := NewClient(ctx.HTTP, ctx.Integration) - if err != nil { - return err - } - - err = client.DeleteWebhookSubscription(metadata.SubscriptionID) - if err != nil { - return fmt.Errorf("error deleting webhook subscription: %v", err) - } - - return nil -} - func (p *PagerDuty) Actions() []core.Action { return []core.Action{} } diff --git a/pkg/integrations/pagerduty/pagerduty_test.go b/pkg/integrations/pagerduty/pagerduty_test.go index ea080bf065..d7d9c9e105 100644 --- a/pkg/integrations/pagerduty/pagerduty_test.go +++ b/pkg/integrations/pagerduty/pagerduty_test.go @@ -328,143 +328,3 @@ func Test__Pagerduty__Sync(t *testing.T) { assert.Equal(t, "Test Service", metadata.Services[0].Name) }) } - -func Test__PagerDuty__CompareWebhookConfig(t *testing.T) { - p := &PagerDuty{} - - testCases := []struct { - name string - configA any - configB any - expectEqual bool - expectError bool - }{ - { - name: "identical events", - configA: WebhookConfiguration{ - Events: []string{"incident.triggered"}, - Filter: WebhookFilter{ - Type: "service_reference", - ID: "service1", - }, - }, - configB: WebhookConfiguration{ - Events: []string{"incident.triggered"}, - Filter: WebhookFilter{ - Type: "service_reference", - ID: "service1", - }, - }, - expectEqual: true, 
- expectError: false, - }, - { - name: "different service", - configA: WebhookConfiguration{ - Events: []string{"incident.triggered"}, - Filter: WebhookFilter{ - Type: "service_reference", - ID: "service1", - }, - }, - configB: WebhookConfiguration{ - Events: []string{"incident.triggered"}, - Filter: WebhookFilter{ - Type: "service_reference", - ID: "service2", - }, - }, - expectEqual: false, - expectError: false, - }, - { - name: "different event", - configA: WebhookConfiguration{ - Events: []string{"incident.resolved"}, - Filter: WebhookFilter{ - Type: "service_reference", - ID: "service1", - }, - }, - configB: WebhookConfiguration{ - Events: []string{"incident.triggered"}, - Filter: WebhookFilter{ - Type: "service_reference", - ID: "service1", - }, - }, - expectEqual: false, - expectError: false, - }, - { - name: "subset of events", - configA: WebhookConfiguration{ - Events: []string{"incident.triggered", "incident.resolved"}, - Filter: WebhookFilter{ - Type: "service_reference", - ID: "service1", - }, - }, - configB: WebhookConfiguration{ - Events: []string{"incident.triggered"}, - Filter: WebhookFilter{ - Type: "service_reference", - ID: "service1", - }, - }, - expectEqual: true, - expectError: false, - }, - { - name: "comparing map representations", - configA: map[string]any{ - "events": []string{"incident.triggered"}, - "filter": map[string]string{ - "type": "service_reference", - "id": "service1", - }, - }, - configB: map[string]any{ - "events": []string{"incident.triggered"}, - "filter": map[string]string{ - "type": "service_reference", - "id": "service1", - }, - }, - expectEqual: true, - expectError: false, - }, - { - name: "invalid first configuration", - configA: "invalid", - configB: WebhookConfiguration{ - Events: []string{"incident.triggered"}, - }, - expectEqual: false, - expectError: true, - }, - { - name: "invalid second configuration", - configA: WebhookConfiguration{ - Events: []string{"incident.triggered"}, - }, - configB: "invalid", - 
expectEqual: false, - expectError: true, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - equal, err := p.CompareWebhookConfig(tc.configA, tc.configB) - - if tc.expectError { - assert.Error(t, err, "expected error, but got none") - } else { - require.NoError(t, err, "did not expect, but got an error") - } - - assert.Equal(t, tc.expectEqual, equal, "expected config to be equal, but they are different") - }) - } -} diff --git a/pkg/integrations/pagerduty/webhook_handler.go b/pkg/integrations/pagerduty/webhook_handler.go new file mode 100644 index 0000000000..e0e43842bc --- /dev/null +++ b/pkg/integrations/pagerduty/webhook_handler.go @@ -0,0 +1,128 @@ +package pagerduty + +import ( + "fmt" + "slices" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/core" +) + +type WebhookConfiguration struct { + + // + // Specific event types, e.g., ["incident.resolved", "incident.triggered"] + // + Events []string `json:"events"` + + // + // Filter for webhook. + // + Filter WebhookFilter `json:"filter"` +} + +type WebhookFilter struct { + // + // Type of filter for event subscription: + // - account_reference: webhook is created on account level + // - team_reference: events will be sent only for events related to the specified team + // - service_reference: events will be sent only for events related ot the specified service. + // + Type string `json:"type"` + + // + // If team_reference is used, this must be the ID of a team. + // If service_reference is used, this must be the ID of a service. 
+ // + ID string `json:"id"` +} + +type WebhookMetadata struct { + SubscriptionID string `json:"subscriptionId"` +} + +type PagerDutyWebhookHandler struct{} + +func (h *PagerDutyWebhookHandler) CompareConfig(a, b any) (bool, error) { + configA := WebhookConfiguration{} + configB := WebhookConfiguration{} + + err := mapstructure.Decode(a, &configA) + if err != nil { + return false, err + } + + err = mapstructure.Decode(b, &configB) + if err != nil { + return false, err + } + + // + // The event subscription filter on the webhook must match exactly + // + if configA.Filter.Type != configB.Filter.Type || configA.Filter.ID != configB.Filter.ID { + return false, nil + } + + // Check if A contains all events from B (A is superset of B) + // This allows webhook sharing when existing webhook has more events than needed + for _, eventB := range configB.Events { + if !slices.Contains(configA.Events, eventB) { + return false, nil + } + } + + return true, nil +} + +func (h *PagerDutyWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, err + } + + configuration := WebhookConfiguration{} + err = mapstructure.Decode(ctx.Webhook.GetConfiguration(), &configuration) + if err != nil { + return nil, fmt.Errorf("error decoding webhook configuration: %v", err) + } + + // + // Create webhook subscription. + // NOTE: PagerDuty returns the secret used for signing webhooks + // on the subscription response, so we need to update the webhook secret on our end. 
+ // + subscription, err := client.CreateWebhookSubscription(ctx.Webhook.GetURL(), configuration.Events, configuration.Filter) + if err != nil { + return nil, fmt.Errorf("error creating webhook subscription: %v", err) + } + + err = ctx.Webhook.SetSecret([]byte(subscription.DeliveryMethod.Secret)) + if err != nil { + return nil, fmt.Errorf("error updating webhook secret: %v", err) + } + + return WebhookMetadata{ + SubscriptionID: subscription.ID, + }, nil +} + +func (h *PagerDutyWebhookHandler) Cleanup(ctx core.WebhookHandlerContext) error { + metadata := WebhookMetadata{} + err := mapstructure.Decode(ctx.Webhook.GetMetadata(), &metadata) + if err != nil { + return fmt.Errorf("error decoding webhook metadata: %v", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + err = client.DeleteWebhookSubscription(metadata.SubscriptionID) + if err != nil { + return fmt.Errorf("error deleting webhook subscription: %v", err) + } + + return nil +} diff --git a/pkg/integrations/pagerduty/webhook_handler_test.go b/pkg/integrations/pagerduty/webhook_handler_test.go new file mode 100644 index 0000000000..540cc34c4b --- /dev/null +++ b/pkg/integrations/pagerduty/webhook_handler_test.go @@ -0,0 +1,148 @@ +package pagerduty + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test__PagerDutyWebhookHandler__CompareConfig(t *testing.T) { + handler := &PagerDutyWebhookHandler{} + + testCases := []struct { + name string + configA any + configB any + expectEqual bool + expectError bool + }{ + { + name: "identical events", + configA: WebhookConfiguration{ + Events: []string{"incident.triggered"}, + Filter: WebhookFilter{ + Type: "service_reference", + ID: "service1", + }, + }, + configB: WebhookConfiguration{ + Events: []string{"incident.triggered"}, + Filter: WebhookFilter{ + Type: "service_reference", + ID: "service1", + }, + }, + expectEqual: true, + expectError: false, + }, + 
{ + name: "different service", + configA: WebhookConfiguration{ + Events: []string{"incident.triggered"}, + Filter: WebhookFilter{ + Type: "service_reference", + ID: "service1", + }, + }, + configB: WebhookConfiguration{ + Events: []string{"incident.triggered"}, + Filter: WebhookFilter{ + Type: "service_reference", + ID: "service2", + }, + }, + expectEqual: false, + expectError: false, + }, + { + name: "different event", + configA: WebhookConfiguration{ + Events: []string{"incident.resolved"}, + Filter: WebhookFilter{ + Type: "service_reference", + ID: "service1", + }, + }, + configB: WebhookConfiguration{ + Events: []string{"incident.triggered"}, + Filter: WebhookFilter{ + Type: "service_reference", + ID: "service1", + }, + }, + expectEqual: false, + expectError: false, + }, + { + name: "subset of events", + configA: WebhookConfiguration{ + Events: []string{"incident.triggered", "incident.resolved"}, + Filter: WebhookFilter{ + Type: "service_reference", + ID: "service1", + }, + }, + configB: WebhookConfiguration{ + Events: []string{"incident.triggered"}, + Filter: WebhookFilter{ + Type: "service_reference", + ID: "service1", + }, + }, + expectEqual: true, + expectError: false, + }, + { + name: "comparing map representations", + configA: map[string]any{ + "events": []string{"incident.triggered"}, + "filter": map[string]string{ + "type": "service_reference", + "id": "service1", + }, + }, + configB: map[string]any{ + "events": []string{"incident.triggered"}, + "filter": map[string]string{ + "type": "service_reference", + "id": "service1", + }, + }, + expectEqual: true, + expectError: false, + }, + { + name: "invalid first configuration", + configA: "invalid", + configB: WebhookConfiguration{ + Events: []string{"incident.triggered"}, + }, + expectEqual: false, + expectError: true, + }, + { + name: "invalid second configuration", + configA: WebhookConfiguration{ + Events: []string{"incident.triggered"}, + }, + configB: "invalid", + expectEqual: false, + expectError: 
true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + equal, err := handler.CompareConfig(tc.configA, tc.configB) + + if tc.expectError { + assert.Error(t, err, "expected error, but got none") + } else { + require.NoError(t, err, "did not expect, but got an error") + } + + assert.Equal(t, tc.expectEqual, equal, "expected config to be equal, but they are different") + }) + } +} diff --git a/pkg/integrations/rootly/rootly.go b/pkg/integrations/rootly/rootly.go index 9b7d8cfc18..1b58b5f646 100644 --- a/pkg/integrations/rootly/rootly.go +++ b/pkg/integrations/rootly/rootly.go @@ -4,7 +4,6 @@ import ( "crypto/hmac" "crypto/sha256" "fmt" - "slices" "strings" "github.com/mitchellh/mapstructure" @@ -14,7 +13,7 @@ import ( ) func init() { - registry.RegisterIntegration("rootly", &Rootly{}) + registry.RegisterIntegrationWithWebhookHandler("rootly", &Rootly{}, &RootlyWebhookHandler{}) } type Rootly struct{} @@ -106,85 +105,6 @@ func (r *Rootly) HandleRequest(ctx core.HTTPRequestContext) { // no-op } -type WebhookConfiguration struct { - Events []string `json:"events"` -} - -type WebhookMetadata struct { - EndpointID string `json:"endpointId"` -} - -func (r *Rootly) CompareWebhookConfig(a, b any) (bool, error) { - configA := WebhookConfiguration{} - configB := WebhookConfiguration{} - - err := mapstructure.Decode(a, &configA) - if err != nil { - return false, err - } - - err = mapstructure.Decode(b, &configB) - if err != nil { - return false, err - } - - // Check if A contains all events from B (A is superset of B) - for _, eventB := range configB.Events { - if !slices.Contains(configA.Events, eventB) { - return false, nil - } - } - - return true, nil -} - -func (r *Rootly) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - client, err := NewClient(ctx.HTTP, ctx.Integration) - if err != nil { - return nil, err - } - - config := WebhookConfiguration{} - err = mapstructure.Decode(ctx.Webhook.GetConfiguration(), &config) - if err != 
nil { - return nil, fmt.Errorf("error decoding webhook configuration: %v", err) - } - - endpoint, err := client.CreateWebhookEndpoint(ctx.Webhook.GetURL(), config.Events) - if err != nil { - return nil, fmt.Errorf("error creating webhook endpoint: %v", err) - } - - err = ctx.Webhook.SetSecret([]byte(endpoint.Secret)) - if err != nil { - return nil, fmt.Errorf("error updating webhook secret: %v", err) - } - - return WebhookMetadata{ - EndpointID: endpoint.ID, - }, nil -} - -func (r *Rootly) CleanupWebhook(ctx core.CleanupWebhookContext) error { - metadata := WebhookMetadata{} - err := mapstructure.Decode(ctx.Webhook.GetMetadata(), &metadata) - if err != nil { - return fmt.Errorf("error decoding webhook metadata: %v", err) - } - - client, err := NewClient(ctx.HTTP, ctx.Integration) - if err != nil { - return err - } - - err = client.DeleteWebhookEndpoint(metadata.EndpointID) - if err != nil { - return fmt.Errorf("error deleting webhook endpoint: %v", err) - } - - return nil -} - func (r *Rootly) Actions() []core.Action { return []core.Action{} } diff --git a/pkg/integrations/rootly/rootly_test.go b/pkg/integrations/rootly/rootly_test.go index 3b2da39437..a197200160 100644 --- a/pkg/integrations/rootly/rootly_test.go +++ b/pkg/integrations/rootly/rootly_test.go @@ -105,106 +105,6 @@ func Test__Rootly__Sync(t *testing.T) { }) } -func Test__Rootly__CompareWebhookConfig(t *testing.T) { - r := &Rootly{} - - testCases := []struct { - name string - configA any - configB any - expectEqual bool - expectError bool - }{ - { - name: "identical events", - configA: WebhookConfiguration{ - Events: []string{"incident.created"}, - }, - configB: WebhookConfiguration{ - Events: []string{"incident.created"}, - }, - expectEqual: true, - expectError: false, - }, - { - name: "different events", - configA: WebhookConfiguration{ - Events: []string{"incident.created"}, - }, - configB: WebhookConfiguration{ - Events: []string{"incident.resolved"}, - }, - expectEqual: false, - expectError: 
false, - }, - { - name: "superset of events (A contains all of B)", - configA: WebhookConfiguration{ - Events: []string{"incident.created", "incident.updated", "incident.resolved"}, - }, - configB: WebhookConfiguration{ - Events: []string{"incident.created"}, - }, - expectEqual: true, - expectError: false, - }, - { - name: "subset of events (A does not contain all of B)", - configA: WebhookConfiguration{ - Events: []string{"incident.created"}, - }, - configB: WebhookConfiguration{ - Events: []string{"incident.created", "incident.resolved"}, - }, - expectEqual: false, - expectError: false, - }, - { - name: "comparing map representations", - configA: map[string]any{ - "events": []string{"incident.created", "incident.updated"}, - }, - configB: map[string]any{ - "events": []string{"incident.created"}, - }, - expectEqual: true, - expectError: false, - }, - { - name: "invalid first configuration", - configA: "invalid", - configB: WebhookConfiguration{ - Events: []string{"incident.created"}, - }, - expectEqual: false, - expectError: true, - }, - { - name: "invalid second configuration", - configA: WebhookConfiguration{ - Events: []string{"incident.created"}, - }, - configB: "invalid", - expectEqual: false, - expectError: true, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - equal, err := r.CompareWebhookConfig(tc.configA, tc.configB) - - if tc.expectError { - assert.Error(t, err, "expected error, but got none") - } else { - require.NoError(t, err, "did not expect, but got an error") - } - - assert.Equal(t, tc.expectEqual, equal, "expected config comparison result to match") - }) - } -} - func Test__verifyWebhookSignature(t *testing.T) { t.Run("missing signature -> error", func(t *testing.T) { err := verifyWebhookSignature("", []byte("body"), []byte("secret")) diff --git a/pkg/integrations/rootly/webhook_handler.go b/pkg/integrations/rootly/webhook_handler.go new file mode 100644 index 0000000000..cd7ed33327 --- /dev/null +++ 
b/pkg/integrations/rootly/webhook_handler.go @@ -0,0 +1,90 @@ +package rootly + +import ( + "fmt" + "slices" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/core" +) + +type WebhookConfiguration struct { + Events []string `json:"events"` +} + +type WebhookMetadata struct { + EndpointID string `json:"endpointId"` +} + +type RootlyWebhookHandler struct{} + +func (h *RootlyWebhookHandler) CompareConfig(a, b any) (bool, error) { + configA := WebhookConfiguration{} + configB := WebhookConfiguration{} + + err := mapstructure.Decode(a, &configA) + if err != nil { + return false, err + } + + err = mapstructure.Decode(b, &configB) + if err != nil { + return false, err + } + + // Check if A contains all events from B (A is superset of B) + for _, eventB := range configB.Events { + if !slices.Contains(configA.Events, eventB) { + return false, nil + } + } + + return true, nil +} + +func (h *RootlyWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, err + } + + config := WebhookConfiguration{} + err = mapstructure.Decode(ctx.Webhook.GetConfiguration(), &config) + if err != nil { + return nil, fmt.Errorf("error decoding webhook configuration: %v", err) + } + + endpoint, err := client.CreateWebhookEndpoint(ctx.Webhook.GetURL(), config.Events) + if err != nil { + return nil, fmt.Errorf("error creating webhook endpoint: %v", err) + } + + err = ctx.Webhook.SetSecret([]byte(endpoint.Secret)) + if err != nil { + return nil, fmt.Errorf("error updating webhook secret: %v", err) + } + + return WebhookMetadata{ + EndpointID: endpoint.ID, + }, nil +} + +func (h *RootlyWebhookHandler) Cleanup(ctx core.WebhookHandlerContext) error { + metadata := WebhookMetadata{} + err := mapstructure.Decode(ctx.Webhook.GetMetadata(), &metadata) + if err != nil { + return fmt.Errorf("error decoding webhook metadata: %v", err) + } + + client, err := NewClient(ctx.HTTP, 
ctx.Integration) + if err != nil { + return err + } + + err = client.DeleteWebhookEndpoint(metadata.EndpointID) + if err != nil { + return fmt.Errorf("error deleting webhook endpoint: %v", err) + } + + return nil +} diff --git a/pkg/integrations/rootly/webhook_handler_test.go b/pkg/integrations/rootly/webhook_handler_test.go new file mode 100644 index 0000000000..f7eb999fe5 --- /dev/null +++ b/pkg/integrations/rootly/webhook_handler_test.go @@ -0,0 +1,108 @@ +package rootly + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test__RootlyWebhookHandler__CompareConfig(t *testing.T) { + handler := &RootlyWebhookHandler{} + + testCases := []struct { + name string + configA any + configB any + expectEqual bool + expectError bool + }{ + { + name: "identical events", + configA: WebhookConfiguration{ + Events: []string{"incident.created"}, + }, + configB: WebhookConfiguration{ + Events: []string{"incident.created"}, + }, + expectEqual: true, + expectError: false, + }, + { + name: "different events", + configA: WebhookConfiguration{ + Events: []string{"incident.created"}, + }, + configB: WebhookConfiguration{ + Events: []string{"incident.resolved"}, + }, + expectEqual: false, + expectError: false, + }, + { + name: "superset of events (A contains all of B)", + configA: WebhookConfiguration{ + Events: []string{"incident.created", "incident.updated", "incident.resolved"}, + }, + configB: WebhookConfiguration{ + Events: []string{"incident.created"}, + }, + expectEqual: true, + expectError: false, + }, + { + name: "subset of events (A does not contain all of B)", + configA: WebhookConfiguration{ + Events: []string{"incident.created"}, + }, + configB: WebhookConfiguration{ + Events: []string{"incident.created", "incident.resolved"}, + }, + expectEqual: false, + expectError: false, + }, + { + name: "comparing map representations", + configA: map[string]any{ + "events": []string{"incident.created", "incident.updated"}, 
+ }, + configB: map[string]any{ + "events": []string{"incident.created"}, + }, + expectEqual: true, + expectError: false, + }, + { + name: "invalid first configuration", + configA: "invalid", + configB: WebhookConfiguration{ + Events: []string{"incident.created"}, + }, + expectEqual: false, + expectError: true, + }, + { + name: "invalid second configuration", + configA: WebhookConfiguration{ + Events: []string{"incident.created"}, + }, + configB: "invalid", + expectEqual: false, + expectError: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + equal, err := handler.CompareConfig(tc.configA, tc.configB) + + if tc.expectError { + assert.Error(t, err, "expected error, but got none") + } else { + require.NoError(t, err, "did not expect, but got an error") + } + + assert.Equal(t, tc.expectEqual, equal, "expected config comparison result to match") + }) + } +} diff --git a/pkg/integrations/semaphore/semaphore.go b/pkg/integrations/semaphore/semaphore.go index d90733e9e9..e08bb78dd2 100644 --- a/pkg/integrations/semaphore/semaphore.go +++ b/pkg/integrations/semaphore/semaphore.go @@ -1,7 +1,6 @@ package semaphore import ( - "crypto/sha256" "fmt" "github.com/mitchellh/mapstructure" @@ -11,7 +10,7 @@ import ( ) func init() { - registry.RegisterIntegration("semaphore", &Semaphore{}) + registry.RegisterIntegrationWithWebhookHandler("semaphore", &Semaphore{}, &SemaphoreWebhookHandler{}) } type Semaphore struct{} @@ -105,24 +104,6 @@ func (s *Semaphore) HandleRequest(ctx core.HTTPRequestContext) { // no-op } -type WebhookConfiguration struct { - Project string `json:"project"` -} - -func (s *Semaphore) CompareWebhookConfig(a, b any) (bool, error) { - configA := WebhookConfiguration{} - if err := mapstructure.Decode(a, &configA); err != nil { - return false, err - } - - configB := WebhookConfiguration{} - if err := mapstructure.Decode(b, &configB); err != nil { - return false, err - } - - return configA.Project == configB.Project, nil -} - 
func (s *Semaphore) Actions() []core.Action { return []core.Action{} } @@ -131,89 +112,6 @@ func (s *Semaphore) HandleAction(ctx core.IntegrationActionContext) error { return nil } -type WebhookMetadata struct { - Secret WebhookSecretMetadata `json:"secret"` - Notification WebhookNotificationMetadata `json:"notification"` -} - -type WebhookSecretMetadata struct { - ID string `json:"id"` - Name string `json:"name"` -} - -type WebhookNotificationMetadata struct { - ID string `json:"id"` - Name string `json:"name"` -} - -func (s *Semaphore) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - client, err := NewClient(ctx.HTTP, ctx.Integration) - if err != nil { - return nil, err - } - - configuration := WebhookConfiguration{} - err = mapstructure.Decode(ctx.Webhook.GetConfiguration(), &configuration) - if err != nil { - return nil, fmt.Errorf("error decoding configuration: %v", err) - } - - // - // Semaphore doesn't let us use UUIDs in secret names, - // so we sha256 the ID before creating the secret. - // - hash := sha256.New() - hash.Write([]byte(ctx.Webhook.GetID())) - suffix := fmt.Sprintf("%x", hash.Sum(nil)) - name := fmt.Sprintf("superplane-webhook-%x", suffix[:16]) - - webhookSecret, err := ctx.Webhook.GetSecret() - if err != nil { - return nil, fmt.Errorf("error getting webhook secret: %v", err) - } - - // - // Create Semaphore secret to store the event source key. 
- // - secret, err := upsertSecret(client, name, webhookSecret) - if err != nil { - return nil, fmt.Errorf("error creating Semaphore secret: %v", err) - } - - // - // Create a notification resource to receive events from Semaphore - // - notification, err := upsertNotification(client, name, ctx.Webhook.GetURL(), configuration.Project) - if err != nil { - return nil, fmt.Errorf("error creating Semaphore notification: %v", err) - } - - return WebhookMetadata{ - Secret: WebhookSecretMetadata{ID: secret.Metadata.ID, Name: secret.Metadata.Name}, - Notification: WebhookNotificationMetadata{ID: notification.Metadata.ID, Name: notification.Metadata.Name}, - }, nil -} - -func (s *Semaphore) CleanupWebhook(ctx core.CleanupWebhookContext) error { - metadata := WebhookMetadata{} - err := mapstructure.Decode(ctx.Webhook.GetMetadata(), &metadata) - if err != nil { - return fmt.Errorf("error decoding webhook metadata: %v", err) - } - - client, err := NewClient(ctx.HTTP, ctx.Integration) - if err != nil { - return err - } - - err = client.DeleteNotification(metadata.Notification.ID) - if err != nil { - return fmt.Errorf("error deleting notification: %v", err) - } - - return client.DeleteSecret(metadata.Secret.Name) -} - func (s *Semaphore) Components() []core.Component { return []core.Component{ &RunWorkflow{}, @@ -225,67 +123,3 @@ func (s *Semaphore) Triggers() []core.Trigger { &OnPipelineDone{}, } } - -func upsertSecret(client *Client, name string, key []byte) (*Secret, error) { - // - // Check if secret already exists. - // - secret, err := client.GetSecret(name) - if err == nil { - return secret, nil - } - - // - // Secret does not exist, create it. - // - secret, err = client.CreateWebhookSecret(name, string(key)) - if err != nil { - return nil, fmt.Errorf("error creating secret: %v", err) - } - - return secret, nil -} - -func upsertNotification(client *Client, name, URL, project string) (*Notification, error) { - // - // Check if notification already exists. 
- // - notification, err := client.GetNotification(name) - if err == nil { - return notification, nil - } - - // - // Notification does not exist, create it. - // - notification, err = client.CreateNotification(&Notification{ - Metadata: NotificationMetadata{ - Name: name, - }, - Spec: NotificationSpec{ - Rules: []NotificationRule{ - { - Name: fmt.Sprintf("webhook-for-%s", project), - Filter: NotificationRuleFilter{ - Branches: []string{}, - Pipelines: []string{}, - Projects: []string{project}, - Results: []string{}, - }, - Notify: NotificationRuleNotify{ - Webhook: NotificationNotifyWebhook{ - Endpoint: URL, - Secret: name, - }, - }, - }, - }, - }, - }) - - if err != nil { - return nil, fmt.Errorf("error creating notification: %v", err) - } - - return notification, nil -} diff --git a/pkg/integrations/semaphore/semaphore_test.go b/pkg/integrations/semaphore/semaphore_test.go index 1ff699a4a1..fa11c62433 100644 --- a/pkg/integrations/semaphore/semaphore_test.go +++ b/pkg/integrations/semaphore/semaphore_test.go @@ -75,81 +75,3 @@ func Test__Semaphore__Sync(t *testing.T) { assert.Equal(t, "https://example.semaphoreci.com/api/v1alpha/projects", httpContext.Requests[0].URL.String()) }) } - -func Test__Semaphore__CompareWebhookConfig(t *testing.T) { - s := &Semaphore{} - - testCases := []struct { - name string - configA any - configB any - expectEqual bool - expectError bool - }{ - { - name: "identical configurations", - configA: WebhookConfiguration{ - Project: "my-project", - }, - configB: WebhookConfiguration{ - Project: "my-project", - }, - expectEqual: true, - expectError: false, - }, - { - name: "different projects", - configA: WebhookConfiguration{ - Project: "my-project", - }, - configB: WebhookConfiguration{ - Project: "other-project", - }, - expectEqual: false, - expectError: false, - }, - { - name: "comparing map representations", - configA: map[string]any{ - "project": "my-project", - }, - configB: map[string]any{ - "project": "my-project", - }, - 
expectEqual: true, - expectError: false, - }, - { - name: "invalid first configuration", - configA: "invalid", - configB: WebhookConfiguration{ - Project: "my-project", - }, - expectEqual: false, - expectError: true, - }, - { - name: "invalid second configuration", - configA: WebhookConfiguration{ - Project: "my-project", - }, - configB: "invalid", - expectEqual: false, - expectError: true, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - equal, err := s.CompareWebhookConfig(tc.configA, tc.configB) - - if tc.expectError { - assert.Error(t, err) - } else { - require.NoError(t, err) - } - - assert.Equal(t, tc.expectEqual, equal) - }) - } -} diff --git a/pkg/integrations/semaphore/webhook_handler.go b/pkg/integrations/semaphore/webhook_handler.go new file mode 100644 index 0000000000..5e47e7e01b --- /dev/null +++ b/pkg/integrations/semaphore/webhook_handler.go @@ -0,0 +1,176 @@ +package semaphore + +import ( + "crypto/sha256" + "fmt" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/core" +) + +type WebhookMetadata struct { + Secret WebhookSecretMetadata `json:"secret"` + Notification WebhookNotificationMetadata `json:"notification"` +} + +type WebhookSecretMetadata struct { + ID string `json:"id"` + Name string `json:"name"` +} + +type WebhookNotificationMetadata struct { + ID string `json:"id"` + Name string `json:"name"` +} + +type WebhookConfiguration struct { + Project string `json:"project"` +} + +type SemaphoreWebhookHandler struct{} + +func (h *SemaphoreWebhookHandler) CompareConfig(a, b any) (bool, error) { + configA := WebhookConfiguration{} + if err := mapstructure.Decode(a, &configA); err != nil { + return false, err + } + + configB := WebhookConfiguration{} + if err := mapstructure.Decode(b, &configB); err != nil { + return false, err + } + + return configA.Project == configB.Project, nil +} + +func (h *SemaphoreWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { + client, 
err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, err + } + + configuration := WebhookConfiguration{} + err = mapstructure.Decode(ctx.Webhook.GetConfiguration(), &configuration) + if err != nil { + return nil, fmt.Errorf("error decoding configuration: %v", err) + } + + // + // Semaphore doesn't let us use UUIDs in secret names, + // so we sha256 the ID before creating the secret. + // + hash := sha256.New() + hash.Write([]byte(ctx.Webhook.GetID())) + suffix := fmt.Sprintf("%x", hash.Sum(nil)) + name := fmt.Sprintf("superplane-webhook-%x", suffix[:16]) + + webhookSecret, err := ctx.Webhook.GetSecret() + if err != nil { + return nil, fmt.Errorf("error getting webhook secret: %v", err) + } + + // + // Create Semaphore secret to store the event source key. + // + secret, err := upsertSecret(client, name, webhookSecret) + if err != nil { + return nil, fmt.Errorf("error creating Semaphore secret: %v", err) + } + + // + // Create a notification resource to receive events from Semaphore + // + notification, err := upsertNotification(client, name, ctx.Webhook.GetURL(), configuration.Project) + if err != nil { + return nil, fmt.Errorf("error creating Semaphore notification: %v", err) + } + + return WebhookMetadata{ + Secret: WebhookSecretMetadata{ID: secret.Metadata.ID, Name: secret.Metadata.Name}, + Notification: WebhookNotificationMetadata{ID: notification.Metadata.ID, Name: notification.Metadata.Name}, + }, nil +} + +func (h *SemaphoreWebhookHandler) Cleanup(ctx core.WebhookHandlerContext) error { + metadata := WebhookMetadata{} + err := mapstructure.Decode(ctx.Webhook.GetMetadata(), &metadata) + if err != nil { + return fmt.Errorf("error decoding webhook metadata: %v", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + err = client.DeleteNotification(metadata.Notification.ID) + if err != nil { + return fmt.Errorf("error deleting notification: %v", err) + } + + return 
client.DeleteSecret(metadata.Secret.Name) +} + +func upsertSecret(client *Client, name string, key []byte) (*Secret, error) { + // + // Check if secret already exists. + // + secret, err := client.GetSecret(name) + if err == nil { + return secret, nil + } + + // + // Secret does not exist, create it. + // + secret, err = client.CreateWebhookSecret(name, string(key)) + if err != nil { + return nil, fmt.Errorf("error creating secret: %v", err) + } + + return secret, nil +} + +func upsertNotification(client *Client, name, URL, project string) (*Notification, error) { + // + // Check if notification already exists. + // + notification, err := client.GetNotification(name) + if err == nil { + return notification, nil + } + + // + // Notification does not exist, create it. + // + notification, err = client.CreateNotification(&Notification{ + Metadata: NotificationMetadata{ + Name: name, + }, + Spec: NotificationSpec{ + Rules: []NotificationRule{ + { + Name: fmt.Sprintf("webhook-for-%s", project), + Filter: NotificationRuleFilter{ + Branches: []string{}, + Pipelines: []string{}, + Projects: []string{project}, + Results: []string{}, + }, + Notify: NotificationRuleNotify{ + Webhook: NotificationNotifyWebhook{ + Endpoint: URL, + Secret: name, + }, + }, + }, + }, + }, + }) + + if err != nil { + return nil, fmt.Errorf("error creating notification: %v", err) + } + + return notification, nil +} diff --git a/pkg/integrations/semaphore/webhook_handler_test.go b/pkg/integrations/semaphore/webhook_handler_test.go new file mode 100644 index 0000000000..3cfea70e4a --- /dev/null +++ b/pkg/integrations/semaphore/webhook_handler_test.go @@ -0,0 +1,86 @@ +package semaphore + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test__SemaphoreWebhookHandler__CompareConfig(t *testing.T) { + handler := &SemaphoreWebhookHandler{} + + testCases := []struct { + name string + configA any + configB any + expectEqual bool + expectError 
bool + }{ + { + name: "identical configurations", + configA: WebhookConfiguration{ + Project: "my-project", + }, + configB: WebhookConfiguration{ + Project: "my-project", + }, + expectEqual: true, + expectError: false, + }, + { + name: "different projects", + configA: WebhookConfiguration{ + Project: "my-project", + }, + configB: WebhookConfiguration{ + Project: "other-project", + }, + expectEqual: false, + expectError: false, + }, + { + name: "comparing map representations", + configA: map[string]any{ + "project": "my-project", + }, + configB: map[string]any{ + "project": "my-project", + }, + expectEqual: true, + expectError: false, + }, + { + name: "invalid first configuration", + configA: "invalid", + configB: WebhookConfiguration{ + Project: "my-project", + }, + expectEqual: false, + expectError: true, + }, + { + name: "invalid second configuration", + configA: WebhookConfiguration{ + Project: "my-project", + }, + configB: "invalid", + expectEqual: false, + expectError: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + equal, err := handler.CompareConfig(tc.configA, tc.configB) + + if tc.expectError { + assert.Error(t, err) + } else { + require.NoError(t, err) + } + + assert.Equal(t, tc.expectEqual, equal) + }) + } +} diff --git a/pkg/integrations/sendgrid/sendgrid.go b/pkg/integrations/sendgrid/sendgrid.go index 72a98bc843..b1a3ddb579 100644 --- a/pkg/integrations/sendgrid/sendgrid.go +++ b/pkg/integrations/sendgrid/sendgrid.go @@ -2,8 +2,6 @@ package sendgrid import ( "fmt" - "net/url" - "strings" "github.com/mitchellh/mapstructure" "github.com/superplanehq/superplane/pkg/configuration" @@ -12,7 +10,7 @@ import ( ) func init() { - registry.RegisterIntegration("sendgrid", &SendGrid{}) + registry.RegisterIntegrationWithWebhookHandler("sendgrid", &SendGrid{}, &SendGridWebhookHandler{}) } type SendGrid struct{} @@ -133,90 +131,10 @@ func (s *SendGrid) HandleRequest(ctx core.HTTPRequestContext) { // no-op } -func (s 
*SendGrid) CompareWebhookConfig(a, b any) (bool, error) { - return true, nil -} - func (s *SendGrid) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { return []core.IntegrationResource{}, nil } -func (s *SendGrid) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - client, err := NewClient(ctx.HTTP, ctx.Integration) - if err != nil { - return nil, err - } - - settings := EventWebhookSettings{ - Enabled: true, - URL: ctx.Webhook.GetURL(), - Processed: true, - Delivered: true, - Deferred: true, - Bounce: true, - Dropped: true, - Open: true, - Click: true, - SpamReport: true, - Unsubscribe: true, - GroupUnsubscribe: true, - GroupResubscribe: true, - } - - if err := client.UpdateEventWebhookSettings(settings); err != nil { - return nil, fmt.Errorf("failed to update SendGrid webhook settings: %w", err) - } - - publicKey, err := client.EnableEventWebhookSignature() - if err != nil { - return nil, fmt.Errorf("failed to enable SendGrid signed webhook: %w", err) - } - verificationKey := strings.TrimSpace(publicKey) - - if verificationKey != "" { - if err := ctx.Integration.SetSecret(webhookVerificationKeySecret, []byte(verificationKey)); err != nil { - return nil, fmt.Errorf("failed to store integration verification key: %w", err) - } - - if err := ctx.Webhook.SetSecret([]byte(verificationKey)); err != nil { - return nil, fmt.Errorf("failed to store webhook verification key: %w", err) - } - } - - return nil, nil -} - -func (s *SendGrid) CleanupWebhook(ctx core.CleanupWebhookContext) error { - client, err := NewClient(ctx.HTTP, ctx.Integration) - if err != nil { - return err - } - - settings, err := client.GetEventWebhookSettings() - if err != nil { - return fmt.Errorf("failed to fetch SendGrid webhook settings: %w", err) - } - - webhookURL := strings.TrimSpace(settings.URL) - if webhookURL == "" { - webhookURL = ctx.Webhook.GetURL() - } - - parsedURL, err := url.Parse(webhookURL) - if err != nil || 
strings.ToLower(parsedURL.Scheme) != "https" { - return nil - } - - settings.Enabled = false - settings.URL = webhookURL - - if err := client.UpdateEventWebhookSettings(*settings); err != nil { - return fmt.Errorf("failed to disable SendGrid webhook: %w", err) - } - - return nil -} - func (s *SendGrid) Actions() []core.Action { return []core.Action{} } diff --git a/pkg/integrations/sendgrid/webhook_handler.go b/pkg/integrations/sendgrid/webhook_handler.go new file mode 100644 index 0000000000..d621334d46 --- /dev/null +++ b/pkg/integrations/sendgrid/webhook_handler.go @@ -0,0 +1,91 @@ +package sendgrid + +import ( + "fmt" + "net/url" + "strings" + + "github.com/superplanehq/superplane/pkg/core" +) + +type SendGridWebhookHandler struct{} + +func (s *SendGridWebhookHandler) CompareConfig(a, b any) (bool, error) { + return true, nil +} + +func (s *SendGridWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, err + } + + settings := EventWebhookSettings{ + Enabled: true, + URL: ctx.Webhook.GetURL(), + Processed: true, + Delivered: true, + Deferred: true, + Bounce: true, + Dropped: true, + Open: true, + Click: true, + SpamReport: true, + Unsubscribe: true, + GroupUnsubscribe: true, + GroupResubscribe: true, + } + + if err := client.UpdateEventWebhookSettings(settings); err != nil { + return nil, fmt.Errorf("failed to update SendGrid webhook settings: %w", err) + } + + publicKey, err := client.EnableEventWebhookSignature() + if err != nil { + return nil, fmt.Errorf("failed to enable SendGrid signed webhook: %w", err) + } + verificationKey := strings.TrimSpace(publicKey) + + if verificationKey != "" { + if err := ctx.Integration.SetSecret(webhookVerificationKeySecret, []byte(verificationKey)); err != nil { + return nil, fmt.Errorf("failed to store integration verification key: %w", err) + } + + if err := ctx.Webhook.SetSecret([]byte(verificationKey)); err != nil { + return 
nil, fmt.Errorf("failed to store webhook verification key: %w", err) + } + } + + return nil, nil +} + +func (s *SendGridWebhookHandler) Cleanup(ctx core.WebhookHandlerContext) error { + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + settings, err := client.GetEventWebhookSettings() + if err != nil { + return fmt.Errorf("failed to fetch SendGrid webhook settings: %w", err) + } + + webhookURL := strings.TrimSpace(settings.URL) + if webhookURL == "" { + webhookURL = ctx.Webhook.GetURL() + } + + parsedURL, err := url.Parse(webhookURL) + if err != nil || strings.ToLower(parsedURL.Scheme) != "https" { + return nil + } + + settings.Enabled = false + settings.URL = webhookURL + + if err := client.UpdateEventWebhookSettings(*settings); err != nil { + return fmt.Errorf("failed to disable SendGrid webhook: %w", err) + } + + return nil +} diff --git a/pkg/integrations/sendgrid/sendgrid_webhook_test.go b/pkg/integrations/sendgrid/webhook_handler_test.go similarity index 92% rename from pkg/integrations/sendgrid/sendgrid_webhook_test.go rename to pkg/integrations/sendgrid/webhook_handler_test.go index 54ccb39f5c..a3fbf9ed58 100644 --- a/pkg/integrations/sendgrid/sendgrid_webhook_test.go +++ b/pkg/integrations/sendgrid/webhook_handler_test.go @@ -26,8 +26,8 @@ func (t *testWebhookContext) GetMetadata() any { return nil } func (t *testWebhookContext) GetConfiguration() any { return t.configuration } func (t *testWebhookContext) SetSecret(secret []byte) error { t.secret = secret; return nil } -func Test__SendGrid__SetupWebhook_EnablesSignedWebhook(t *testing.T) { - integration := &SendGrid{} +func Test__SendGrid__Setup_EnablesSignedWebhook(t *testing.T) { + handler := &SendGridWebhookHandler{} httpCtx := &contexts.HTTPContext{ Responses: []*http.Response{ { @@ -47,17 +47,17 @@ func Test__SendGrid__SetupWebhook_EnablesSignedWebhook(t *testing.T) { }, } - webhookCtx := &testWebhookContext{ - url: "https://example.com/webhook", - 
configuration: struct{}{}, - } - integrationCtx := &contexts.IntegrationContext{ Configuration: map[string]any{"apiKey": "sg-test"}, Secrets: map[string]core.IntegrationSecret{}, } - _, err := integration.SetupWebhook(core.SetupWebhookContext{ + webhookCtx := &testWebhookContext{ + url: "https://example.com/webhook", + configuration: struct{}{}, + } + + _, err := handler.Setup(core.WebhookHandlerContext{ HTTP: httpCtx, Webhook: webhookCtx, Integration: integrationCtx, @@ -71,7 +71,7 @@ func Test__SendGrid__SetupWebhook_EnablesSignedWebhook(t *testing.T) { } func Test__SendGrid__CleanupWebhook_DisablesWebhook(t *testing.T) { - integration := &SendGrid{} + handler := &SendGridWebhookHandler{} httpCtx := &contexts.HTTPContext{ Responses: []*http.Response{ { @@ -96,7 +96,7 @@ func Test__SendGrid__CleanupWebhook_DisablesWebhook(t *testing.T) { configuration: struct{}{}, } - err := integration.CleanupWebhook(core.CleanupWebhookContext{ + err := handler.Cleanup(core.WebhookHandlerContext{ HTTP: httpCtx, Webhook: webhookCtx, Integration: &contexts.IntegrationContext{ @@ -126,7 +126,7 @@ func Test__SendGrid__CleanupWebhook_DisablesWebhook(t *testing.T) { } func Test__SendGrid__CleanupWebhook_SkipsNonHTTPS(t *testing.T) { - integration := &SendGrid{} + handler := &SendGridWebhookHandler{} httpCtx := &contexts.HTTPContext{ Responses: []*http.Response{ { @@ -144,7 +144,7 @@ func Test__SendGrid__CleanupWebhook_SkipsNonHTTPS(t *testing.T) { configuration: struct{}{}, } - err := integration.CleanupWebhook(core.CleanupWebhookContext{ + err := handler.Cleanup(core.WebhookHandlerContext{ HTTP: httpCtx, Webhook: webhookCtx, Integration: &contexts.IntegrationContext{ diff --git a/pkg/integrations/slack/slack.go b/pkg/integrations/slack/slack.go index cc0dd0f566..0020a2d3bb 100644 --- a/pkg/integrations/slack/slack.go +++ b/pkg/integrations/slack/slack.go @@ -446,21 +446,3 @@ func (s *Slack) readAndVerify(ctx core.HTTPRequestContext) ([]byte, error) { return body, nil } - -/* - * All 
the events we receive from Slack are on the app's HandleWebhook(), - * so all the Slack components and triggers use app subscriptions, - * and not webhooks. - */ - -func (s *Slack) CompareWebhookConfig(a, b any) (bool, error) { - return false, nil -} - -func (s *Slack) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - return nil, nil -} - -func (s *Slack) CleanupWebhook(ctx core.CleanupWebhookContext) error { - return nil -} diff --git a/pkg/integrations/smtp/smtp.go b/pkg/integrations/smtp/smtp.go index f11f5ff31e..0fcb1f47f7 100644 --- a/pkg/integrations/smtp/smtp.go +++ b/pkg/integrations/smtp/smtp.go @@ -153,23 +153,11 @@ func (s *SMTP) HandleRequest(ctx core.HTTPRequestContext) { // SMTP doesn't handle incoming webhooks } -func (s *SMTP) CompareWebhookConfig(a, b any) (bool, error) { - return true, nil -} - func (s *SMTP) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { // SMTP doesn't have resources to list return []core.IntegrationResource{}, nil } -func (s *SMTP) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - return nil, nil -} - -func (s *SMTP) CleanupWebhook(ctx core.CleanupWebhookContext) error { - return nil -} - func (s *SMTP) Actions() []core.Action { return []core.Action{} } diff --git a/pkg/registry/integration.go b/pkg/registry/integration.go index fdf3bd4e11..da08ba1fbd 100644 --- a/pkg/registry/integration.go +++ b/pkg/registry/integration.go @@ -128,34 +128,3 @@ func (s *PanicableIntegration) HandleRequest(ctx core.HTTPRequestContext) { }() s.underlying.HandleRequest(ctx) } - -func (s *PanicableIntegration) CompareWebhookConfig(a, b any) (result bool, err error) { - defer func() { - if r := recover(); r != nil { - result = false - err = fmt.Errorf("integration %s panicked in CompareWebhookConfig(): %v", - s.underlying.Name(), r) - } - }() - return s.underlying.CompareWebhookConfig(a, b) -} - -func (s *PanicableIntegration) SetupWebhook(ctx core.SetupWebhookContext) 
(metadata any, err error) { - defer func() { - if r := recover(); r != nil { - err = fmt.Errorf("integration %s panicked in SetupWebhook(): %v", - s.underlying.Name(), r) - } - }() - return s.underlying.SetupWebhook(ctx) -} - -func (s *PanicableIntegration) CleanupWebhook(ctx core.CleanupWebhookContext) (err error) { - defer func() { - if r := recover(); r != nil { - err = fmt.Errorf("integration %s panicked in CleanupWebhook(): %v", - s.underlying.Name(), r) - } - }() - return s.underlying.CleanupWebhook(ctx) -} diff --git a/pkg/registry/integration_test.go b/pkg/registry/integration_test.go index e0d305e01f..061df6a2a2 100644 --- a/pkg/registry/integration_test.go +++ b/pkg/registry/integration_test.go @@ -40,16 +40,6 @@ func (p *panickingIntegration) ListResources(resourceType string, ctx core.ListR func (p *panickingIntegration) HandleRequest(ctx core.HTTPRequestContext) { panic("handle request panic") } -func (p *panickingIntegration) CompareWebhookConfig(a, b any) (bool, error) { - panic("compare webhook config panic") -} -func (p *panickingIntegration) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - panic("setup webhook panic") -} -func (p *panickingIntegration) CleanupWebhook(ctx core.CleanupWebhookContext) error { - panic("cleanup webhook panic") -} - func TestPanicableIntegration_Sync_CatchesPanic(t *testing.T) { integration := &panickingIntegration{} panicable := NewPanicableIntegration(integration) @@ -75,36 +65,3 @@ func TestPanicableIntegration_HandleRequest_CatchesPanic(t *testing.T) { assert.Equal(t, 500, recorder.Code) } - -func TestPanicableIntegration_CompareWebhookConfig_CatchesPanic(t *testing.T) { - integration := &panickingIntegration{} - panicable := NewPanicableIntegration(integration) - - result, err := panicable.CompareWebhookConfig(nil, nil) - - assert.False(t, result) - require.Error(t, err) - assert.Contains(t, err.Error(), "integration panicking-integration panicked in CompareWebhookConfig()") - assert.Contains(t, 
err.Error(), "compare webhook config panic") -} - -func TestPanicableIntegration_SetupWebhook_CatchesPanic(t *testing.T) { - integration := &panickingIntegration{} - panicable := NewPanicableIntegration(integration) - - metadata, err := panicable.SetupWebhook(core.SetupWebhookContext{}) - require.Error(t, err) - assert.Nil(t, metadata) - assert.Contains(t, err.Error(), "integration panicking-integration panicked in SetupWebhook()") - assert.Contains(t, err.Error(), "setup webhook panic") -} - -func TestPanicableIntegration_CleanupWebhook_CatchesPanic(t *testing.T) { - integration := &panickingIntegration{} - panicable := NewPanicableIntegration(integration) - - err := panicable.CleanupWebhook(core.CleanupWebhookContext{}) - require.Error(t, err) - assert.Contains(t, err.Error(), "integration panicking-integration panicked in CleanupWebhook()") - assert.Contains(t, err.Error(), "cleanup webhook panic") -} diff --git a/pkg/registry/registry.go b/pkg/registry/registry.go index cde420c7bd..9d27b68c61 100644 --- a/pkg/registry/registry.go +++ b/pkg/registry/registry.go @@ -11,11 +11,12 @@ import ( ) var ( - registeredComponents = make(map[string]core.Component) - registeredTriggers = make(map[string]core.Trigger) - registeredIntegrations = make(map[string]core.Integration) - registeredWidgets = make(map[string]core.Widget) - mu sync.RWMutex + registeredComponents = make(map[string]core.Component) + registeredTriggers = make(map[string]core.Trigger) + registeredIntegrations = make(map[string]core.Integration) + registeredWebhookHandlers = make(map[string]core.WebhookHandler) + registeredWidgets = make(map[string]core.Widget) + mu sync.RWMutex ) func RegisterComponent(name string, c core.Component) { @@ -36,19 +37,33 @@ func RegisterIntegration(name string, i core.Integration) { registeredIntegrations[name] = i } +func RegisterIntegrationWithWebhookHandler(name string, i core.Integration, h core.WebhookHandler) { + mu.Lock() + defer mu.Unlock() + 
registeredIntegrations[name] = i + registeredWebhookHandlers[name] = h +} + func RegisterWidget(name string, w core.Widget) { mu.Lock() defer mu.Unlock() registeredWidgets[name] = w } +type IntegrationRegistration struct { + Name string + Integration core.Integration + WebhookHandler core.WebhookHandler +} + type Registry struct { - httpCtx *HTTPContext - Encryptor crypto.Encryptor - Integrations map[string]core.Integration - Components map[string]core.Component - Triggers map[string]core.Trigger - Widgets map[string]core.Widget + httpCtx *HTTPContext + Encryptor crypto.Encryptor + Integrations map[string]core.Integration + WebhookHandlers map[string]core.WebhookHandler + Components map[string]core.Component + Triggers map[string]core.Trigger + Widgets map[string]core.Widget } func NewRegistry(encryptor crypto.Encryptor, httpOptions HTTPOptions) (*Registry, error) { @@ -58,12 +73,13 @@ func NewRegistry(encryptor crypto.Encryptor, httpOptions HTTPOptions) (*Registry } r := &Registry{ - Encryptor: encryptor, - httpCtx: httpCtx, - Components: map[string]core.Component{}, - Triggers: map[string]core.Trigger{}, - Integrations: map[string]core.Integration{}, - Widgets: map[string]core.Widget{}, + Encryptor: encryptor, + httpCtx: httpCtx, + Components: map[string]core.Component{}, + Triggers: map[string]core.Trigger{}, + Integrations: map[string]core.Integration{}, + WebhookHandlers: map[string]core.WebhookHandler{}, + Widgets: map[string]core.Widget{}, } r.Init() @@ -90,6 +106,10 @@ func (r *Registry) Init() { r.Integrations[name] = NewPanicableIntegration(integration) } + for name, webhookHandler := range registeredWebhookHandlers { + r.WebhookHandlers[name] = NewPanicableWebhookHandler(webhookHandler) + } + // // Widgets are not required to be panicable, since they just carry Configuration data // and no logic is executed. 
@@ -197,6 +217,15 @@ func (r *Registry) GetIntegration(name string) (core.Integration, error) { return integration, nil } +func (r *Registry) GetWebhookHandler(name string) (core.WebhookHandler, error) { + webhookHandler, ok := r.WebhookHandlers[name] + if !ok { + return nil, fmt.Errorf("webhook handler %s not registered", name) + } + + return webhookHandler, nil +} + func (r *Registry) ListIntegrations() []core.Integration { integrations := make([]core.Integration, 0, len(r.Integrations)) for _, integration := range r.Integrations { diff --git a/pkg/registry/webhook_handler.go b/pkg/registry/webhook_handler.go new file mode 100644 index 0000000000..b66158f246 --- /dev/null +++ b/pkg/registry/webhook_handler.go @@ -0,0 +1,43 @@ +package registry + +import ( + "fmt" + "runtime/debug" + + "github.com/superplanehq/superplane/pkg/core" +) + +type PanicableWebhookHandler struct { + underlying core.WebhookHandler +} + +func NewPanicableWebhookHandler(underlying core.WebhookHandler) *PanicableWebhookHandler { + return &PanicableWebhookHandler{underlying: underlying} +} + +func (h *PanicableWebhookHandler) CompareConfig(a, b any) (bool, error) { + return h.underlying.CompareConfig(a, b) +} + +func (h *PanicableWebhookHandler) Setup(ctx core.WebhookHandlerContext) (metadata any, err error) { + defer func() { + if r := recover(); r != nil { + ctx.Logger.Errorf("Webhook handler panicked in Setup(): %v\nStack: %s", + r, debug.Stack()) + metadata = nil + err = fmt.Errorf("webhook handler panicked in Setup(): %v", r) + } + }() + return h.underlying.Setup(ctx) +} + +func (h *PanicableWebhookHandler) Cleanup(ctx core.WebhookHandlerContext) (err error) { + defer func() { + if r := recover(); r != nil { + ctx.Logger.Errorf("Webhook handler panicked in Cleanup(): %v\nStack: %s", + r, debug.Stack()) + err = fmt.Errorf("webhook handler panicked in Cleanup(): %v", r) + } + }() + return h.underlying.Cleanup(ctx) +} diff --git a/pkg/registry/webhook_handler_test.go 
b/pkg/registry/webhook_handler_test.go new file mode 100644 index 0000000000..ed3cf3945a --- /dev/null +++ b/pkg/registry/webhook_handler_test.go @@ -0,0 +1,49 @@ +package registry + +import ( + "testing" + + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" +) + +// panickingWebhookHandler is a webhook handler that panics in all panicable methods +type panickingWebhookHandler struct{} + +func (p *panickingWebhookHandler) CompareConfig(a, b any) (bool, error) { + panic("compare config panic") +} + +func (p *panickingWebhookHandler) Setup(ctx core.WebhookHandlerContext) (metadata any, err error) { + panic("setup panic") +} + +func (p *panickingWebhookHandler) Cleanup(ctx core.WebhookHandlerContext) error { + panic("cleanup panic") +} + +func Test_PanicableWebhookHandler_Setup_CatchesPanic(t *testing.T) { + handler := &panickingWebhookHandler{} + panicable := NewPanicableWebhookHandler(handler) + ctx := core.WebhookHandlerContext{ + Logger: log.NewEntry(log.StandardLogger()), + } + + _, err := panicable.Setup(ctx) + require.Error(t, err) + assert.Contains(t, err.Error(), "setup panic") +} + +func Test_PanicableWebhookHandler_Cleanup_CatchesPanic(t *testing.T) { + handler := &panickingWebhookHandler{} + panicable := NewPanicableWebhookHandler(handler) + ctx := core.WebhookHandlerContext{ + Logger: log.NewEntry(log.StandardLogger()), + } + + err := panicable.Cleanup(ctx) + require.Error(t, err) + assert.Contains(t, err.Error(), "cleanup panic") +} diff --git a/pkg/workers/contexts/integration_context.go b/pkg/workers/contexts/integration_context.go index bc73d921c0..25401f566b 100644 --- a/pkg/workers/contexts/integration_context.go +++ b/pkg/workers/contexts/integration_context.go @@ -41,12 +41,12 @@ func (c *IntegrationContext) ID() uuid.UUID { } func (c *IntegrationContext) RequestWebhook(configuration any) error { - impl, err := 
c.registry.GetIntegration(c.integration.AppName) + handler, err := c.registry.GetWebhookHandler(c.integration.AppName) if err != nil { return err } - if err := c.replaceMismatchedWebhook(configuration, impl); err != nil { + if err := c.replaceMismatchedWebhook(configuration, handler); err != nil { return err } @@ -56,7 +56,7 @@ func (c *IntegrationContext) RequestWebhook(configuration any) error { } for _, hook := range webhooks { - ok, err := impl.CompareWebhookConfig(hook.Configuration.Data(), configuration) + ok, err := handler.CompareConfig(hook.Configuration.Data(), configuration) if err != nil { return err } @@ -70,7 +70,7 @@ func (c *IntegrationContext) RequestWebhook(configuration any) error { return c.createWebhook(configuration) } -func (c *IntegrationContext) replaceMismatchedWebhook(configuration any, impl core.Integration) error { +func (c *IntegrationContext) replaceMismatchedWebhook(configuration any, handler core.WebhookHandler) error { if c.node == nil || c.node.WebhookID == nil { return nil } @@ -80,7 +80,7 @@ func (c *IntegrationContext) replaceMismatchedWebhook(configuration any, impl co return err } - matches, err := impl.CompareWebhookConfig(webhook.Configuration.Data(), configuration) + matches, err := handler.CompareConfig(webhook.Configuration.Data(), configuration) if err != nil { return err } diff --git a/pkg/workers/contexts/integration_context_test.go b/pkg/workers/contexts/integration_context_test.go index e119f2b635..92889bf347 100644 --- a/pkg/workers/contexts/integration_context_test.go +++ b/pkg/workers/contexts/integration_context_test.go @@ -80,8 +80,9 @@ func Test__IntegrationContext_RequestWebhook_ReplacesWebhookOnConfigChange(t *te r := support.Setup(t) defer r.Close() - r.Registry.Integrations["dummy"] = support.NewDummyIntegration(support.DummyIntegrationOptions{ - OnCompareWebhookConfig: func(a, b any) (bool, error) { + r.Registry.Integrations["dummy"] = support.NewDummyIntegration(support.DummyIntegrationOptions{}) + 
r.Registry.WebhookHandlers["dummy"] = support.NewDummyWebhookHandler(support.DummyWebhookHandlerOptions{ + CompareConfigFunc: func(a, b any) (bool, error) { return reflect.DeepEqual(a, b), nil }, }) diff --git a/pkg/workers/webhook_cleanup_worker.go b/pkg/workers/webhook_cleanup_worker.go index 67b2341d19..47471ce9b2 100644 --- a/pkg/workers/webhook_cleanup_worker.go +++ b/pkg/workers/webhook_cleanup_worker.go @@ -11,6 +11,7 @@ import ( "github.com/superplanehq/superplane/pkg/core" "github.com/superplanehq/superplane/pkg/crypto" "github.com/superplanehq/superplane/pkg/database" + "github.com/superplanehq/superplane/pkg/logging" "github.com/superplanehq/superplane/pkg/models" "github.com/superplanehq/superplane/pkg/registry" "github.com/superplanehq/superplane/pkg/workers/contexts" @@ -91,15 +92,16 @@ func (w *WebhookCleanupWorker) processAppInstallationWebhook(tx *gorm.DB, webhoo return err } - integration, err := w.registry.GetIntegration(instance.AppName) + handler, err := w.registry.GetWebhookHandler(instance.AppName) if err != nil { return err } - err = integration.CleanupWebhook(core.CleanupWebhookContext{ + err = handler.Cleanup(core.WebhookHandlerContext{ HTTP: w.registry.HTTPContext(), - Webhook: contexts.NewWebhookContext(tx, webhook, w.encryptor, w.baseURL), Integration: contexts.NewIntegrationContext(tx, nil, instance, w.encryptor, w.registry), + Webhook: contexts.NewWebhookContext(tx, webhook, w.encryptor, w.baseURL), + Logger: logging.ForIntegration(*instance), }) if err != nil { diff --git a/pkg/workers/webhook_provisioner.go b/pkg/workers/webhook_provisioner.go index 4080d6f47b..9e6f2b9124 100644 --- a/pkg/workers/webhook_provisioner.go +++ b/pkg/workers/webhook_provisioner.go @@ -11,6 +11,7 @@ import ( "github.com/superplanehq/superplane/pkg/core" "github.com/superplanehq/superplane/pkg/crypto" "github.com/superplanehq/superplane/pkg/database" + "github.com/superplanehq/superplane/pkg/logging" "github.com/superplanehq/superplane/pkg/models" 
"github.com/superplanehq/superplane/pkg/registry" "github.com/superplanehq/superplane/pkg/workers/contexts" @@ -90,22 +91,23 @@ func (w *WebhookProvisioner) processIntegrationWebhook(tx *gorm.DB, webhook *mod return w.handleWebhookError(tx, webhook, err) } - integration, err := w.registry.GetIntegration(instance.AppName) + handler, err := w.registry.GetWebhookHandler(instance.AppName) if err != nil { return w.handleWebhookError(tx, webhook, err) } - webhookMetadata, err := integration.SetupWebhook(core.SetupWebhookContext{ + metadata, err := handler.Setup(core.WebhookHandlerContext{ HTTP: w.registry.HTTPContext(), - Webhook: contexts.NewWebhookContext(tx, webhook, w.encryptor, w.baseURL), Integration: contexts.NewIntegrationContext(tx, nil, instance, w.encryptor, w.registry), + Webhook: contexts.NewWebhookContext(tx, webhook, w.encryptor, w.baseURL), + Logger: logging.ForIntegration(*instance), }) if err != nil { return w.handleWebhookError(tx, webhook, err) } - return webhook.ReadyWithMetadata(tx, webhookMetadata) + return webhook.ReadyWithMetadata(tx, metadata) } func (w *WebhookProvisioner) handleWebhookError(tx *gorm.DB, webhook *models.Webhook, originalErr error) error { diff --git a/pkg/workers/webhook_provisioner_test.go b/pkg/workers/webhook_provisioner_test.go index 0423d30c03..b690803912 100644 --- a/pkg/workers/webhook_provisioner_test.go +++ b/pkg/workers/webhook_provisioner_test.go @@ -55,8 +55,9 @@ func Test__WebhookProvisioner_RetryOnError(t *testing.T) { provisioner := NewWebhookProvisioner("https://example.com", &BadEncryptor{}, r.Registry) - r.Registry.Integrations["dummy"] = support.NewDummyIntegration(support.DummyIntegrationOptions{ - OnSetupWebhook: func(ctx core.SetupWebhookContext) (any, error) { + r.Registry.Integrations["dummy"] = support.NewDummyIntegration(support.DummyIntegrationOptions{}) + r.Registry.WebhookHandlers["dummy"] = support.NewDummyWebhookHandler(support.DummyWebhookHandlerOptions{ + SetupFunc: func(ctx 
core.WebhookHandlerContext) (any, error) { return nil, errors.New("oops") }, }) @@ -97,8 +98,9 @@ func Test__WebhookProvisioner_MaxRetriesExceeded(t *testing.T) { provisioner := NewWebhookProvisioner("https://example.com", &BadEncryptor{}, r.Registry) - r.Registry.Integrations["dummy"] = support.NewDummyIntegration(support.DummyIntegrationOptions{ - OnSetupWebhook: func(ctx core.SetupWebhookContext) (any, error) { + r.Registry.Integrations["dummy"] = support.NewDummyIntegration(support.DummyIntegrationOptions{}) + r.Registry.WebhookHandlers["dummy"] = support.NewDummyWebhookHandler(support.DummyWebhookHandlerOptions{ + SetupFunc: func(ctx core.WebhookHandlerContext) (any, error) { return nil, errors.New("oops") }, }) diff --git a/test/support/application.go b/test/support/application.go index 4b9cd69ce1..4b80f7f63a 100644 --- a/test/support/application.go +++ b/test/support/application.go @@ -10,33 +10,25 @@ import ( // type DummyIntegration struct { - actions []core.Action - handleAction func(ctx core.IntegrationActionContext) error - onSync func(ctx core.SyncContext) error - onCompareWebhookConfig func(a, b any) (bool, error) - onSetupWebhook func(ctx core.SetupWebhookContext) (any, error) - onCleanup func(ctx core.IntegrationCleanupContext) error + actions []core.Action + handleAction func(ctx core.IntegrationActionContext) error + onSync func(ctx core.SyncContext) error + onCleanup func(ctx core.IntegrationCleanupContext) error } type DummyIntegrationOptions struct { - Actions []core.Action - HandleAction func(ctx core.IntegrationActionContext) error - OnSync func(ctx core.SyncContext) error - OnCompareWebhookConfig func(a, b any) (bool, error) - OnSetupWebhook func(ctx core.SetupWebhookContext) (any, error) - OnCleanup func(ctx core.IntegrationCleanupContext) error + Actions []core.Action + HandleAction func(ctx core.IntegrationActionContext) error + OnSync func(ctx core.SyncContext) error } func NewDummyIntegration( options DummyIntegrationOptions, ) 
*DummyIntegration { return &DummyIntegration{ - actions: options.Actions, - handleAction: options.HandleAction, - onSync: options.OnSync, - onCompareWebhookConfig: options.OnCompareWebhookConfig, - onSetupWebhook: options.OnSetupWebhook, - onCleanup: options.OnCleanup, + actions: options.Actions, + handleAction: options.HandleAction, + onSync: options.OnSync, } } @@ -104,20 +96,43 @@ func (t *DummyIntegration) ListResources(resourceType string, ctx core.ListResou func (t *DummyIntegration) HandleRequest(ctx core.HTTPRequestContext) { } -func (t *DummyIntegration) CompareWebhookConfig(a, b any) (bool, error) { - if t.onCompareWebhookConfig != nil { - return t.onCompareWebhookConfig(a, b) +type DummyWebhookHandlerOptions struct { + SetupFunc func(ctx core.WebhookHandlerContext) (any, error) + CleanupFunc func(ctx core.WebhookHandlerContext) error + CompareConfigFunc func(a, b any) (bool, error) +} + +type DummyWebhookHandler struct { + setupFunc func(ctx core.WebhookHandlerContext) (any, error) + cleanupFunc func(ctx core.WebhookHandlerContext) error + compareConfigFunc func(a, b any) (bool, error) +} + +func NewDummyWebhookHandler(options DummyWebhookHandlerOptions) *DummyWebhookHandler { + return &DummyWebhookHandler{ + setupFunc: options.SetupFunc, + cleanupFunc: options.CleanupFunc, + compareConfigFunc: options.CompareConfigFunc, } - return true, nil } -func (t *DummyIntegration) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - if t.onSetupWebhook == nil { - return nil, nil +func (t *DummyWebhookHandler) CompareConfig(a, b any) (bool, error) { + if t.compareConfigFunc == nil { + return false, nil } - return t.onSetupWebhook(ctx) + return t.compareConfigFunc(a, b) } -func (t *DummyIntegration) CleanupWebhook(ctx core.CleanupWebhookContext) error { - return nil +func (t *DummyWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { + if t.setupFunc == nil { + return map[string]any{}, nil + } + return t.setupFunc(ctx) +} + +func (t 
*DummyWebhookHandler) Cleanup(ctx core.WebhookHandlerContext) error { + if t.cleanupFunc == nil { + return nil + } + return t.cleanupFunc(ctx) } From 54416df5929f5ec0185a4908dd9642910d6862b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Sun, 8 Feb 2026 18:44:24 +0100 Subject: [PATCH 021/160] chore: Use standard unit tests without explicit mocks (#2968) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- pkg/integrations/claude/claude_test.go | 118 +++------- pkg/integrations/claude/client_test.go | 231 +++++++------------- pkg/integrations/claude/text_prompt_test.go | 128 ++++------- 3 files changed, 153 insertions(+), 324 deletions(-) diff --git a/pkg/integrations/claude/claude_test.go b/pkg/integrations/claude/claude_test.go index eeaf96d40b..50dce8169a 100644 --- a/pkg/integrations/claude/claude_test.go +++ b/pkg/integrations/claude/claude_test.go @@ -2,89 +2,16 @@ package claude import ( "bytes" - "fmt" "io" "net/http" "testing" - "time" - "github.com/google/uuid" "github.com/sirupsen/logrus" "github.com/superplanehq/superplane/pkg/configuration" "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" ) -// --- Mocks --- - -// mockHTTPContext implements core.HTTPContext for testing -type mockHTTPContext struct { - RoundTripFunc func(req *http.Request) *http.Response -} - -func (m *mockHTTPContext) Do(req *http.Request) (*http.Response, error) { - if m.RoundTripFunc != nil { - return m.RoundTripFunc(req), nil - } - return &http.Response{ - StatusCode: http.StatusOK, - Body: io.NopCloser(bytes.NewBufferString(`{}`)), - }, nil -} - -// mockIntegrationContext implements core.IntegrationContext for testing -type mockIntegrationContext struct { - config map[string][]byte - ready bool - errMsg string -} - -func newMockIntegrationContext() *mockIntegrationContext { - return 
&mockIntegrationContext{ - config: make(map[string][]byte), - } -} - -func (m *mockIntegrationContext) GetConfig(name string) ([]byte, error) { - val, ok := m.config[name] - if !ok { - return nil, fmt.Errorf("config not found: %s", name) - } - return val, nil -} - -func (m *mockIntegrationContext) Ready() { - m.ready = true -} - -func (m *mockIntegrationContext) Error(message string) { - m.errMsg = message -} - -// Stubs for other interface methods -func (m *mockIntegrationContext) ID() uuid.UUID { return uuid.New() } -func (m *mockIntegrationContext) GetMetadata() any { return nil } -func (m *mockIntegrationContext) SetMetadata(any) {} -func (m *mockIntegrationContext) NewBrowserAction(core.BrowserAction) {} -func (m *mockIntegrationContext) RemoveBrowserAction() {} -func (m *mockIntegrationContext) SetSecret(string, []byte) error { return nil } -func (m *mockIntegrationContext) GetSecrets() ([]core.IntegrationSecret, error) { - return nil, nil -} -func (m *mockIntegrationContext) RequestWebhook(any) error { return nil } -func (m *mockIntegrationContext) Subscribe(any) (*uuid.UUID, error) { - u := uuid.New() - return &u, nil -} -func (m *mockIntegrationContext) ScheduleResync(time.Duration) error { return nil } -func (m *mockIntegrationContext) ScheduleActionCall(string, any, time.Duration) error { - return nil -} -func (m *mockIntegrationContext) ListSubscriptions() ([]core.IntegrationSubscriptionContext, error) { - return nil, nil -} - -// --- Tests --- - func TestClaude_Configuration(t *testing.T) { i := &Claude{} configs := i.Configuration() @@ -170,19 +97,25 @@ func TestClaude_Sync(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { i := &Claude{} - mockInt := newMockIntegrationContext() - mockHTTP := &mockHTTPContext{RoundTripFunc: tt.mockResponses} - - // Populate mock integration config (used by NewClient) + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{}, + } if v, ok := 
tt.config["apiKey"].(string); ok { - mockInt.config["apiKey"] = []byte(v) + integrationCtx.Configuration["apiKey"] = v + } + + var responses []*http.Response + if tt.mockResponses != nil { + req, _ := http.NewRequest(http.MethodGet, "https://api.anthropic.com/v1/models", nil) + responses = []*http.Response{tt.mockResponses(req)} } + httpCtx := &contexts.HTTPContext{Responses: responses} ctx := core.SyncContext{ Logger: logger, - Configuration: tt.config, // Used by mapstructure decode - HTTP: mockHTTP, - Integration: mockInt, + Configuration: tt.config, + HTTP: httpCtx, + Integration: integrationCtx, } err := i.Sync(ctx) @@ -193,7 +126,7 @@ func TestClaude_Sync(t *testing.T) { if !tt.expectError && err != nil { t.Errorf("unexpected error: %v", err) } - if tt.expectReady && !mockInt.ready { + if tt.expectReady && integrationCtx.State != "ready" { t.Error("expected integration to be marked ready") } }) @@ -254,14 +187,23 @@ func TestClaude_ListResources(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { i := &Claude{} - mockInt := newMockIntegrationContext() - mockInt.config = tt.config - mockHTTP := &mockHTTPContext{RoundTripFunc: tt.mockResponses} + configAny := make(map[string]any) + for k, v := range tt.config { + configAny[k] = string(v) + } + integrationCtx := &contexts.IntegrationContext{Configuration: configAny} + + var responses []*http.Response + if tt.mockResponses != nil { + req, _ := http.NewRequest(http.MethodGet, "https://api.anthropic.com/v1/models", nil) + responses = []*http.Response{tt.mockResponses(req)} + } + httpCtx := &contexts.HTTPContext{Responses: responses} ctx := core.ListResourcesContext{ Logger: logger, - HTTP: mockHTTP, - Integration: mockInt, + HTTP: httpCtx, + Integration: integrationCtx, } resources, err := i.ListResources(tt.resourceType, ctx) diff --git a/pkg/integrations/claude/client_test.go b/pkg/integrations/claude/client_test.go index 4c4ffbd3b9..e183214995 100644 --- 
a/pkg/integrations/claude/client_test.go +++ b/pkg/integrations/claude/client_test.go @@ -3,105 +3,38 @@ package claude import ( "bytes" "encoding/json" - "fmt" "io" "net/http" "testing" - "time" - "github.com/google/uuid" "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" ) -// --- Mocks --- - -// mockHTTPContextForClient implements core.HTTPContext -type mockHTTPContextForClient struct { - // RoundTripFunc allows us to define the response for a specific request - RoundTripFunc func(req *http.Request) *http.Response -} - -func (m *mockHTTPContextForClient) Do(req *http.Request) (*http.Response, error) { - if m.RoundTripFunc != nil { - return m.RoundTripFunc(req), nil - } - // Default fallback - return &http.Response{ - StatusCode: http.StatusOK, - Body: io.NopCloser(bytes.NewBufferString(`{}`)), - }, nil -} - -// mockIntegrationContextForClient implements core.IntegrationContext -type mockIntegrationContextForClient struct { - config map[string][]byte -} - -func newMockIntegrationContextForClient() *mockIntegrationContextForClient { - return &mockIntegrationContextForClient{ - config: make(map[string][]byte), - } -} - -func (m *mockIntegrationContextForClient) GetConfig(name string) ([]byte, error) { - val, ok := m.config[name] - if !ok { - return nil, fmt.Errorf("config not found: %s", name) - } - return val, nil -} - -// Stubs to satisfy the core.IntegrationContext interface -func (m *mockIntegrationContextForClient) ID() uuid.UUID { return uuid.New() } -func (m *mockIntegrationContextForClient) GetMetadata() any { return nil } -func (m *mockIntegrationContextForClient) SetMetadata(any) {} -func (m *mockIntegrationContextForClient) Ready() {} -func (m *mockIntegrationContextForClient) Error(message string) {} -func (m *mockIntegrationContextForClient) NewBrowserAction(core.BrowserAction) {} -func (m *mockIntegrationContextForClient) RemoveBrowserAction() {} -func (m *mockIntegrationContextForClient) 
SetSecret(string, []byte) error { return nil } -func (m *mockIntegrationContextForClient) GetSecrets() ([]core.IntegrationSecret, error) { - return nil, nil -} -func (m *mockIntegrationContextForClient) RequestWebhook(any) error { return nil } -func (m *mockIntegrationContextForClient) Subscribe(any) (*uuid.UUID, error) { - return nil, nil -} -func (m *mockIntegrationContextForClient) ScheduleResync(time.Duration) error { return nil } -func (m *mockIntegrationContextForClient) ScheduleActionCall(string, any, time.Duration) error { - return nil -} -func (m *mockIntegrationContextForClient) ListSubscriptions() ([]core.IntegrationSubscriptionContext, error) { - return nil, nil -} - -// --- Tests --- - func TestNewClient(t *testing.T) { - mockHTTP := &mockHTTPContextForClient{} + httpCtx := &contexts.HTTPContext{} tests := []struct { - name string - setupMock func(*mockIntegrationContextForClient) - ctx core.IntegrationContext - expectError bool + name string + integrationCtx *contexts.IntegrationContext + expectError bool }{ { name: "Success", - setupMock: func(m *mockIntegrationContextForClient) { - m.config["apiKey"] = []byte("sk-123") + integrationCtx: &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "sk-123"}, }, expectError: false, }, { - name: "Nil Context", - ctx: nil, - expectError: true, + name: "Nil Context", + integrationCtx: nil, + expectError: true, }, { name: "Missing API Key", - setupMock: func(m *mockIntegrationContextForClient) { - // No API Key + integrationCtx: &contexts.IntegrationContext{ + Configuration: map[string]any{}, }, expectError: true, }, @@ -110,20 +43,11 @@ func TestNewClient(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { var integrationCtx core.IntegrationContext - - if tt.ctx != nil { - // Use explicitly provided context (e.g. 
nil) - integrationCtx = tt.ctx - } else { - // Use the mock - mockInt := newMockIntegrationContextForClient() - if tt.setupMock != nil { - tt.setupMock(mockInt) - } - integrationCtx = mockInt + if tt.integrationCtx != nil { + integrationCtx = tt.integrationCtx } - client, err := NewClient(mockHTTP, integrationCtx) + client, err := NewClient(httpCtx, integrationCtx) if tt.expectError { if err == nil { @@ -157,25 +81,19 @@ func TestClient_Verify(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - mockHTTP := &mockHTTPContextForClient{ - RoundTripFunc: func(req *http.Request) *http.Response { - if req.Method != http.MethodGet { - t.Errorf("expected method GET, got %s", req.Method) - } - if req.URL.String() != "https://api.anthropic.com/v1/models" { - t.Errorf("expected URL .../models, got %s", req.URL.String()) - } - return &http.Response{ + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { StatusCode: tt.responseStatus, Body: io.NopCloser(bytes.NewBufferString(`{}`)), - } + }, }, } client := &Client{ APIKey: "test-key", BaseURL: defaultBaseURL, - http: mockHTTP, + http: httpCtx, } err := client.Verify() @@ -190,22 +108,22 @@ func TestClient_Verify(t *testing.T) { } func TestClient_ListModels(t *testing.T) { - mockHTTP := &mockHTTPContextForClient{ - RoundTripFunc: func(req *http.Request) *http.Response { - jsonResp := `{ - "data": [ - {"id": "claude-3-opus"}, - {"id": "claude-3-sonnet"} - ] - }` - return &http.Response{ + jsonResp := `{ + "data": [ + {"id": "claude-3-opus"}, + {"id": "claude-3-sonnet"} + ] + }` + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { StatusCode: 200, Body: io.NopCloser(bytes.NewBufferString(jsonResp)), - } + }, }, } - client := &Client{http: mockHTTP, BaseURL: defaultBaseURL} + client := &Client{http: httpCtx, BaseURL: defaultBaseURL} models, err := client.ListModels() if err != nil { @@ -221,52 +139,30 @@ func TestClient_ListModels(t *testing.T) { } func 
TestClient_CreateMessage(t *testing.T) { - mockHTTP := &mockHTTPContextForClient{ - RoundTripFunc: func(req *http.Request) *http.Response { - // Verify Headers - if req.Header.Get("x-api-key") != "my-secret-key" { - t.Errorf("missing or wrong x-api-key header") - } - if req.Header.Get("anthropic-version") != "2023-06-01" { - t.Errorf("missing or wrong anthropic-version header") - } - if req.Header.Get("Content-Type") != "application/json" { - t.Errorf("missing or wrong Content-Type") - } - - // Verify Body - bodyBytes, _ := io.ReadAll(req.Body) - var sentReq CreateMessageRequest - if err := json.Unmarshal(bodyBytes, &sentReq); err != nil { - t.Errorf("failed to unmarshal sent body: %v", err) - } - if sentReq.Model != "claude-3-opus" { - t.Errorf("sent wrong model: %s", sentReq.Model) - } - - // Return Success - jsonResp := `{ - "id": "msg_123", - "type": "message", - "role": "assistant", - "content": [ - {"type": "text", "text": "Hello there"} - ], - "model": "claude-3-opus", - "stop_reason": "end_turn", - "usage": {"input_tokens": 10, "output_tokens": 5} - }` - return &http.Response{ + jsonResp := `{ + "id": "msg_123", + "type": "message", + "role": "assistant", + "content": [ + {"type": "text", "text": "Hello there"} + ], + "model": "claude-3-opus", + "stop_reason": "end_turn", + "usage": {"input_tokens": 10, "output_tokens": 5} + }` + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { StatusCode: 200, Body: io.NopCloser(bytes.NewBufferString(jsonResp)), - } + }, }, } client := &Client{ APIKey: "my-secret-key", BaseURL: defaultBaseURL, - http: mockHTTP, + http: httpCtx, } req := CreateMessageRequest{ @@ -288,6 +184,29 @@ func TestClient_CreateMessage(t *testing.T) { if len(resp.Content) == 0 || resp.Content[0].Text != "Hello there" { t.Error("response content mismatch") } + + // Verify request that was sent + if len(httpCtx.Requests) != 1 { + t.Fatalf("expected 1 request, got %d", len(httpCtx.Requests)) + } + sentReq := httpCtx.Requests[0] + if 
sentReq.Header.Get("x-api-key") != "my-secret-key" { + t.Errorf("missing or wrong x-api-key header") + } + if sentReq.Header.Get("anthropic-version") != "2023-06-01" { + t.Errorf("missing or wrong anthropic-version header") + } + if sentReq.Header.Get("Content-Type") != "application/json" { + t.Errorf("missing or wrong Content-Type") + } + bodyBytes, _ := io.ReadAll(sentReq.Body) + var sentBody CreateMessageRequest + if err := json.Unmarshal(bodyBytes, &sentBody); err != nil { + t.Errorf("failed to unmarshal sent body: %v", err) + } + if sentBody.Model != "claude-3-opus" { + t.Errorf("sent wrong model: %s", sentBody.Model) + } } func TestClient_ErrorHandling(t *testing.T) { @@ -329,16 +248,16 @@ func TestClient_ErrorHandling(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - mockHTTP := &mockHTTPContextForClient{ - RoundTripFunc: func(req *http.Request) *http.Response { - return &http.Response{ + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { StatusCode: tt.statusCode, Body: io.NopCloser(bytes.NewBufferString(tt.responseBody)), - } + }, }, } - client := &Client{http: mockHTTP, BaseURL: defaultBaseURL} + client := &Client{http: httpCtx, BaseURL: defaultBaseURL} // We use ListModels as a simple way to trigger execRequest _, err := client.ListModels() diff --git a/pkg/integrations/claude/text_prompt_test.go b/pkg/integrations/claude/text_prompt_test.go index d8263b22ea..bfa48e5c0c 100644 --- a/pkg/integrations/claude/text_prompt_test.go +++ b/pkg/integrations/claude/text_prompt_test.go @@ -9,45 +9,9 @@ import ( "github.com/superplanehq/superplane/pkg/configuration" "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" ) -// --- Mocks --- - -// mockExecutionState implements core.ExecutionStateContext -type mockExecutionState struct { - EmittedChannel string - EmittedType string - EmittedPayloads []any - Finished bool - Failed bool - FailReason, FailMsg string -} - 
-func (m *mockExecutionState) IsFinished() bool { return m.Finished } -func (m *mockExecutionState) SetKV(key, value string) error { return nil } - -func (m *mockExecutionState) Emit(channel, payloadType string, payloads []any) error { - m.EmittedChannel = channel - m.EmittedType = payloadType - m.EmittedPayloads = payloads - return nil -} - -func (m *mockExecutionState) Pass() error { - m.Finished = true - return nil -} - -func (m *mockExecutionState) Fail(reason, message string) error { - m.Finished = true - m.Failed = true - m.FailReason = reason - m.FailMsg = message - return nil -} - -// --- Tests --- - func TestTextPrompt_Configuration(t *testing.T) { c := &TextPrompt{} config := c.Configuration() @@ -145,7 +109,8 @@ func TestTextPrompt_Execute(t *testing.T) { tests := []struct { name string config map[string]interface{} - mockResponse func(*http.Request) *http.Response + responseStatus int + responseBody string expectError bool expectEmission bool validatePayload func(*testing.T, MessagePayload) @@ -159,21 +124,8 @@ func TestTextPrompt_Execute(t *testing.T) { "systemMessage": "You are a bot", "temperature": 0.7, }, - mockResponse: func(req *http.Request) *http.Response { - // Verify request body - body, _ := io.ReadAll(req.Body) - var sent CreateMessageRequest - json.Unmarshal(body, &sent) - - if sent.Model != "claude-3-test" || sent.MaxTokens != 500 || sent.System != "You are a bot" { - return &http.Response{StatusCode: 400, Body: io.NopCloser(bytes.NewBufferString("bad request body"))} - } - - return &http.Response{ - StatusCode: 200, - Body: io.NopCloser(bytes.NewBufferString(validResponseJSON)), - } - }, + responseStatus: 200, + responseBody: validResponseJSON, expectError: false, expectEmission: true, validatePayload: func(t *testing.T, p MessagePayload) { @@ -201,32 +153,35 @@ func TestTextPrompt_Execute(t *testing.T) { "model": "claude-3-test", "prompt": "fail me", }, - mockResponse: func(req *http.Request) *http.Response { - return &http.Response{ - 
StatusCode: 500, - Body: io.NopCloser(bytes.NewBufferString(`{"error": {"message": "internal error"}}`)), - } - }, - expectError: true, + responseStatus: 500, + responseBody: `{"error": {"message": "internal error"}}`, + expectError: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - // Setup Mocks - mockState := &mockExecutionState{} - mockHTTP := &mockHTTPContext{RoundTripFunc: tt.mockResponse} - mockInt := &mockIntegrationContext{ - config: map[string][]byte{ - "apiKey": []byte("test-key"), - }, + execState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "test-key"}, } + var responses []*http.Response + if tt.responseStatus != 0 { + responses = []*http.Response{ + { + StatusCode: tt.responseStatus, + Body: io.NopCloser(bytes.NewBufferString(tt.responseBody)), + }, + } + } + httpCtx := &contexts.HTTPContext{Responses: responses} + ctx := core.ExecutionContext{ Configuration: tt.config, - ExecutionState: mockState, - HTTP: mockHTTP, - Integration: mockInt, + ExecutionState: execState, + HTTP: httpCtx, + Integration: integrationCtx, } err := c.Execute(ctx) @@ -244,19 +199,32 @@ func TestTextPrompt_Execute(t *testing.T) { } if tt.expectEmission { - if mockState.EmittedType != MessagePayloadType { - t.Errorf("expected emitted type %s, got %s", MessagePayloadType, mockState.EmittedType) + if execState.Type != MessagePayloadType { + t.Errorf("expected emitted type %s, got %s", MessagePayloadType, execState.Type) } - if len(mockState.EmittedPayloads) != 1 { - t.Errorf("expected 1 payload, got %d", len(mockState.EmittedPayloads)) + if len(execState.Payloads) != 1 { + t.Errorf("expected 1 payload, got %d", len(execState.Payloads)) } else if tt.validatePayload != nil { - // Convert payload back to struct for validation - // In real execution this is passed as any, here we cast it - payload, ok := mockState.EmittedPayloads[0].(MessagePayload) + 
wrapped, ok := execState.Payloads[0].(map[string]any) + if !ok { + t.Error("emitted payload wrapper is not map[string]any") + return + } + data, ok := wrapped["data"].(MessagePayload) if !ok { - t.Error("emitted payload is not MessagePayload") - } else { - tt.validatePayload(t, payload) + t.Error("emitted payload data is not MessagePayload") + return + } + tt.validatePayload(t, data) + } + // Verify request body was sent correctly (e.g. Success case) + if len(httpCtx.Requests) == 1 && tt.validatePayload != nil { + bodyBytes, _ := io.ReadAll(httpCtx.Requests[0].Body) + var sent CreateMessageRequest + if err := json.Unmarshal(bodyBytes, &sent); err != nil { + t.Errorf("failed to unmarshal sent body: %v", err) + } else if sent.Model != "claude-3-test" || sent.MaxTokens != 500 || sent.System != "You are a bot" { + t.Errorf("request body mismatch: model=%s max_tokens=%d system=%s", sent.Model, sent.MaxTokens, sent.System) } } } From c728e65623e4563bdf45c5e60630b556c7c753d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pedro=20Le=C3=A3o?= <60622592+forestileao@users.noreply.github.com> Date: Sun, 8 Feb 2026 16:24:29 -0300 Subject: [PATCH 022/160] fix: regen api files for TS due to updated lib (#2970) Signed-off-by: Muhammad Fuzail Zubari --- web_src/src/api-client/client.gen.ts | 21 +- web_src/src/api-client/client/client.gen.ts | 285 ++++ web_src/src/api-client/client/index.ts | 21 + web_src/src/api-client/client/types.gen.ts | 202 +++ web_src/src/api-client/client/utils.gen.ts | 289 ++++ web_src/src/api-client/core/auth.gen.ts | 41 + .../src/api-client/core/bodySerializer.gen.ts | 82 ++ web_src/src/api-client/core/params.gen.ts | 169 +++ .../src/api-client/core/pathSerializer.gen.ts | 167 +++ .../api-client/core/queryKeySerializer.gen.ts | 111 ++ .../api-client/core/serverSentEvents.gen.ts | 239 ++++ web_src/src/api-client/core/types.gen.ts | 86 ++ web_src/src/api-client/core/utils.gen.ts | 137 ++ web_src/src/api-client/index.ts | 646 ++++++++- 
web_src/src/api-client/sdk.gen.ts | 1201 +++++++---------- web_src/src/api-client/types.gen.ts | 14 +- 16 files changed, 3010 insertions(+), 701 deletions(-) create mode 100644 web_src/src/api-client/client/client.gen.ts create mode 100644 web_src/src/api-client/client/index.ts create mode 100644 web_src/src/api-client/client/types.gen.ts create mode 100644 web_src/src/api-client/client/utils.gen.ts create mode 100644 web_src/src/api-client/core/auth.gen.ts create mode 100644 web_src/src/api-client/core/bodySerializer.gen.ts create mode 100644 web_src/src/api-client/core/params.gen.ts create mode 100644 web_src/src/api-client/core/pathSerializer.gen.ts create mode 100644 web_src/src/api-client/core/queryKeySerializer.gen.ts create mode 100644 web_src/src/api-client/core/serverSentEvents.gen.ts create mode 100644 web_src/src/api-client/core/types.gen.ts create mode 100644 web_src/src/api-client/core/utils.gen.ts diff --git a/web_src/src/api-client/client.gen.ts b/web_src/src/api-client/client.gen.ts index cef44c97a7..0e152a7f07 100644 --- a/web_src/src/api-client/client.gen.ts +++ b/web_src/src/api-client/client.gen.ts @@ -1,12 +1,7 @@ // This file is auto-generated by @hey-api/openapi-ts -import type { ClientOptions } from "./types.gen"; -import { - type Config, - type ClientOptions as DefaultClientOptions, - createClient, - createConfig, -} from "@hey-api/client-fetch"; +import { type ClientOptions, type Config, createClient, createConfig } from "./client"; +import type { ClientOptions as ClientOptions2 } from "./types.gen"; /** * The `createClientConfig()` function will be called on client initialization @@ -16,12 +11,8 @@ import { * `setConfig()`. This is useful for example if you're using Next.js * to ensure your client always has the correct values. 
*/ -export type CreateClientConfig = ( - override?: Config, -) => Config & T>; +export type CreateClientConfig = ( + override?: Config, +) => Config & T>; -export const client = createClient( - createConfig({ - throwOnError: true, - }), -); +export const client = createClient(createConfig({ throwOnError: true })); diff --git a/web_src/src/api-client/client/client.gen.ts b/web_src/src/api-client/client/client.gen.ts new file mode 100644 index 0000000000..ba705e043a --- /dev/null +++ b/web_src/src/api-client/client/client.gen.ts @@ -0,0 +1,285 @@ +// This file is auto-generated by @hey-api/openapi-ts + +import { createSseClient } from "../core/serverSentEvents.gen"; +import type { HttpMethod } from "../core/types.gen"; +import { getValidRequestBody } from "../core/utils.gen"; +import type { Client, Config, RequestOptions, ResolvedRequestOptions } from "./types.gen"; +import { + buildUrl, + createConfig, + createInterceptors, + getParseAs, + mergeConfigs, + mergeHeaders, + setAuthParams, +} from "./utils.gen"; + +type ReqInit = Omit & { + body?: any; + headers: ReturnType; +}; + +export const createClient = (config: Config = {}): Client => { + let _config = mergeConfigs(createConfig(), config); + + const getConfig = (): Config => ({ ..._config }); + + const setConfig = (config: Config): Config => { + _config = mergeConfigs(_config, config); + return getConfig(); + }; + + const interceptors = createInterceptors(); + + const beforeRequest = async (options: RequestOptions) => { + const opts = { + ..._config, + ...options, + fetch: options.fetch ?? _config.fetch ?? 
globalThis.fetch, + headers: mergeHeaders(_config.headers, options.headers), + serializedBody: undefined, + }; + + if (opts.security) { + await setAuthParams({ + ...opts, + security: opts.security, + }); + } + + if (opts.requestValidator) { + await opts.requestValidator(opts); + } + + if (opts.body !== undefined && opts.bodySerializer) { + opts.serializedBody = opts.bodySerializer(opts.body); + } + + // remove Content-Type header if body is empty to avoid sending invalid requests + if (opts.body === undefined || opts.serializedBody === "") { + opts.headers.delete("Content-Type"); + } + + const url = buildUrl(opts); + + return { opts, url }; + }; + + const request: Client["request"] = async (options) => { + // @ts-expect-error + const { opts, url } = await beforeRequest(options); + const requestInit: ReqInit = { + redirect: "follow", + ...opts, + body: getValidRequestBody(opts), + }; + + let request = new Request(url, requestInit); + + for (const fn of interceptors.request.fns) { + if (fn) { + request = await fn(request, opts); + } + } + + // fetch must be assigned here, otherwise it would throw the error: + // TypeError: Failed to execute 'fetch' on 'Window': Illegal invocation + const _fetch = opts.fetch!; + let response: Response; + + try { + response = await _fetch(request); + } catch (error) { + // Handle fetch exceptions (AbortError, network errors, etc.) + let finalError = error; + + for (const fn of interceptors.error.fns) { + if (fn) { + finalError = (await fn(error, undefined as any, request, opts)) as unknown; + } + } + + finalError = finalError || ({} as unknown); + + if (opts.throwOnError) { + throw finalError; + } + + // Return error response + return opts.responseStyle === "data" + ? 
undefined + : { + error: finalError, + request, + response: undefined as any, + }; + } + + for (const fn of interceptors.response.fns) { + if (fn) { + response = await fn(response, request, opts); + } + } + + const result = { + request, + response, + }; + + if (response.ok) { + const parseAs = + (opts.parseAs === "auto" ? getParseAs(response.headers.get("Content-Type")) : opts.parseAs) ?? "json"; + + if (response.status === 204 || response.headers.get("Content-Length") === "0") { + let emptyData: any; + switch (parseAs) { + case "arrayBuffer": + case "blob": + case "text": + emptyData = await response[parseAs](); + break; + case "formData": + emptyData = new FormData(); + break; + case "stream": + emptyData = response.body; + break; + case "json": + default: + emptyData = {}; + break; + } + return opts.responseStyle === "data" + ? emptyData + : { + data: emptyData, + ...result, + }; + } + + let data: any; + switch (parseAs) { + case "arrayBuffer": + case "blob": + case "formData": + case "text": + data = await response[parseAs](); + break; + case "json": { + // Some servers return 200 with no Content-Length and empty body. + // response.json() would throw; read as text and parse if non-empty. + const text = await response.text(); + data = text ? JSON.parse(text) : {}; + break; + } + case "stream": + return opts.responseStyle === "data" + ? response.body + : { + data: response.body, + ...result, + }; + } + + if (parseAs === "json") { + if (opts.responseValidator) { + await opts.responseValidator(data); + } + + if (opts.responseTransformer) { + data = await opts.responseTransformer(data); + } + } + + return opts.responseStyle === "data" + ? data + : { + data, + ...result, + }; + } + + const textError = await response.text(); + let jsonError: unknown; + + try { + jsonError = JSON.parse(textError); + } catch { + // noop + } + + const error = jsonError ?? 
textError; + let finalError = error; + + for (const fn of interceptors.error.fns) { + if (fn) { + finalError = (await fn(error, response, request, opts)) as string; + } + } + + finalError = finalError || ({} as string); + + if (opts.throwOnError) { + throw finalError; + } + + // TODO: we probably want to return error and improve types + return opts.responseStyle === "data" + ? undefined + : { + error: finalError, + ...result, + }; + }; + + const makeMethodFn = (method: Uppercase) => (options: RequestOptions) => request({ ...options, method }); + + const makeSseFn = (method: Uppercase) => async (options: RequestOptions) => { + const { opts, url } = await beforeRequest(options); + return createSseClient({ + ...opts, + body: opts.body as BodyInit | null | undefined, + headers: opts.headers as unknown as Record, + method, + onRequest: async (url, init) => { + let request = new Request(url, init); + for (const fn of interceptors.request.fns) { + if (fn) { + request = await fn(request, opts); + } + } + return request; + }, + serializedBody: getValidRequestBody(opts) as BodyInit | null | undefined, + url, + }); + }; + + return { + buildUrl, + connect: makeMethodFn("CONNECT"), + delete: makeMethodFn("DELETE"), + get: makeMethodFn("GET"), + getConfig, + head: makeMethodFn("HEAD"), + interceptors, + options: makeMethodFn("OPTIONS"), + patch: makeMethodFn("PATCH"), + post: makeMethodFn("POST"), + put: makeMethodFn("PUT"), + request, + setConfig, + sse: { + connect: makeSseFn("CONNECT"), + delete: makeSseFn("DELETE"), + get: makeSseFn("GET"), + head: makeSseFn("HEAD"), + options: makeSseFn("OPTIONS"), + patch: makeSseFn("PATCH"), + post: makeSseFn("POST"), + put: makeSseFn("PUT"), + trace: makeSseFn("TRACE"), + }, + trace: makeMethodFn("TRACE"), + } as Client; +}; diff --git a/web_src/src/api-client/client/index.ts b/web_src/src/api-client/client/index.ts new file mode 100644 index 0000000000..1738032573 --- /dev/null +++ b/web_src/src/api-client/client/index.ts @@ -0,0 +1,21 
@@ +// This file is auto-generated by @hey-api/openapi-ts + +export type { Auth } from "../core/auth.gen"; +export type { QuerySerializerOptions } from "../core/bodySerializer.gen"; +export { formDataBodySerializer, jsonBodySerializer, urlSearchParamsBodySerializer } from "../core/bodySerializer.gen"; +export { buildClientParams } from "../core/params.gen"; +export { serializeQueryKeyValue } from "../core/queryKeySerializer.gen"; +export { createClient } from "./client.gen"; +export type { + Client, + ClientOptions, + Config, + CreateClientConfig, + Options, + RequestOptions, + RequestResult, + ResolvedRequestOptions, + ResponseStyle, + TDataShape, +} from "./types.gen"; +export { createConfig, mergeHeaders } from "./utils.gen"; diff --git a/web_src/src/api-client/client/types.gen.ts b/web_src/src/api-client/client/types.gen.ts new file mode 100644 index 0000000000..998c311b9e --- /dev/null +++ b/web_src/src/api-client/client/types.gen.ts @@ -0,0 +1,202 @@ +// This file is auto-generated by @hey-api/openapi-ts + +import type { Auth } from "../core/auth.gen"; +import type { ServerSentEventsOptions, ServerSentEventsResult } from "../core/serverSentEvents.gen"; +import type { Client as CoreClient, Config as CoreConfig } from "../core/types.gen"; +import type { Middleware } from "./utils.gen"; + +export type ResponseStyle = "data" | "fields"; + +export interface Config + extends Omit, + CoreConfig { + /** + * Base URL for all requests made by this client. + */ + baseUrl?: T["baseUrl"]; + /** + * Fetch API implementation. You can use this option to provide a custom + * fetch instance. + * + * @default globalThis.fetch + */ + fetch?: typeof fetch; + /** + * Please don't use the Fetch client for Next.js applications. The `next` + * options won't have any effect. + * + * Install {@link https://www.npmjs.com/package/@hey-api/client-next `@hey-api/client-next`} instead. + */ + next?: never; + /** + * Return the response data parsed in a specified format. 
By default, `auto` + * will infer the appropriate method from the `Content-Type` response header. + * You can override this behavior with any of the {@link Body} methods. + * Select `stream` if you don't want to parse response data at all. + * + * @default 'auto' + */ + parseAs?: "arrayBuffer" | "auto" | "blob" | "formData" | "json" | "stream" | "text"; + /** + * Should we return only data or multiple fields (data, error, response, etc.)? + * + * @default 'fields' + */ + responseStyle?: ResponseStyle; + /** + * Throw an error instead of returning it in the response? + * + * @default false + */ + throwOnError?: T["throwOnError"]; +} + +export interface RequestOptions< + TData = unknown, + TResponseStyle extends ResponseStyle = "fields", + ThrowOnError extends boolean = boolean, + Url extends string = string, +> extends Config<{ + responseStyle: TResponseStyle; + throwOnError: ThrowOnError; + }>, + Pick< + ServerSentEventsOptions, + "onSseError" | "onSseEvent" | "sseDefaultRetryDelay" | "sseMaxRetryAttempts" | "sseMaxRetryDelay" + > { + /** + * Any body that you want to add to your request. + * + * {@link https://developer.mozilla.org/docs/Web/API/fetch#body} + */ + body?: unknown; + path?: Record; + query?: Record; + /** + * Security mechanism(s) to use for the request. + */ + security?: ReadonlyArray; + url: Url; +} + +export interface ResolvedRequestOptions< + TResponseStyle extends ResponseStyle = "fields", + ThrowOnError extends boolean = boolean, + Url extends string = string, +> extends RequestOptions { + serializedBody?: string; +} + +export type RequestResult< + TData = unknown, + TError = unknown, + ThrowOnError extends boolean = boolean, + TResponseStyle extends ResponseStyle = "fields", +> = ThrowOnError extends true + ? Promise< + TResponseStyle extends "data" + ? TData extends Record + ? TData[keyof TData] + : TData + : { + data: TData extends Record ? 
TData[keyof TData] : TData; + request: Request; + response: Response; + } + > + : Promise< + TResponseStyle extends "data" + ? (TData extends Record ? TData[keyof TData] : TData) | undefined + : ( + | { + data: TData extends Record ? TData[keyof TData] : TData; + error: undefined; + } + | { + data: undefined; + error: TError extends Record ? TError[keyof TError] : TError; + } + ) & { + request: Request; + response: Response; + } + >; + +export interface ClientOptions { + baseUrl?: string; + responseStyle?: ResponseStyle; + throwOnError?: boolean; +} + +type MethodFn = < + TData = unknown, + TError = unknown, + ThrowOnError extends boolean = false, + TResponseStyle extends ResponseStyle = "fields", +>( + options: Omit, "method">, +) => RequestResult; + +type SseFn = < + TData = unknown, + TError = unknown, + ThrowOnError extends boolean = false, + TResponseStyle extends ResponseStyle = "fields", +>( + options: Omit, "method">, +) => Promise>; + +type RequestFn = < + TData = unknown, + TError = unknown, + ThrowOnError extends boolean = false, + TResponseStyle extends ResponseStyle = "fields", +>( + options: Omit, "method"> & + Pick>, "method">, +) => RequestResult; + +type BuildUrlFn = < + TData extends { + body?: unknown; + path?: Record; + query?: Record; + url: string; + }, +>( + options: TData & Options, +) => string; + +export type Client = CoreClient & { + interceptors: Middleware; +}; + +/** + * The `createClientConfig()` function will be called on client initialization + * and the returned object will become the client's initial configuration. + * + * You may want to initialize your client this way instead of calling + * `setConfig()`. This is useful for example if you're using Next.js + * to ensure your client always has the correct values. 
+ */ +export type CreateClientConfig = ( + override?: Config, +) => Config & T>; + +export interface TDataShape { + body?: unknown; + headers?: unknown; + path?: unknown; + query?: unknown; + url: string; +} + +type OmitKeys = Pick>; + +export type Options< + TData extends TDataShape = TDataShape, + ThrowOnError extends boolean = boolean, + TResponse = unknown, + TResponseStyle extends ResponseStyle = "fields", +> = OmitKeys, "body" | "path" | "query" | "url"> & + ([TData] extends [never] ? unknown : Omit); diff --git a/web_src/src/api-client/client/utils.gen.ts b/web_src/src/api-client/client/utils.gen.ts new file mode 100644 index 0000000000..6ef3f58b64 --- /dev/null +++ b/web_src/src/api-client/client/utils.gen.ts @@ -0,0 +1,289 @@ +// This file is auto-generated by @hey-api/openapi-ts + +import { getAuthToken } from "../core/auth.gen"; +import type { QuerySerializerOptions } from "../core/bodySerializer.gen"; +import { jsonBodySerializer } from "../core/bodySerializer.gen"; +import { serializeArrayParam, serializeObjectParam, serializePrimitiveParam } from "../core/pathSerializer.gen"; +import { getUrl } from "../core/utils.gen"; +import type { Client, ClientOptions, Config, RequestOptions } from "./types.gen"; + +export const createQuerySerializer = ({ parameters = {}, ...args }: QuerySerializerOptions = {}) => { + const querySerializer = (queryParams: T) => { + const search: string[] = []; + if (queryParams && typeof queryParams === "object") { + for (const name in queryParams) { + const value = queryParams[name]; + + if (value === undefined || value === null) { + continue; + } + + const options = parameters[name] || args; + + if (Array.isArray(value)) { + const serializedArray = serializeArrayParam({ + allowReserved: options.allowReserved, + explode: true, + name, + style: "form", + value, + ...options.array, + }); + if (serializedArray) search.push(serializedArray); + } else if (typeof value === "object") { + const serializedObject = serializeObjectParam({ 
+ allowReserved: options.allowReserved, + explode: true, + name, + style: "deepObject", + value: value as Record, + ...options.object, + }); + if (serializedObject) search.push(serializedObject); + } else { + const serializedPrimitive = serializePrimitiveParam({ + allowReserved: options.allowReserved, + name, + value: value as string, + }); + if (serializedPrimitive) search.push(serializedPrimitive); + } + } + } + return search.join("&"); + }; + return querySerializer; +}; + +/** + * Infers parseAs value from provided Content-Type header. + */ +export const getParseAs = (contentType: string | null): Exclude => { + if (!contentType) { + // If no Content-Type header is provided, the best we can do is return the raw response body, + // which is effectively the same as the 'stream' option. + return "stream"; + } + + const cleanContent = contentType.split(";")[0]?.trim(); + + if (!cleanContent) { + return; + } + + if (cleanContent.startsWith("application/json") || cleanContent.endsWith("+json")) { + return "json"; + } + + if (cleanContent === "multipart/form-data") { + return "formData"; + } + + if (["application/", "audio/", "image/", "video/"].some((type) => cleanContent.startsWith(type))) { + return "blob"; + } + + if (cleanContent.startsWith("text/")) { + return "text"; + } + + return; +}; + +const checkForExistence = ( + options: Pick & { + headers: Headers; + }, + name?: string, +): boolean => { + if (!name) { + return false; + } + if (options.headers.has(name) || options.query?.[name] || options.headers.get("Cookie")?.includes(`${name}=`)) { + return true; + } + return false; +}; + +export const setAuthParams = async ({ + security, + ...options +}: Pick, "security"> & + Pick & { + headers: Headers; + }) => { + for (const auth of security) { + if (checkForExistence(options, auth.name)) { + continue; + } + + const token = await getAuthToken(auth, options.auth); + + if (!token) { + continue; + } + + const name = auth.name ?? 
"Authorization"; + + switch (auth.in) { + case "query": + if (!options.query) { + options.query = {}; + } + options.query[name] = token; + break; + case "cookie": + options.headers.append("Cookie", `${name}=${token}`); + break; + case "header": + default: + options.headers.set(name, token); + break; + } + } +}; + +export const buildUrl: Client["buildUrl"] = (options) => + getUrl({ + baseUrl: options.baseUrl as string, + path: options.path, + query: options.query, + querySerializer: + typeof options.querySerializer === "function" + ? options.querySerializer + : createQuerySerializer(options.querySerializer), + url: options.url, + }); + +export const mergeConfigs = (a: Config, b: Config): Config => { + const config = { ...a, ...b }; + if (config.baseUrl?.endsWith("/")) { + config.baseUrl = config.baseUrl.substring(0, config.baseUrl.length - 1); + } + config.headers = mergeHeaders(a.headers, b.headers); + return config; +}; + +const headersEntries = (headers: Headers): Array<[string, string]> => { + const entries: Array<[string, string]> = []; + headers.forEach((value, key) => { + entries.push([key, value]); + }); + return entries; +}; + +export const mergeHeaders = (...headers: Array["headers"] | undefined>): Headers => { + const mergedHeaders = new Headers(); + for (const header of headers) { + if (!header) { + continue; + } + + const iterator = header instanceof Headers ? headersEntries(header) : Object.entries(header); + + for (const [key, value] of iterator) { + if (value === null) { + mergedHeaders.delete(key); + } else if (Array.isArray(value)) { + for (const v of value) { + mergedHeaders.append(key, v as string); + } + } else if (value !== undefined) { + // assume object headers are meant to be JSON stringified, i.e. their + // content value in OpenAPI specification is 'application/json' + mergedHeaders.set(key, typeof value === "object" ? 
JSON.stringify(value) : (value as string)); + } + } + } + return mergedHeaders; +}; + +type ErrInterceptor = ( + error: Err, + response: Res, + request: Req, + options: Options, +) => Err | Promise; + +type ReqInterceptor = (request: Req, options: Options) => Req | Promise; + +type ResInterceptor = (response: Res, request: Req, options: Options) => Res | Promise; + +class Interceptors { + fns: Array = []; + + clear(): void { + this.fns = []; + } + + eject(id: number | Interceptor): void { + const index = this.getInterceptorIndex(id); + if (this.fns[index]) { + this.fns[index] = null; + } + } + + exists(id: number | Interceptor): boolean { + const index = this.getInterceptorIndex(id); + return Boolean(this.fns[index]); + } + + getInterceptorIndex(id: number | Interceptor): number { + if (typeof id === "number") { + return this.fns[id] ? id : -1; + } + return this.fns.indexOf(id); + } + + update(id: number | Interceptor, fn: Interceptor): number | Interceptor | false { + const index = this.getInterceptorIndex(id); + if (this.fns[index]) { + this.fns[index] = fn; + return id; + } + return false; + } + + use(fn: Interceptor): number { + this.fns.push(fn); + return this.fns.length - 1; + } +} + +export interface Middleware { + error: Interceptors>; + request: Interceptors>; + response: Interceptors>; +} + +export const createInterceptors = (): Middleware => ({ + error: new Interceptors>(), + request: new Interceptors>(), + response: new Interceptors>(), +}); + +const defaultQuerySerializer = createQuerySerializer({ + allowReserved: false, + array: { + explode: true, + style: "form", + }, + object: { + explode: true, + style: "deepObject", + }, +}); + +const defaultHeaders = { + "Content-Type": "application/json", +}; + +export const createConfig = ( + override: Config & T> = {}, +): Config & T> => ({ + ...jsonBodySerializer, + headers: defaultHeaders, + parseAs: "auto", + querySerializer: defaultQuerySerializer, + ...override, +}); diff --git 
a/web_src/src/api-client/core/auth.gen.ts b/web_src/src/api-client/core/auth.gen.ts new file mode 100644 index 0000000000..d392a5cb52 --- /dev/null +++ b/web_src/src/api-client/core/auth.gen.ts @@ -0,0 +1,41 @@ +// This file is auto-generated by @hey-api/openapi-ts + +export type AuthToken = string | undefined; + +export interface Auth { + /** + * Which part of the request do we use to send the auth? + * + * @default 'header' + */ + in?: "header" | "query" | "cookie"; + /** + * Header or query parameter name. + * + * @default 'Authorization' + */ + name?: string; + scheme?: "basic" | "bearer"; + type: "apiKey" | "http"; +} + +export const getAuthToken = async ( + auth: Auth, + callback: ((auth: Auth) => Promise | AuthToken) | AuthToken, +): Promise => { + const token = typeof callback === "function" ? await callback(auth) : callback; + + if (!token) { + return; + } + + if (auth.scheme === "bearer") { + return `Bearer ${token}`; + } + + if (auth.scheme === "basic") { + return `Basic ${btoa(token)}`; + } + + return token; +}; diff --git a/web_src/src/api-client/core/bodySerializer.gen.ts b/web_src/src/api-client/core/bodySerializer.gen.ts new file mode 100644 index 0000000000..24f175802f --- /dev/null +++ b/web_src/src/api-client/core/bodySerializer.gen.ts @@ -0,0 +1,82 @@ +// This file is auto-generated by @hey-api/openapi-ts + +import type { ArrayStyle, ObjectStyle, SerializerOptions } from "./pathSerializer.gen"; + +export type QuerySerializer = (query: Record) => string; + +export type BodySerializer = (body: any) => any; + +type QuerySerializerOptionsObject = { + allowReserved?: boolean; + array?: Partial>; + object?: Partial>; +}; + +export type QuerySerializerOptions = QuerySerializerOptionsObject & { + /** + * Per-parameter serialization overrides. When provided, these settings + * override the global array/object settings for specific parameter names. 
+ */ + parameters?: Record; +}; + +const serializeFormDataPair = (data: FormData, key: string, value: unknown): void => { + if (typeof value === "string" || value instanceof Blob) { + data.append(key, value); + } else if (value instanceof Date) { + data.append(key, value.toISOString()); + } else { + data.append(key, JSON.stringify(value)); + } +}; + +const serializeUrlSearchParamsPair = (data: URLSearchParams, key: string, value: unknown): void => { + if (typeof value === "string") { + data.append(key, value); + } else { + data.append(key, JSON.stringify(value)); + } +}; + +export const formDataBodySerializer = { + bodySerializer: | Array>>(body: T): FormData => { + const data = new FormData(); + + Object.entries(body).forEach(([key, value]) => { + if (value === undefined || value === null) { + return; + } + if (Array.isArray(value)) { + value.forEach((v) => serializeFormDataPair(data, key, v)); + } else { + serializeFormDataPair(data, key, value); + } + }); + + return data; + }, +}; + +export const jsonBodySerializer = { + bodySerializer: (body: T): string => + JSON.stringify(body, (_key, value) => (typeof value === "bigint" ? 
value.toString() : value)), +}; + +export const urlSearchParamsBodySerializer = { + bodySerializer: | Array>>(body: T): string => { + const data = new URLSearchParams(); + + Object.entries(body).forEach(([key, value]) => { + if (value === undefined || value === null) { + return; + } + if (Array.isArray(value)) { + value.forEach((v) => serializeUrlSearchParamsPair(data, key, v)); + } else { + serializeUrlSearchParamsPair(data, key, value); + } + }); + + return data.toString(); + }, +}; diff --git a/web_src/src/api-client/core/params.gen.ts b/web_src/src/api-client/core/params.gen.ts new file mode 100644 index 0000000000..4531287206 --- /dev/null +++ b/web_src/src/api-client/core/params.gen.ts @@ -0,0 +1,169 @@ +// This file is auto-generated by @hey-api/openapi-ts + +type Slot = "body" | "headers" | "path" | "query"; + +export type Field = + | { + in: Exclude; + /** + * Field name. This is the name we want the user to see and use. + */ + key: string; + /** + * Field mapped name. This is the name we want to use in the request. + * If omitted, we use the same value as `key`. + */ + map?: string; + } + | { + in: Extract; + /** + * Key isn't required for bodies. + */ + key?: string; + map?: string; + } + | { + /** + * Field name. This is the name we want the user to see and use. + */ + key: string; + /** + * Field mapped name. This is the name we want to use in the request. + * If `in` is omitted, `map` aliases `key` to the transport layer. 
+ */ + map: Slot; + }; + +export interface Fields { + allowExtra?: Partial>; + args?: ReadonlyArray; +} + +export type FieldsConfig = ReadonlyArray; + +const extraPrefixesMap: Record = { + $body_: "body", + $headers_: "headers", + $path_: "path", + $query_: "query", +}; +const extraPrefixes = Object.entries(extraPrefixesMap); + +type KeyMap = Map< + string, + | { + in: Slot; + map?: string; + } + | { + in?: never; + map: Slot; + } +>; + +const buildKeyMap = (fields: FieldsConfig, map?: KeyMap): KeyMap => { + if (!map) { + map = new Map(); + } + + for (const config of fields) { + if ("in" in config) { + if (config.key) { + map.set(config.key, { + in: config.in, + map: config.map, + }); + } + } else if ("key" in config) { + map.set(config.key, { + map: config.map, + }); + } else if (config.args) { + buildKeyMap(config.args, map); + } + } + + return map; +}; + +interface Params { + body: unknown; + headers: Record; + path: Record; + query: Record; +} + +const stripEmptySlots = (params: Params) => { + for (const [slot, value] of Object.entries(params)) { + if (value && typeof value === "object" && !Object.keys(value).length) { + delete params[slot as Slot]; + } + } +}; + +export const buildClientParams = (args: ReadonlyArray, fields: FieldsConfig) => { + const params: Params = { + body: {}, + headers: {}, + path: {}, + query: {}, + }; + + const map = buildKeyMap(fields); + + let config: FieldsConfig[number] | undefined; + + for (const [index, arg] of args.entries()) { + if (fields[index]) { + config = fields[index]; + } + + if (!config) { + continue; + } + + if ("in" in config) { + if (config.key) { + const field = map.get(config.key)!; + const name = field.map || config.key; + if (field.in) { + (params[field.in] as Record)[name] = arg; + } + } else { + params.body = arg; + } + } else { + for (const [key, value] of Object.entries(arg ?? 
{})) { + const field = map.get(key); + + if (field) { + if (field.in) { + const name = field.map || key; + (params[field.in] as Record)[name] = value; + } else { + params[field.map] = value; + } + } else { + const extra = extraPrefixes.find(([prefix]) => key.startsWith(prefix)); + + if (extra) { + const [prefix, slot] = extra; + (params[slot] as Record)[key.slice(prefix.length)] = value; + } else if ("allowExtra" in config && config.allowExtra) { + for (const [slot, allowed] of Object.entries(config.allowExtra)) { + if (allowed) { + (params[slot as Slot] as Record)[key] = value; + break; + } + } + } + } + } + } + } + + stripEmptySlots(params); + + return params; +}; diff --git a/web_src/src/api-client/core/pathSerializer.gen.ts b/web_src/src/api-client/core/pathSerializer.gen.ts new file mode 100644 index 0000000000..aa4d3d918e --- /dev/null +++ b/web_src/src/api-client/core/pathSerializer.gen.ts @@ -0,0 +1,167 @@ +// This file is auto-generated by @hey-api/openapi-ts + +interface SerializeOptions extends SerializePrimitiveOptions, SerializerOptions {} + +interface SerializePrimitiveOptions { + allowReserved?: boolean; + name: string; +} + +export interface SerializerOptions { + /** + * @default true + */ + explode: boolean; + style: T; +} + +export type ArrayStyle = "form" | "spaceDelimited" | "pipeDelimited"; +export type ArraySeparatorStyle = ArrayStyle | MatrixStyle; +type MatrixStyle = "label" | "matrix" | "simple"; +export type ObjectStyle = "form" | "deepObject"; +type ObjectSeparatorStyle = ObjectStyle | MatrixStyle; + +interface SerializePrimitiveParam extends SerializePrimitiveOptions { + value: string; +} + +export const separatorArrayExplode = (style: ArraySeparatorStyle) => { + switch (style) { + case "label": + return "."; + case "matrix": + return ";"; + case "simple": + return ","; + default: + return "&"; + } +}; + +export const separatorArrayNoExplode = (style: ArraySeparatorStyle) => { + switch (style) { + case "form": + return ","; + case 
"pipeDelimited": + return "|"; + case "spaceDelimited": + return "%20"; + default: + return ","; + } +}; + +export const separatorObjectExplode = (style: ObjectSeparatorStyle) => { + switch (style) { + case "label": + return "."; + case "matrix": + return ";"; + case "simple": + return ","; + default: + return "&"; + } +}; + +export const serializeArrayParam = ({ + allowReserved, + explode, + name, + style, + value, +}: SerializeOptions & { + value: unknown[]; +}) => { + if (!explode) { + const joinedValues = (allowReserved ? value : value.map((v) => encodeURIComponent(v as string))).join( + separatorArrayNoExplode(style), + ); + switch (style) { + case "label": + return `.${joinedValues}`; + case "matrix": + return `;${name}=${joinedValues}`; + case "simple": + return joinedValues; + default: + return `${name}=${joinedValues}`; + } + } + + const separator = separatorArrayExplode(style); + const joinedValues = value + .map((v) => { + if (style === "label" || style === "simple") { + return allowReserved ? v : encodeURIComponent(v as string); + } + + return serializePrimitiveParam({ + allowReserved, + name, + value: v as string, + }); + }) + .join(separator); + return style === "label" || style === "matrix" ? separator + joinedValues : joinedValues; +}; + +export const serializePrimitiveParam = ({ allowReserved, name, value }: SerializePrimitiveParam) => { + if (value === undefined || value === null) { + return ""; + } + + if (typeof value === "object") { + throw new Error( + "Deeply-nested arrays/objects aren’t supported. Provide your own `querySerializer()` to handle these.", + ); + } + + return `${name}=${allowReserved ? value : encodeURIComponent(value)}`; +}; + +export const serializeObjectParam = ({ + allowReserved, + explode, + name, + style, + value, + valueOnly, +}: SerializeOptions & { + value: Record | Date; + valueOnly?: boolean; +}) => { + if (value instanceof Date) { + return valueOnly ? 
value.toISOString() : `${name}=${value.toISOString()}`; + } + + if (style !== "deepObject" && !explode) { + let values: string[] = []; + Object.entries(value).forEach(([key, v]) => { + values = [...values, key, allowReserved ? (v as string) : encodeURIComponent(v as string)]; + }); + const joinedValues = values.join(","); + switch (style) { + case "form": + return `${name}=${joinedValues}`; + case "label": + return `.${joinedValues}`; + case "matrix": + return `;${name}=${joinedValues}`; + default: + return joinedValues; + } + } + + const separator = separatorObjectExplode(style); + const joinedValues = Object.entries(value) + .map(([key, v]) => + serializePrimitiveParam({ + allowReserved, + name: style === "deepObject" ? `${name}[${key}]` : key, + value: v as string, + }), + ) + .join(separator); + return style === "label" || style === "matrix" ? separator + joinedValues : joinedValues; +}; diff --git a/web_src/src/api-client/core/queryKeySerializer.gen.ts b/web_src/src/api-client/core/queryKeySerializer.gen.ts new file mode 100644 index 0000000000..58d7eb53fc --- /dev/null +++ b/web_src/src/api-client/core/queryKeySerializer.gen.ts @@ -0,0 +1,111 @@ +// This file is auto-generated by @hey-api/openapi-ts + +/** + * JSON-friendly union that mirrors what Pinia Colada can hash. + */ +export type JsonValue = null | string | number | boolean | JsonValue[] | { [key: string]: JsonValue }; + +/** + * Replacer that converts non-JSON values (bigint, Date, etc.) to safe substitutes. + */ +export const queryKeyJsonReplacer = (_key: string, value: unknown) => { + if (value === undefined || typeof value === "function" || typeof value === "symbol") { + return undefined; + } + if (typeof value === "bigint") { + return value.toString(); + } + if (value instanceof Date) { + return value.toISOString(); + } + return value; +}; + +/** + * Safely stringifies a value and parses it back into a JsonValue. 
+ */ +export const stringifyToJsonValue = (input: unknown): JsonValue | undefined => { + try { + const json = JSON.stringify(input, queryKeyJsonReplacer); + if (json === undefined) { + return undefined; + } + return JSON.parse(json) as JsonValue; + } catch { + return undefined; + } +}; + +/** + * Detects plain objects (including objects with a null prototype). + */ +const isPlainObject = (value: unknown): value is Record => { + if (value === null || typeof value !== "object") { + return false; + } + const prototype = Object.getPrototypeOf(value as object); + return prototype === Object.prototype || prototype === null; +}; + +/** + * Turns URLSearchParams into a sorted JSON object for deterministic keys. + */ +const serializeSearchParams = (params: URLSearchParams): JsonValue => { + const entries = Array.from(params.entries()).sort(([a], [b]) => a.localeCompare(b)); + const result: Record = {}; + + for (const [key, value] of entries) { + const existing = result[key]; + if (existing === undefined) { + result[key] = value; + continue; + } + + if (Array.isArray(existing)) { + (existing as string[]).push(value); + } else { + result[key] = [existing, value]; + } + } + + return result; +}; + +/** + * Normalizes any accepted value into a JSON-friendly shape for query keys. 
+ */ +export const serializeQueryKeyValue = (value: unknown): JsonValue | undefined => { + if (value === null) { + return null; + } + + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + return value; + } + + if (value === undefined || typeof value === "function" || typeof value === "symbol") { + return undefined; + } + + if (typeof value === "bigint") { + return value.toString(); + } + + if (value instanceof Date) { + return value.toISOString(); + } + + if (Array.isArray(value)) { + return stringifyToJsonValue(value); + } + + if (typeof URLSearchParams !== "undefined" && value instanceof URLSearchParams) { + return serializeSearchParams(value); + } + + if (isPlainObject(value)) { + return stringifyToJsonValue(value); + } + + return undefined; +}; diff --git a/web_src/src/api-client/core/serverSentEvents.gen.ts b/web_src/src/api-client/core/serverSentEvents.gen.ts new file mode 100644 index 0000000000..adc5546d96 --- /dev/null +++ b/web_src/src/api-client/core/serverSentEvents.gen.ts @@ -0,0 +1,239 @@ +// This file is auto-generated by @hey-api/openapi-ts + +import type { Config } from "./types.gen"; + +export type ServerSentEventsOptions = Omit & + Pick & { + /** + * Fetch API implementation. You can use this option to provide a custom + * fetch instance. + * + * @default globalThis.fetch + */ + fetch?: typeof fetch; + /** + * Implementing clients can call request interceptors inside this hook. + */ + onRequest?: (url: string, init: RequestInit) => Promise; + /** + * Callback invoked when a network or parsing error occurs during streaming. + * + * This option applies only if the endpoint returns a stream of events. + * + * @param error The error that occurred. + */ + onSseError?: (error: unknown) => void; + /** + * Callback invoked when an event is streamed from the server. + * + * This option applies only if the endpoint returns a stream of events. + * + * @param event Event streamed from the server. 
+ * @returns Nothing (void). + */ + onSseEvent?: (event: StreamEvent) => void; + serializedBody?: RequestInit["body"]; + /** + * Default retry delay in milliseconds. + * + * This option applies only if the endpoint returns a stream of events. + * + * @default 3000 + */ + sseDefaultRetryDelay?: number; + /** + * Maximum number of retry attempts before giving up. + */ + sseMaxRetryAttempts?: number; + /** + * Maximum retry delay in milliseconds. + * + * Applies only when exponential backoff is used. + * + * This option applies only if the endpoint returns a stream of events. + * + * @default 30000 + */ + sseMaxRetryDelay?: number; + /** + * Optional sleep function for retry backoff. + * + * Defaults to using `setTimeout`. + */ + sseSleepFn?: (ms: number) => Promise; + url: string; + }; + +export interface StreamEvent { + data: TData; + event?: string; + id?: string; + retry?: number; +} + +export type ServerSentEventsResult = { + stream: AsyncGenerator ? TData[keyof TData] : TData, TReturn, TNext>; +}; + +export const createSseClient = ({ + onRequest, + onSseError, + onSseEvent, + responseTransformer, + responseValidator, + sseDefaultRetryDelay, + sseMaxRetryAttempts, + sseMaxRetryDelay, + sseSleepFn, + url, + ...options +}: ServerSentEventsOptions): ServerSentEventsResult => { + let lastEventId: string | undefined; + + const sleep = sseSleepFn ?? ((ms: number) => new Promise((resolve) => setTimeout(resolve, ms))); + + const createStream = async function* () { + let retryDelay: number = sseDefaultRetryDelay ?? 3000; + let attempt = 0; + const signal = options.signal ?? new AbortController().signal; + + while (true) { + if (signal.aborted) break; + + attempt++; + + const headers = + options.headers instanceof Headers + ? 
options.headers + : new Headers(options.headers as Record | undefined); + + if (lastEventId !== undefined) { + headers.set("Last-Event-ID", lastEventId); + } + + try { + const requestInit: RequestInit = { + redirect: "follow", + ...options, + body: options.serializedBody, + headers, + signal, + }; + let request = new Request(url, requestInit); + if (onRequest) { + request = await onRequest(url, requestInit); + } + // fetch must be assigned here, otherwise it would throw the error: + // TypeError: Failed to execute 'fetch' on 'Window': Illegal invocation + const _fetch = options.fetch ?? globalThis.fetch; + const response = await _fetch(request); + + if (!response.ok) throw new Error(`SSE failed: ${response.status} ${response.statusText}`); + + if (!response.body) throw new Error("No body in SSE response"); + + const reader = response.body.pipeThrough(new TextDecoderStream()).getReader(); + + let buffer = ""; + + const abortHandler = () => { + try { + reader.cancel(); + } catch { + // noop + } + }; + + signal.addEventListener("abort", abortHandler); + + try { + while (true) { + const { done, value } = await reader.read(); + if (done) break; + buffer += value; + // Normalize line endings: CRLF -> LF, then CR -> LF + buffer = buffer.replace(/\r\n/g, "\n").replace(/\r/g, "\n"); + + const chunks = buffer.split("\n\n"); + buffer = chunks.pop() ?? 
""; + + for (const chunk of chunks) { + const lines = chunk.split("\n"); + const dataLines: Array = []; + let eventName: string | undefined; + + for (const line of lines) { + if (line.startsWith("data:")) { + dataLines.push(line.replace(/^data:\s*/, "")); + } else if (line.startsWith("event:")) { + eventName = line.replace(/^event:\s*/, ""); + } else if (line.startsWith("id:")) { + lastEventId = line.replace(/^id:\s*/, ""); + } else if (line.startsWith("retry:")) { + const parsed = Number.parseInt(line.replace(/^retry:\s*/, ""), 10); + if (!Number.isNaN(parsed)) { + retryDelay = parsed; + } + } + } + + let data: unknown; + let parsedJson = false; + + if (dataLines.length) { + const rawData = dataLines.join("\n"); + try { + data = JSON.parse(rawData); + parsedJson = true; + } catch { + data = rawData; + } + } + + if (parsedJson) { + if (responseValidator) { + await responseValidator(data); + } + + if (responseTransformer) { + data = await responseTransformer(data); + } + } + + onSseEvent?.({ + data, + event: eventName, + id: lastEventId, + retry: retryDelay, + }); + + if (dataLines.length) { + yield data as any; + } + } + } + } finally { + signal.removeEventListener("abort", abortHandler); + reader.releaseLock(); + } + + break; // exit loop on normal completion + } catch (error) { + // connection failed or aborted; retry after delay + onSseError?.(error); + + if (sseMaxRetryAttempts !== undefined && attempt >= sseMaxRetryAttempts) { + break; // stop after firing error + } + + // exponential backoff: double retry each attempt, cap at 30s + const backoff = Math.min(retryDelay * 2 ** (attempt - 1), sseMaxRetryDelay ?? 
30000); + await sleep(backoff); + } + } + }; + + const stream = createStream(); + + return { stream }; +}; diff --git a/web_src/src/api-client/core/types.gen.ts b/web_src/src/api-client/core/types.gen.ts new file mode 100644 index 0000000000..7047c8161e --- /dev/null +++ b/web_src/src/api-client/core/types.gen.ts @@ -0,0 +1,86 @@ +// This file is auto-generated by @hey-api/openapi-ts + +import type { Auth, AuthToken } from "./auth.gen"; +import type { BodySerializer, QuerySerializer, QuerySerializerOptions } from "./bodySerializer.gen"; + +export type HttpMethod = "connect" | "delete" | "get" | "head" | "options" | "patch" | "post" | "put" | "trace"; + +export type Client = { + /** + * Returns the final request URL. + */ + buildUrl: BuildUrlFn; + getConfig: () => Config; + request: RequestFn; + setConfig: (config: Config) => Config; +} & { + [K in HttpMethod]: MethodFn; +} & ([SseFn] extends [never] ? { sse?: never } : { sse: { [K in HttpMethod]: SseFn } }); + +export interface Config { + /** + * Auth token or a function returning auth token. The resolved value will be + * added to the request payload as defined by its `security` array. + */ + auth?: ((auth: Auth) => Promise | AuthToken) | AuthToken; + /** + * A function for serializing request body parameter. By default, + * {@link JSON.stringify()} will be used. + */ + bodySerializer?: BodySerializer | null; + /** + * An object containing any HTTP headers that you want to pre-populate your + * `Headers` object with. + * + * {@link https://developer.mozilla.org/docs/Web/API/Headers/Headers#init See more} + */ + headers?: + | RequestInit["headers"] + | Record; + /** + * The request method. + * + * {@link https://developer.mozilla.org/docs/Web/API/fetch#method See more} + */ + method?: Uppercase; + /** + * A function for serializing request query parameters. By default, arrays + * will be exploded in form style, objects will be exploded in deepObject + * style, and reserved characters are percent-encoded. 
+ * + * This method will have no effect if the native `paramsSerializer()` Axios + * API function is used. + * + * {@link https://swagger.io/docs/specification/serialization/#query View examples} + */ + querySerializer?: QuerySerializer | QuerySerializerOptions; + /** + * A function validating request data. This is useful if you want to ensure + * the request conforms to the desired shape, so it can be safely sent to + * the server. + */ + requestValidator?: (data: unknown) => Promise; + /** + * A function transforming response data before it's returned. This is useful + * for post-processing data, e.g. converting ISO strings into Date objects. + */ + responseTransformer?: (data: unknown) => Promise; + /** + * A function validating response data. This is useful if you want to ensure + * the response conforms to the desired shape, so it can be safely passed to + * the transformers and returned to the user. + */ + responseValidator?: (data: unknown) => Promise; +} + +type IsExactlyNeverOrNeverUndefined = [T] extends [never] + ? true + : [T] extends [never | undefined] + ? [undefined] extends [T] + ? false + : true + : false; + +export type OmitNever> = { + [K in keyof T as IsExactlyNeverOrNeverUndefined extends true ? 
never : K]: T[K]; +}; diff --git a/web_src/src/api-client/core/utils.gen.ts b/web_src/src/api-client/core/utils.gen.ts new file mode 100644 index 0000000000..f210e72a46 --- /dev/null +++ b/web_src/src/api-client/core/utils.gen.ts @@ -0,0 +1,137 @@ +// This file is auto-generated by @hey-api/openapi-ts + +import type { BodySerializer, QuerySerializer } from "./bodySerializer.gen"; +import { + type ArraySeparatorStyle, + serializeArrayParam, + serializeObjectParam, + serializePrimitiveParam, +} from "./pathSerializer.gen"; + +export interface PathSerializer { + path: Record; + url: string; +} + +export const PATH_PARAM_RE = /\{[^{}]+\}/g; + +export const defaultPathSerializer = ({ path, url: _url }: PathSerializer) => { + let url = _url; + const matches = _url.match(PATH_PARAM_RE); + if (matches) { + for (const match of matches) { + let explode = false; + let name = match.substring(1, match.length - 1); + let style: ArraySeparatorStyle = "simple"; + + if (name.endsWith("*")) { + explode = true; + name = name.substring(0, name.length - 1); + } + + if (name.startsWith(".")) { + name = name.substring(1); + style = "label"; + } else if (name.startsWith(";")) { + name = name.substring(1); + style = "matrix"; + } + + const value = path[name]; + + if (value === undefined || value === null) { + continue; + } + + if (Array.isArray(value)) { + url = url.replace(match, serializeArrayParam({ explode, name, style, value })); + continue; + } + + if (typeof value === "object") { + url = url.replace( + match, + serializeObjectParam({ + explode, + name, + style, + value: value as Record, + valueOnly: true, + }), + ); + continue; + } + + if (style === "matrix") { + url = url.replace( + match, + `;${serializePrimitiveParam({ + name, + value: value as string, + })}`, + ); + continue; + } + + const replaceValue = encodeURIComponent(style === "label" ? 
`.${value as string}` : (value as string)); + url = url.replace(match, replaceValue); + } + } + return url; +}; + +export const getUrl = ({ + baseUrl, + path, + query, + querySerializer, + url: _url, +}: { + baseUrl?: string; + path?: Record; + query?: Record; + querySerializer: QuerySerializer; + url: string; +}) => { + const pathUrl = _url.startsWith("/") ? _url : `/${_url}`; + let url = (baseUrl ?? "") + pathUrl; + if (path) { + url = defaultPathSerializer({ path, url }); + } + let search = query ? querySerializer(query) : ""; + if (search.startsWith("?")) { + search = search.substring(1); + } + if (search) { + url += `?${search}`; + } + return url; +}; + +export function getValidRequestBody(options: { + body?: unknown; + bodySerializer?: BodySerializer | null; + serializedBody?: unknown; +}) { + const hasBody = options.body !== undefined; + const isSerializedBody = hasBody && options.bodySerializer; + + if (isSerializedBody) { + if ("serializedBody" in options) { + const hasSerializedBody = options.serializedBody !== undefined && options.serializedBody !== ""; + + return hasSerializedBody ? options.serializedBody : null; + } + + // not all clients implement a serializedBody property (i.e. client-axios) + return options.body !== "" ? 
options.body : null; + } + + // plain/text body + if (hasBody) { + return options.body; + } + + // no body was provided + return undefined; +} diff --git a/web_src/src/api-client/index.ts b/web_src/src/api-client/index.ts index da87079367..839884e0e4 100644 --- a/web_src/src/api-client/index.ts +++ b/web_src/src/api-client/index.ts @@ -1,3 +1,645 @@ // This file is auto-generated by @hey-api/openapi-ts -export * from "./types.gen"; -export * from "./sdk.gen"; + +export { + blueprintsCreateBlueprint, + blueprintsDeleteBlueprint, + blueprintsDescribeBlueprint, + blueprintsListBlueprints, + blueprintsUpdateBlueprint, + canvasesCancelExecution, + canvasesCreateCanvas, + canvasesDeleteCanvas, + canvasesDeleteNodeQueueItem, + canvasesDescribeCanvas, + canvasesEmitNodeEvent, + canvasesInvokeNodeExecutionAction, + canvasesInvokeNodeTriggerAction, + canvasesListCanvases, + canvasesListCanvasEvents, + canvasesListChildExecutions, + canvasesListEventExecutions, + canvasesListNodeEvents, + canvasesListNodeExecutions, + canvasesListNodeQueueItems, + canvasesResolveExecutionErrors, + canvasesUpdateCanvas, + canvasesUpdateNodePause, + componentsDescribeComponent, + componentsListComponentActions, + componentsListComponents, + groupsAddUserToGroup, + groupsCreateGroup, + groupsDeleteGroup, + groupsDescribeGroup, + groupsListGroups, + groupsListGroupUsers, + groupsRemoveUserFromGroup, + groupsUpdateGroup, + integrationsListIntegrations, + meMe, + meRegenerateToken, + type Options, + organizationsAcceptInviteLink, + organizationsCreateIntegration, + organizationsCreateInvitation, + organizationsDeleteIntegration, + organizationsDeleteOrganization, + organizationsDescribeIntegration, + organizationsDescribeOrganization, + organizationsGetInviteLink, + organizationsListIntegrationResources, + organizationsListIntegrations, + organizationsListInvitations, + organizationsRemoveInvitation, + organizationsRemoveUser, + organizationsResetInviteLink, + organizationsUpdateIntegration, + 
organizationsUpdateInviteLink, + organizationsUpdateOrganization, + rolesAssignRole, + rolesCreateRole, + rolesDeleteRole, + rolesDescribeRole, + rolesListRoles, + rolesUpdateRole, + secretsCreateSecret, + secretsDeleteSecret, + secretsDeleteSecretKey, + secretsDescribeSecret, + secretsListSecrets, + secretsSetSecretKey, + secretsUpdateSecret, + secretsUpdateSecretName, + triggersDescribeTrigger, + triggersListTriggers, + usersListUserPermissions, + usersListUserRoles, + usersListUsers, + widgetsDescribeWidget, + widgetsListWidgets, +} from "./sdk.gen"; +export type { + AuthorizationDomainType, + AuthorizationPermission, + BlueprintsBlueprint, + BlueprintsCreateBlueprintData, + BlueprintsCreateBlueprintError, + BlueprintsCreateBlueprintErrors, + BlueprintsCreateBlueprintRequest, + BlueprintsCreateBlueprintResponse, + BlueprintsCreateBlueprintResponse2, + BlueprintsCreateBlueprintResponses, + BlueprintsDeleteBlueprintData, + BlueprintsDeleteBlueprintError, + BlueprintsDeleteBlueprintErrors, + BlueprintsDeleteBlueprintResponse, + BlueprintsDeleteBlueprintResponse2, + BlueprintsDeleteBlueprintResponses, + BlueprintsDescribeBlueprintData, + BlueprintsDescribeBlueprintError, + BlueprintsDescribeBlueprintErrors, + BlueprintsDescribeBlueprintResponse, + BlueprintsDescribeBlueprintResponse2, + BlueprintsDescribeBlueprintResponses, + BlueprintsListBlueprintsData, + BlueprintsListBlueprintsError, + BlueprintsListBlueprintsErrors, + BlueprintsListBlueprintsResponse, + BlueprintsListBlueprintsResponse2, + BlueprintsListBlueprintsResponses, + BlueprintsUpdateBlueprintBody, + BlueprintsUpdateBlueprintData, + BlueprintsUpdateBlueprintError, + BlueprintsUpdateBlueprintErrors, + BlueprintsUpdateBlueprintResponse, + BlueprintsUpdateBlueprintResponse2, + BlueprintsUpdateBlueprintResponses, + CanvasesCancelExecutionBody, + CanvasesCancelExecutionData, + CanvasesCancelExecutionError, + CanvasesCancelExecutionErrors, + CanvasesCancelExecutionResponse, + CanvasesCancelExecutionResponse2, 
+ CanvasesCancelExecutionResponses, + CanvasesCanvas, + CanvasesCanvasEvent, + CanvasesCanvasEventWithExecutions, + CanvasesCanvasMetadata, + CanvasesCanvasNodeExecution, + CanvasesCanvasNodeQueueItem, + CanvasesCanvasSpec, + CanvasesCanvasStatus, + CanvasesCreateCanvasData, + CanvasesCreateCanvasError, + CanvasesCreateCanvasErrors, + CanvasesCreateCanvasRequest, + CanvasesCreateCanvasResponse, + CanvasesCreateCanvasResponse2, + CanvasesCreateCanvasResponses, + CanvasesDeleteCanvasData, + CanvasesDeleteCanvasError, + CanvasesDeleteCanvasErrors, + CanvasesDeleteCanvasResponse, + CanvasesDeleteCanvasResponse2, + CanvasesDeleteCanvasResponses, + CanvasesDeleteNodeQueueItemData, + CanvasesDeleteNodeQueueItemError, + CanvasesDeleteNodeQueueItemErrors, + CanvasesDeleteNodeQueueItemResponse, + CanvasesDeleteNodeQueueItemResponse2, + CanvasesDeleteNodeQueueItemResponses, + CanvasesDescribeCanvasData, + CanvasesDescribeCanvasError, + CanvasesDescribeCanvasErrors, + CanvasesDescribeCanvasResponse, + CanvasesDescribeCanvasResponse2, + CanvasesDescribeCanvasResponses, + CanvasesEmitNodeEventBody, + CanvasesEmitNodeEventData, + CanvasesEmitNodeEventError, + CanvasesEmitNodeEventErrors, + CanvasesEmitNodeEventResponse, + CanvasesEmitNodeEventResponse2, + CanvasesEmitNodeEventResponses, + CanvasesInvokeNodeExecutionActionBody, + CanvasesInvokeNodeExecutionActionData, + CanvasesInvokeNodeExecutionActionError, + CanvasesInvokeNodeExecutionActionErrors, + CanvasesInvokeNodeExecutionActionResponse, + CanvasesInvokeNodeExecutionActionResponse2, + CanvasesInvokeNodeExecutionActionResponses, + CanvasesInvokeNodeTriggerActionBody, + CanvasesInvokeNodeTriggerActionData, + CanvasesInvokeNodeTriggerActionError, + CanvasesInvokeNodeTriggerActionErrors, + CanvasesInvokeNodeTriggerActionResponse, + CanvasesInvokeNodeTriggerActionResponse2, + CanvasesInvokeNodeTriggerActionResponses, + CanvasesListCanvasesData, + CanvasesListCanvasesError, + CanvasesListCanvasesErrors, + 
CanvasesListCanvasesResponse, + CanvasesListCanvasesResponse2, + CanvasesListCanvasesResponses, + CanvasesListCanvasEventsData, + CanvasesListCanvasEventsError, + CanvasesListCanvasEventsErrors, + CanvasesListCanvasEventsResponse, + CanvasesListCanvasEventsResponse2, + CanvasesListCanvasEventsResponses, + CanvasesListChildExecutionsBody, + CanvasesListChildExecutionsData, + CanvasesListChildExecutionsError, + CanvasesListChildExecutionsErrors, + CanvasesListChildExecutionsResponse, + CanvasesListChildExecutionsResponse2, + CanvasesListChildExecutionsResponses, + CanvasesListEventExecutionsData, + CanvasesListEventExecutionsError, + CanvasesListEventExecutionsErrors, + CanvasesListEventExecutionsResponse, + CanvasesListEventExecutionsResponse2, + CanvasesListEventExecutionsResponses, + CanvasesListNodeEventsData, + CanvasesListNodeEventsError, + CanvasesListNodeEventsErrors, + CanvasesListNodeEventsResponse, + CanvasesListNodeEventsResponse2, + CanvasesListNodeEventsResponses, + CanvasesListNodeExecutionsData, + CanvasesListNodeExecutionsError, + CanvasesListNodeExecutionsErrors, + CanvasesListNodeExecutionsResponse, + CanvasesListNodeExecutionsResponse2, + CanvasesListNodeExecutionsResponses, + CanvasesListNodeQueueItemsData, + CanvasesListNodeQueueItemsError, + CanvasesListNodeQueueItemsErrors, + CanvasesListNodeQueueItemsResponse, + CanvasesListNodeQueueItemsResponse2, + CanvasesListNodeQueueItemsResponses, + CanvasesResolveExecutionErrorsBody, + CanvasesResolveExecutionErrorsData, + CanvasesResolveExecutionErrorsError, + CanvasesResolveExecutionErrorsErrors, + CanvasesResolveExecutionErrorsResponse, + CanvasesResolveExecutionErrorsResponse2, + CanvasesResolveExecutionErrorsResponses, + CanvasesUpdateCanvasBody, + CanvasesUpdateCanvasData, + CanvasesUpdateCanvasError, + CanvasesUpdateCanvasErrors, + CanvasesUpdateCanvasResponse, + CanvasesUpdateCanvasResponse2, + CanvasesUpdateCanvasResponses, + CanvasesUpdateNodePauseBody, + CanvasesUpdateNodePauseData, + 
CanvasesUpdateNodePauseError, + CanvasesUpdateNodePauseErrors, + CanvasesUpdateNodePauseResponse, + CanvasesUpdateNodePauseResponse2, + CanvasesUpdateNodePauseResponses, + CanvasNodeExecutionResult, + CanvasNodeExecutionResultReason, + CanvasNodeExecutionState, + ClientOptions, + ComponentsComponent, + ComponentsComponentAction, + ComponentsDescribeComponentData, + ComponentsDescribeComponentError, + ComponentsDescribeComponentErrors, + ComponentsDescribeComponentResponse, + ComponentsDescribeComponentResponse2, + ComponentsDescribeComponentResponses, + ComponentsEdge, + ComponentsIntegrationRef, + ComponentsListComponentActionsData, + ComponentsListComponentActionsError, + ComponentsListComponentActionsErrors, + ComponentsListComponentActionsResponse, + ComponentsListComponentActionsResponse2, + ComponentsListComponentActionsResponses, + ComponentsListComponentsData, + ComponentsListComponentsError, + ComponentsListComponentsErrors, + ComponentsListComponentsResponse, + ComponentsListComponentsResponse2, + ComponentsListComponentsResponses, + ComponentsNode, + ComponentsNodeType, + ComponentsPosition, + ConfigurationAnyPredicateListTypeOptions, + ConfigurationDateTimeTypeOptions, + ConfigurationDateTypeOptions, + ConfigurationExpressionTypeOptions, + ConfigurationField, + ConfigurationListItemDefinition, + ConfigurationListTypeOptions, + ConfigurationMultiSelectTypeOptions, + ConfigurationNumberTypeOptions, + ConfigurationObjectTypeOptions, + ConfigurationParameterRef, + ConfigurationParameterValueFrom, + ConfigurationRequiredCondition, + ConfigurationResourceTypeOptions, + ConfigurationSelectOption, + ConfigurationSelectTypeOptions, + ConfigurationStringTypeOptions, + ConfigurationTextTypeOptions, + ConfigurationTimeTypeOptions, + ConfigurationTypeOptions, + ConfigurationValidationRule, + ConfigurationVisibilityCondition, + GooglerpcStatus, + GroupsAddUserToGroupBody, + GroupsAddUserToGroupData, + GroupsAddUserToGroupError, + GroupsAddUserToGroupErrors, + 
GroupsAddUserToGroupResponse, + GroupsAddUserToGroupResponse2, + GroupsAddUserToGroupResponses, + GroupsCreateGroupData, + GroupsCreateGroupError, + GroupsCreateGroupErrors, + GroupsCreateGroupRequest, + GroupsCreateGroupResponse, + GroupsCreateGroupResponse2, + GroupsCreateGroupResponses, + GroupsDeleteGroupData, + GroupsDeleteGroupError, + GroupsDeleteGroupErrors, + GroupsDeleteGroupResponse, + GroupsDeleteGroupResponse2, + GroupsDeleteGroupResponses, + GroupsDescribeGroupData, + GroupsDescribeGroupError, + GroupsDescribeGroupErrors, + GroupsDescribeGroupResponse, + GroupsDescribeGroupResponse2, + GroupsDescribeGroupResponses, + GroupsGroup, + GroupsGroupMetadata, + GroupsGroupSpec, + GroupsGroupStatus, + GroupsListGroupsData, + GroupsListGroupsError, + GroupsListGroupsErrors, + GroupsListGroupsResponse, + GroupsListGroupsResponse2, + GroupsListGroupsResponses, + GroupsListGroupUsersData, + GroupsListGroupUsersError, + GroupsListGroupUsersErrors, + GroupsListGroupUsersResponse, + GroupsListGroupUsersResponse2, + GroupsListGroupUsersResponses, + GroupsRemoveUserFromGroupBody, + GroupsRemoveUserFromGroupData, + GroupsRemoveUserFromGroupError, + GroupsRemoveUserFromGroupErrors, + GroupsRemoveUserFromGroupResponse, + GroupsRemoveUserFromGroupResponse2, + GroupsRemoveUserFromGroupResponses, + GroupsUpdateGroupBody, + GroupsUpdateGroupData, + GroupsUpdateGroupError, + GroupsUpdateGroupErrors, + GroupsUpdateGroupResponse, + GroupsUpdateGroupResponse2, + GroupsUpdateGroupResponses, + IntegrationNodeRef, + IntegrationsIntegrationDefinition, + IntegrationsListIntegrationsData, + IntegrationsListIntegrationsError, + IntegrationsListIntegrationsErrors, + IntegrationsListIntegrationsResponse, + IntegrationsListIntegrationsResponses, + MeMeData, + MeMeError, + MeMeErrors, + MeMeResponse, + MeMeResponses, + MeRegenerateTokenData, + MeRegenerateTokenError, + MeRegenerateTokenErrors, + MeRegenerateTokenResponse, + MeRegenerateTokenResponse2, + MeRegenerateTokenResponses, + 
NodeBlueprintRef, + NodeComponentRef, + NodeTriggerRef, + NodeWidgetRef, + OrganizationsAcceptInviteLinkData, + OrganizationsAcceptInviteLinkError, + OrganizationsAcceptInviteLinkErrors, + OrganizationsAcceptInviteLinkResponse, + OrganizationsAcceptInviteLinkResponses, + OrganizationsBrowserAction, + OrganizationsCreateIntegrationBody, + OrganizationsCreateIntegrationData, + OrganizationsCreateIntegrationError, + OrganizationsCreateIntegrationErrors, + OrganizationsCreateIntegrationResponse, + OrganizationsCreateIntegrationResponse2, + OrganizationsCreateIntegrationResponses, + OrganizationsCreateInvitationBody, + OrganizationsCreateInvitationData, + OrganizationsCreateInvitationError, + OrganizationsCreateInvitationErrors, + OrganizationsCreateInvitationResponse, + OrganizationsCreateInvitationResponse2, + OrganizationsCreateInvitationResponses, + OrganizationsDeleteIntegrationData, + OrganizationsDeleteIntegrationError, + OrganizationsDeleteIntegrationErrors, + OrganizationsDeleteIntegrationResponse, + OrganizationsDeleteIntegrationResponse2, + OrganizationsDeleteIntegrationResponses, + OrganizationsDeleteOrganizationData, + OrganizationsDeleteOrganizationError, + OrganizationsDeleteOrganizationErrors, + OrganizationsDeleteOrganizationResponse, + OrganizationsDeleteOrganizationResponse2, + OrganizationsDeleteOrganizationResponses, + OrganizationsDescribeIntegrationData, + OrganizationsDescribeIntegrationError, + OrganizationsDescribeIntegrationErrors, + OrganizationsDescribeIntegrationResponse, + OrganizationsDescribeIntegrationResponse2, + OrganizationsDescribeIntegrationResponses, + OrganizationsDescribeOrganizationData, + OrganizationsDescribeOrganizationError, + OrganizationsDescribeOrganizationErrors, + OrganizationsDescribeOrganizationResponse, + OrganizationsDescribeOrganizationResponse2, + OrganizationsDescribeOrganizationResponses, + OrganizationsGetInviteLinkData, + OrganizationsGetInviteLinkError, + OrganizationsGetInviteLinkErrors, + 
OrganizationsGetInviteLinkResponse, + OrganizationsGetInviteLinkResponse2, + OrganizationsGetInviteLinkResponses, + OrganizationsIntegration, + OrganizationsIntegrationMetadata, + OrganizationsIntegrationResourceRef, + OrganizationsIntegrationSpec, + OrganizationsIntegrationStatus, + OrganizationsInvitation, + OrganizationsInviteLink, + OrganizationsListIntegrationResourcesData, + OrganizationsListIntegrationResourcesError, + OrganizationsListIntegrationResourcesErrors, + OrganizationsListIntegrationResourcesResponse, + OrganizationsListIntegrationResourcesResponse2, + OrganizationsListIntegrationResourcesResponses, + OrganizationsListIntegrationsData, + OrganizationsListIntegrationsError, + OrganizationsListIntegrationsErrors, + OrganizationsListIntegrationsResponse, + OrganizationsListIntegrationsResponses, + OrganizationsListInvitationsData, + OrganizationsListInvitationsError, + OrganizationsListInvitationsErrors, + OrganizationsListInvitationsResponse, + OrganizationsListInvitationsResponse2, + OrganizationsListInvitationsResponses, + OrganizationsOrganization, + OrganizationsOrganizationMetadata, + OrganizationsRemoveInvitationData, + OrganizationsRemoveInvitationError, + OrganizationsRemoveInvitationErrors, + OrganizationsRemoveInvitationResponse, + OrganizationsRemoveInvitationResponse2, + OrganizationsRemoveInvitationResponses, + OrganizationsRemoveUserData, + OrganizationsRemoveUserError, + OrganizationsRemoveUserErrors, + OrganizationsRemoveUserResponse, + OrganizationsRemoveUserResponse2, + OrganizationsRemoveUserResponses, + OrganizationsResetInviteLinkData, + OrganizationsResetInviteLinkError, + OrganizationsResetInviteLinkErrors, + OrganizationsResetInviteLinkResponse, + OrganizationsResetInviteLinkResponse2, + OrganizationsResetInviteLinkResponses, + OrganizationsUpdateIntegrationBody, + OrganizationsUpdateIntegrationData, + OrganizationsUpdateIntegrationError, + OrganizationsUpdateIntegrationErrors, + OrganizationsUpdateIntegrationResponse, + 
OrganizationsUpdateIntegrationResponse2, + OrganizationsUpdateIntegrationResponses, + OrganizationsUpdateInviteLinkBody, + OrganizationsUpdateInviteLinkData, + OrganizationsUpdateInviteLinkError, + OrganizationsUpdateInviteLinkErrors, + OrganizationsUpdateInviteLinkResponse, + OrganizationsUpdateInviteLinkResponse2, + OrganizationsUpdateInviteLinkResponses, + OrganizationsUpdateOrganizationBody, + OrganizationsUpdateOrganizationData, + OrganizationsUpdateOrganizationError, + OrganizationsUpdateOrganizationErrors, + OrganizationsUpdateOrganizationResponse, + OrganizationsUpdateOrganizationResponse2, + OrganizationsUpdateOrganizationResponses, + ProtobufAny, + ProtobufNullValue, + RolesAssignRoleBody, + RolesAssignRoleData, + RolesAssignRoleError, + RolesAssignRoleErrors, + RolesAssignRoleResponse, + RolesAssignRoleResponse2, + RolesAssignRoleResponses, + RolesCreateRoleData, + RolesCreateRoleError, + RolesCreateRoleErrors, + RolesCreateRoleRequest, + RolesCreateRoleResponse, + RolesCreateRoleResponse2, + RolesCreateRoleResponses, + RolesDeleteRoleData, + RolesDeleteRoleError, + RolesDeleteRoleErrors, + RolesDeleteRoleResponse, + RolesDeleteRoleResponse2, + RolesDeleteRoleResponses, + RolesDescribeRoleData, + RolesDescribeRoleError, + RolesDescribeRoleErrors, + RolesDescribeRoleResponse, + RolesDescribeRoleResponse2, + RolesDescribeRoleResponses, + RolesListRolesData, + RolesListRolesError, + RolesListRolesErrors, + RolesListRolesResponse, + RolesListRolesResponse2, + RolesListRolesResponses, + RolesRole, + RolesRoleMetadata, + RolesRoleSpec, + RolesUpdateRoleBody, + RolesUpdateRoleData, + RolesUpdateRoleError, + RolesUpdateRoleErrors, + RolesUpdateRoleResponse, + RolesUpdateRoleResponse2, + RolesUpdateRoleResponses, + SecretLocal, + SecretProvider, + SecretsCreateSecretData, + SecretsCreateSecretError, + SecretsCreateSecretErrors, + SecretsCreateSecretRequest, + SecretsCreateSecretResponse, + SecretsCreateSecretResponse2, + SecretsCreateSecretResponses, + 
SecretsDeleteSecretData, + SecretsDeleteSecretError, + SecretsDeleteSecretErrors, + SecretsDeleteSecretKeyData, + SecretsDeleteSecretKeyError, + SecretsDeleteSecretKeyErrors, + SecretsDeleteSecretKeyResponse, + SecretsDeleteSecretKeyResponse2, + SecretsDeleteSecretKeyResponses, + SecretsDeleteSecretResponse, + SecretsDeleteSecretResponse2, + SecretsDeleteSecretResponses, + SecretsDescribeSecretData, + SecretsDescribeSecretError, + SecretsDescribeSecretErrors, + SecretsDescribeSecretResponse, + SecretsDescribeSecretResponse2, + SecretsDescribeSecretResponses, + SecretsListSecretsData, + SecretsListSecretsError, + SecretsListSecretsErrors, + SecretsListSecretsResponse, + SecretsListSecretsResponse2, + SecretsListSecretsResponses, + SecretsSecret, + SecretsSecretMetadata, + SecretsSecretSpec, + SecretsSetSecretKeyBody, + SecretsSetSecretKeyData, + SecretsSetSecretKeyError, + SecretsSetSecretKeyErrors, + SecretsSetSecretKeyResponse, + SecretsSetSecretKeyResponse2, + SecretsSetSecretKeyResponses, + SecretsUpdateSecretBody, + SecretsUpdateSecretData, + SecretsUpdateSecretError, + SecretsUpdateSecretErrors, + SecretsUpdateSecretNameBody, + SecretsUpdateSecretNameData, + SecretsUpdateSecretNameError, + SecretsUpdateSecretNameErrors, + SecretsUpdateSecretNameResponse, + SecretsUpdateSecretNameResponse2, + SecretsUpdateSecretNameResponses, + SecretsUpdateSecretResponse, + SecretsUpdateSecretResponse2, + SecretsUpdateSecretResponses, + SuperplaneBlueprintsOutputChannel, + SuperplaneBlueprintsUserRef, + SuperplaneCanvasesUserRef, + SuperplaneComponentsOutputChannel, + SuperplaneIntegrationsListIntegrationsResponse, + SuperplaneMeUser, + SuperplaneOrganizationsListIntegrationsResponse, + SuperplaneUsersUser, + TriggersDescribeTriggerData, + TriggersDescribeTriggerError, + TriggersDescribeTriggerErrors, + TriggersDescribeTriggerResponse, + TriggersDescribeTriggerResponse2, + TriggersDescribeTriggerResponses, + TriggersListTriggersData, + TriggersListTriggersError, + 
TriggersListTriggersErrors, + TriggersListTriggersResponse, + TriggersListTriggersResponse2, + TriggersListTriggersResponses, + TriggersTrigger, + UsersAccountProvider, + UsersListUserPermissionsData, + UsersListUserPermissionsError, + UsersListUserPermissionsErrors, + UsersListUserPermissionsResponse, + UsersListUserPermissionsResponse2, + UsersListUserPermissionsResponses, + UsersListUserRolesData, + UsersListUserRolesError, + UsersListUserRolesErrors, + UsersListUserRolesResponse, + UsersListUserRolesResponse2, + UsersListUserRolesResponses, + UsersListUsersData, + UsersListUsersError, + UsersListUsersErrors, + UsersListUsersResponse, + UsersListUsersResponse2, + UsersListUsersResponses, + UsersUserMetadata, + UsersUserRoleAssignment, + UsersUserSpec, + UsersUserStatus, + WidgetsDescribeWidgetData, + WidgetsDescribeWidgetError, + WidgetsDescribeWidgetErrors, + WidgetsDescribeWidgetResponse, + WidgetsDescribeWidgetResponse2, + WidgetsDescribeWidgetResponses, + WidgetsListWidgetsData, + WidgetsListWidgetsError, + WidgetsListWidgetsErrors, + WidgetsListWidgetsResponse, + WidgetsListWidgetsResponse2, + WidgetsListWidgetsResponses, + WidgetsWidget, +} from "./types.gen"; diff --git a/web_src/src/api-client/sdk.gen.ts b/web_src/src/api-client/sdk.gen.ts index 119751f8a3..f6a014a166 100644 --- a/web_src/src/api-client/sdk.gen.ts +++ b/web_src/src/api-client/sdk.gen.ts @@ -1,236 +1,236 @@ // This file is auto-generated by @hey-api/openapi-ts -import type { Options as ClientOptions, TDataShape, Client } from "@hey-api/client-fetch"; +import type { Client, Options as Options2, TDataShape } from "./client"; +import { client } from "./client.gen"; import type { - BlueprintsListBlueprintsData, - BlueprintsListBlueprintsResponse2, - BlueprintsListBlueprintsError, BlueprintsCreateBlueprintData, - BlueprintsCreateBlueprintResponse2, - BlueprintsCreateBlueprintError, + BlueprintsCreateBlueprintErrors, + BlueprintsCreateBlueprintResponses, BlueprintsDeleteBlueprintData, - 
BlueprintsDeleteBlueprintResponse2, - BlueprintsDeleteBlueprintError, + BlueprintsDeleteBlueprintErrors, + BlueprintsDeleteBlueprintResponses, BlueprintsDescribeBlueprintData, - BlueprintsDescribeBlueprintResponse2, - BlueprintsDescribeBlueprintError, + BlueprintsDescribeBlueprintErrors, + BlueprintsDescribeBlueprintResponses, + BlueprintsListBlueprintsData, + BlueprintsListBlueprintsErrors, + BlueprintsListBlueprintsResponses, BlueprintsUpdateBlueprintData, - BlueprintsUpdateBlueprintResponse2, - BlueprintsUpdateBlueprintError, - CanvasesListCanvasesData, - CanvasesListCanvasesResponse2, - CanvasesListCanvasesError, + BlueprintsUpdateBlueprintErrors, + BlueprintsUpdateBlueprintResponses, + CanvasesCancelExecutionData, + CanvasesCancelExecutionErrors, + CanvasesCancelExecutionResponses, CanvasesCreateCanvasData, - CanvasesCreateCanvasResponse2, - CanvasesCreateCanvasError, - CanvasesListCanvasEventsData, - CanvasesListCanvasEventsResponse2, - CanvasesListCanvasEventsError, - CanvasesListEventExecutionsData, - CanvasesListEventExecutionsResponse2, - CanvasesListEventExecutionsError, - CanvasesResolveExecutionErrorsData, - CanvasesResolveExecutionErrorsResponse2, - CanvasesResolveExecutionErrorsError, + CanvasesCreateCanvasErrors, + CanvasesCreateCanvasResponses, + CanvasesDeleteCanvasData, + CanvasesDeleteCanvasErrors, + CanvasesDeleteCanvasResponses, + CanvasesDeleteNodeQueueItemData, + CanvasesDeleteNodeQueueItemErrors, + CanvasesDeleteNodeQueueItemResponses, + CanvasesDescribeCanvasData, + CanvasesDescribeCanvasErrors, + CanvasesDescribeCanvasResponses, + CanvasesEmitNodeEventData, + CanvasesEmitNodeEventErrors, + CanvasesEmitNodeEventResponses, CanvasesInvokeNodeExecutionActionData, - CanvasesInvokeNodeExecutionActionResponse2, - CanvasesInvokeNodeExecutionActionError, - CanvasesCancelExecutionData, - CanvasesCancelExecutionResponse2, - CanvasesCancelExecutionError, + CanvasesInvokeNodeExecutionActionErrors, + CanvasesInvokeNodeExecutionActionResponses, + 
CanvasesInvokeNodeTriggerActionData, + CanvasesInvokeNodeTriggerActionErrors, + CanvasesInvokeNodeTriggerActionResponses, + CanvasesListCanvasesData, + CanvasesListCanvasesErrors, + CanvasesListCanvasesResponses, + CanvasesListCanvasEventsData, + CanvasesListCanvasEventsErrors, + CanvasesListCanvasEventsResponses, CanvasesListChildExecutionsData, - CanvasesListChildExecutionsResponse2, - CanvasesListChildExecutionsError, + CanvasesListChildExecutionsErrors, + CanvasesListChildExecutionsResponses, + CanvasesListEventExecutionsData, + CanvasesListEventExecutionsErrors, + CanvasesListEventExecutionsResponses, CanvasesListNodeEventsData, - CanvasesListNodeEventsResponse2, - CanvasesListNodeEventsError, - CanvasesEmitNodeEventData, - CanvasesEmitNodeEventResponse2, - CanvasesEmitNodeEventError, + CanvasesListNodeEventsErrors, + CanvasesListNodeEventsResponses, CanvasesListNodeExecutionsData, - CanvasesListNodeExecutionsResponse2, - CanvasesListNodeExecutionsError, - CanvasesUpdateNodePauseData, - CanvasesUpdateNodePauseResponse2, - CanvasesUpdateNodePauseError, + CanvasesListNodeExecutionsErrors, + CanvasesListNodeExecutionsResponses, CanvasesListNodeQueueItemsData, - CanvasesListNodeQueueItemsResponse2, - CanvasesListNodeQueueItemsError, - CanvasesDeleteNodeQueueItemData, - CanvasesDeleteNodeQueueItemResponse2, - CanvasesDeleteNodeQueueItemError, - CanvasesInvokeNodeTriggerActionData, - CanvasesInvokeNodeTriggerActionResponse2, - CanvasesInvokeNodeTriggerActionError, - CanvasesDeleteCanvasData, - CanvasesDeleteCanvasResponse2, - CanvasesDeleteCanvasError, - CanvasesDescribeCanvasData, - CanvasesDescribeCanvasResponse2, - CanvasesDescribeCanvasError, + CanvasesListNodeQueueItemsErrors, + CanvasesListNodeQueueItemsResponses, + CanvasesResolveExecutionErrorsData, + CanvasesResolveExecutionErrorsErrors, + CanvasesResolveExecutionErrorsResponses, CanvasesUpdateCanvasData, - CanvasesUpdateCanvasResponse2, - CanvasesUpdateCanvasError, - ComponentsListComponentsData, - 
ComponentsListComponentsResponse2, - ComponentsListComponentsError, + CanvasesUpdateCanvasErrors, + CanvasesUpdateCanvasResponses, + CanvasesUpdateNodePauseData, + CanvasesUpdateNodePauseErrors, + CanvasesUpdateNodePauseResponses, ComponentsDescribeComponentData, - ComponentsDescribeComponentResponse2, - ComponentsDescribeComponentError, + ComponentsDescribeComponentErrors, + ComponentsDescribeComponentResponses, ComponentsListComponentActionsData, - ComponentsListComponentActionsResponse2, - ComponentsListComponentActionsError, - GroupsListGroupsData, - GroupsListGroupsResponse2, - GroupsListGroupsError, + ComponentsListComponentActionsErrors, + ComponentsListComponentActionsResponses, + ComponentsListComponentsData, + ComponentsListComponentsErrors, + ComponentsListComponentsResponses, + GroupsAddUserToGroupData, + GroupsAddUserToGroupErrors, + GroupsAddUserToGroupResponses, GroupsCreateGroupData, - GroupsCreateGroupResponse2, - GroupsCreateGroupError, + GroupsCreateGroupErrors, + GroupsCreateGroupResponses, GroupsDeleteGroupData, - GroupsDeleteGroupResponse2, - GroupsDeleteGroupError, + GroupsDeleteGroupErrors, + GroupsDeleteGroupResponses, GroupsDescribeGroupData, - GroupsDescribeGroupResponse2, - GroupsDescribeGroupError, - GroupsUpdateGroupData, - GroupsUpdateGroupResponse2, - GroupsUpdateGroupError, + GroupsDescribeGroupErrors, + GroupsDescribeGroupResponses, + GroupsListGroupsData, + GroupsListGroupsErrors, + GroupsListGroupsResponses, GroupsListGroupUsersData, - GroupsListGroupUsersResponse2, - GroupsListGroupUsersError, - GroupsAddUserToGroupData, - GroupsAddUserToGroupResponse2, - GroupsAddUserToGroupError, + GroupsListGroupUsersErrors, + GroupsListGroupUsersResponses, GroupsRemoveUserFromGroupData, - GroupsRemoveUserFromGroupResponse2, - GroupsRemoveUserFromGroupError, + GroupsRemoveUserFromGroupErrors, + GroupsRemoveUserFromGroupResponses, + GroupsUpdateGroupData, + GroupsUpdateGroupErrors, + GroupsUpdateGroupResponses, 
IntegrationsListIntegrationsData, - IntegrationsListIntegrationsResponse, - IntegrationsListIntegrationsError, - OrganizationsAcceptInviteLinkData, - OrganizationsAcceptInviteLinkResponse, - OrganizationsAcceptInviteLinkError, + IntegrationsListIntegrationsErrors, + IntegrationsListIntegrationsResponses, MeMeData, - MeMeResponse, - MeMeError, + MeMeErrors, + MeMeResponses, MeRegenerateTokenData, - MeRegenerateTokenResponse2, - MeRegenerateTokenError, - OrganizationsDeleteOrganizationData, - OrganizationsDeleteOrganizationResponse2, - OrganizationsDeleteOrganizationError, - OrganizationsDescribeOrganizationData, - OrganizationsDescribeOrganizationResponse2, - OrganizationsDescribeOrganizationError, - OrganizationsUpdateOrganizationData, - OrganizationsUpdateOrganizationResponse2, - OrganizationsUpdateOrganizationError, - OrganizationsListIntegrationsData, - OrganizationsListIntegrationsResponse, - OrganizationsListIntegrationsError, + MeRegenerateTokenErrors, + MeRegenerateTokenResponses, + OrganizationsAcceptInviteLinkData, + OrganizationsAcceptInviteLinkErrors, + OrganizationsAcceptInviteLinkResponses, OrganizationsCreateIntegrationData, - OrganizationsCreateIntegrationResponse2, - OrganizationsCreateIntegrationError, + OrganizationsCreateIntegrationErrors, + OrganizationsCreateIntegrationResponses, + OrganizationsCreateInvitationData, + OrganizationsCreateInvitationErrors, + OrganizationsCreateInvitationResponses, OrganizationsDeleteIntegrationData, - OrganizationsDeleteIntegrationResponse2, - OrganizationsDeleteIntegrationError, + OrganizationsDeleteIntegrationErrors, + OrganizationsDeleteIntegrationResponses, + OrganizationsDeleteOrganizationData, + OrganizationsDeleteOrganizationErrors, + OrganizationsDeleteOrganizationResponses, OrganizationsDescribeIntegrationData, - OrganizationsDescribeIntegrationResponse2, - OrganizationsDescribeIntegrationError, - OrganizationsUpdateIntegrationData, - OrganizationsUpdateIntegrationResponse2, - 
OrganizationsUpdateIntegrationError, + OrganizationsDescribeIntegrationErrors, + OrganizationsDescribeIntegrationResponses, + OrganizationsDescribeOrganizationData, + OrganizationsDescribeOrganizationErrors, + OrganizationsDescribeOrganizationResponses, + OrganizationsGetInviteLinkData, + OrganizationsGetInviteLinkErrors, + OrganizationsGetInviteLinkResponses, OrganizationsListIntegrationResourcesData, - OrganizationsListIntegrationResourcesResponse2, - OrganizationsListIntegrationResourcesError, + OrganizationsListIntegrationResourcesErrors, + OrganizationsListIntegrationResourcesResponses, + OrganizationsListIntegrationsData, + OrganizationsListIntegrationsErrors, + OrganizationsListIntegrationsResponses, OrganizationsListInvitationsData, - OrganizationsListInvitationsResponse2, - OrganizationsListInvitationsError, - OrganizationsCreateInvitationData, - OrganizationsCreateInvitationResponse2, - OrganizationsCreateInvitationError, + OrganizationsListInvitationsErrors, + OrganizationsListInvitationsResponses, OrganizationsRemoveInvitationData, - OrganizationsRemoveInvitationResponse2, - OrganizationsRemoveInvitationError, - OrganizationsGetInviteLinkData, - OrganizationsGetInviteLinkResponse2, - OrganizationsGetInviteLinkError, - OrganizationsUpdateInviteLinkData, - OrganizationsUpdateInviteLinkResponse2, - OrganizationsUpdateInviteLinkError, - OrganizationsResetInviteLinkData, - OrganizationsResetInviteLinkResponse2, - OrganizationsResetInviteLinkError, + OrganizationsRemoveInvitationErrors, + OrganizationsRemoveInvitationResponses, OrganizationsRemoveUserData, - OrganizationsRemoveUserResponse2, - OrganizationsRemoveUserError, - RolesListRolesData, - RolesListRolesResponse2, - RolesListRolesError, + OrganizationsRemoveUserErrors, + OrganizationsRemoveUserResponses, + OrganizationsResetInviteLinkData, + OrganizationsResetInviteLinkErrors, + OrganizationsResetInviteLinkResponses, + OrganizationsUpdateIntegrationData, + OrganizationsUpdateIntegrationErrors, + 
OrganizationsUpdateIntegrationResponses, + OrganizationsUpdateInviteLinkData, + OrganizationsUpdateInviteLinkErrors, + OrganizationsUpdateInviteLinkResponses, + OrganizationsUpdateOrganizationData, + OrganizationsUpdateOrganizationErrors, + OrganizationsUpdateOrganizationResponses, + RolesAssignRoleData, + RolesAssignRoleErrors, + RolesAssignRoleResponses, RolesCreateRoleData, - RolesCreateRoleResponse2, - RolesCreateRoleError, + RolesCreateRoleErrors, + RolesCreateRoleResponses, RolesDeleteRoleData, - RolesDeleteRoleResponse2, - RolesDeleteRoleError, + RolesDeleteRoleErrors, + RolesDeleteRoleResponses, RolesDescribeRoleData, - RolesDescribeRoleResponse2, - RolesDescribeRoleError, + RolesDescribeRoleErrors, + RolesDescribeRoleResponses, + RolesListRolesData, + RolesListRolesErrors, + RolesListRolesResponses, RolesUpdateRoleData, - RolesUpdateRoleResponse2, - RolesUpdateRoleError, - RolesAssignRoleData, - RolesAssignRoleResponse2, - RolesAssignRoleError, - SecretsListSecretsData, - SecretsListSecretsResponse2, - SecretsListSecretsError, + RolesUpdateRoleErrors, + RolesUpdateRoleResponses, SecretsCreateSecretData, - SecretsCreateSecretResponse2, - SecretsCreateSecretError, + SecretsCreateSecretErrors, + SecretsCreateSecretResponses, SecretsDeleteSecretData, - SecretsDeleteSecretResponse2, - SecretsDeleteSecretError, - SecretsDescribeSecretData, - SecretsDescribeSecretResponse2, - SecretsDescribeSecretError, - SecretsUpdateSecretData, - SecretsUpdateSecretResponse2, - SecretsUpdateSecretError, + SecretsDeleteSecretErrors, SecretsDeleteSecretKeyData, - SecretsDeleteSecretKeyResponse2, - SecretsDeleteSecretKeyError, + SecretsDeleteSecretKeyErrors, + SecretsDeleteSecretKeyResponses, + SecretsDeleteSecretResponses, + SecretsDescribeSecretData, + SecretsDescribeSecretErrors, + SecretsDescribeSecretResponses, + SecretsListSecretsData, + SecretsListSecretsErrors, + SecretsListSecretsResponses, SecretsSetSecretKeyData, - SecretsSetSecretKeyResponse2, - 
SecretsSetSecretKeyError, + SecretsSetSecretKeyErrors, + SecretsSetSecretKeyResponses, + SecretsUpdateSecretData, + SecretsUpdateSecretErrors, SecretsUpdateSecretNameData, - SecretsUpdateSecretNameResponse2, - SecretsUpdateSecretNameError, - TriggersListTriggersData, - TriggersListTriggersResponse2, - TriggersListTriggersError, + SecretsUpdateSecretNameErrors, + SecretsUpdateSecretNameResponses, + SecretsUpdateSecretResponses, TriggersDescribeTriggerData, - TriggersDescribeTriggerResponse2, - TriggersDescribeTriggerError, - UsersListUsersData, - UsersListUsersResponse2, - UsersListUsersError, + TriggersDescribeTriggerErrors, + TriggersDescribeTriggerResponses, + TriggersListTriggersData, + TriggersListTriggersErrors, + TriggersListTriggersResponses, UsersListUserPermissionsData, - UsersListUserPermissionsResponse2, - UsersListUserPermissionsError, + UsersListUserPermissionsErrors, + UsersListUserPermissionsResponses, UsersListUserRolesData, - UsersListUserRolesResponse2, - UsersListUserRolesError, - WidgetsListWidgetsData, - WidgetsListWidgetsResponse2, - WidgetsListWidgetsError, + UsersListUserRolesErrors, + UsersListUserRolesResponses, + UsersListUsersData, + UsersListUsersErrors, + UsersListUsersResponses, WidgetsDescribeWidgetData, - WidgetsDescribeWidgetResponse2, - WidgetsDescribeWidgetError, + WidgetsDescribeWidgetErrors, + WidgetsDescribeWidgetResponses, + WidgetsListWidgetsData, + WidgetsListWidgetsErrors, + WidgetsListWidgetsResponses, } from "./types.gen"; -import { client as _heyApiClient } from "./client.gen"; -export type Options = ClientOptions< +export type Options = Options2< TData, ThrowOnError > & { @@ -249,1283 +249,1124 @@ export type Options( options?: Options, -) => { - return (options?.client ?? _heyApiClient).get< - BlueprintsListBlueprintsResponse2, - BlueprintsListBlueprintsError, - ThrowOnError - >({ +) => + (options?.client ?? 
client).get({ url: "/api/v1/blueprints", ...options, }); -}; /** * Create blueprint + * * Creates a new blueprint */ export const blueprintsCreateBlueprint = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post< - BlueprintsCreateBlueprintResponse2, - BlueprintsCreateBlueprintError, - ThrowOnError - >({ +) => + (options.client ?? client).post({ url: "/api/v1/blueprints", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Delete blueprint + * * Deletes an existing blueprint */ export const blueprintsDeleteBlueprint = ( options: Options, -) => { - return (options.client ?? _heyApiClient).delete< - BlueprintsDeleteBlueprintResponse2, - BlueprintsDeleteBlueprintError, - ThrowOnError - >({ +) => + (options.client ?? client).delete({ url: "/api/v1/blueprints/{id}", ...options, }); -}; /** * Describe blueprint + * * Returns a blueprint */ export const blueprintsDescribeBlueprint = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - BlueprintsDescribeBlueprintResponse2, - BlueprintsDescribeBlueprintError, - ThrowOnError - >({ - url: "/api/v1/blueprints/{id}", - ...options, - }); -}; +) => + (options.client ?? client).get( + { url: "/api/v1/blueprints/{id}", ...options }, + ); /** * Update blueprint + * * Updates an existing blueprint */ export const blueprintsUpdateBlueprint = ( options: Options, -) => { - return (options.client ?? _heyApiClient).patch< - BlueprintsUpdateBlueprintResponse2, - BlueprintsUpdateBlueprintError, - ThrowOnError - >({ +) => + (options.client ?? client).patch({ url: "/api/v1/blueprints/{id}", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * List canvases + * * Returns a list of all canvases */ export const canvasesListCanvases = ( options?: Options, -) => { - return (options?.client ?? 
_heyApiClient).get( - { - url: "/api/v1/canvases", - ...options, - }, - ); -}; +) => + (options?.client ?? client).get({ + url: "/api/v1/canvases", + ...options, + }); /** * Create canvas + * * Creates a new canvas */ export const canvasesCreateCanvas = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post( - { - url: "/api/v1/canvases", - ...options, - headers: { - "Content-Type": "application/json", - ...options?.headers, - }, +) => + (options.client ?? client).post({ + url: "/api/v1/canvases", + ...options, + headers: { + "Content-Type": "application/json", + ...options.headers, }, - ); -}; + }); /** * List canvas events + * * Returns a list of root events that triggered executions in a canvas */ export const canvasesListCanvasEvents = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - CanvasesListCanvasEventsResponse2, - CanvasesListCanvasEventsError, - ThrowOnError - >({ +) => + (options.client ?? client).get({ url: "/api/v1/canvases/{canvasId}/events", ...options, }); -}; /** * List event executions + * * Returns a list of all node executions triggered by a root event */ export const canvasesListEventExecutions = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - CanvasesListEventExecutionsResponse2, - CanvasesListEventExecutionsError, - ThrowOnError - >({ - url: "/api/v1/canvases/{canvasId}/events/{eventId}/executions", - ...options, - }); -}; +) => + (options.client ?? client).get( + { url: "/api/v1/canvases/{canvasId}/events/{eventId}/executions", ...options }, + ); /** * Resolve execution errors + * * Marks canvas node execution errors as resolved */ export const canvasesResolveExecutionErrors = ( options: Options, -) => { - return (options.client ?? _heyApiClient).patch< - CanvasesResolveExecutionErrorsResponse2, - CanvasesResolveExecutionErrorsError, +) => + (options.client ?? 
client).patch< + CanvasesResolveExecutionErrorsResponses, + CanvasesResolveExecutionErrorsErrors, ThrowOnError >({ url: "/api/v1/canvases/{canvasId}/executions/resolve", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Invoke execution action + * * Invokes a custom action on a canvas node execution */ export const canvasesInvokeNodeExecutionAction = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post< - CanvasesInvokeNodeExecutionActionResponse2, - CanvasesInvokeNodeExecutionActionError, +) => + (options.client ?? client).post< + CanvasesInvokeNodeExecutionActionResponses, + CanvasesInvokeNodeExecutionActionErrors, ThrowOnError >({ url: "/api/v1/canvases/{canvasId}/executions/{executionId}/actions/{actionName}", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Cancel execution + * * Cancels a running canvas node execution */ export const canvasesCancelExecution = ( options: Options, -) => { - return (options.client ?? _heyApiClient).patch< - CanvasesCancelExecutionResponse2, - CanvasesCancelExecutionError, - ThrowOnError - >({ +) => + (options.client ?? client).patch({ url: "/api/v1/canvases/{canvasId}/executions/{executionId}/cancel", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * List child executions for an execution + * * List child executions for an execution */ export const canvasesListChildExecutions = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post< - CanvasesListChildExecutionsResponse2, - CanvasesListChildExecutionsError, +) => + (options.client ?? 
client).post< + CanvasesListChildExecutionsResponses, + CanvasesListChildExecutionsErrors, ThrowOnError >({ url: "/api/v1/canvases/{canvasId}/executions/{executionId}/children", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * List node events + * * Returns a list of events for a specific canvas node */ export const canvasesListNodeEvents = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - CanvasesListNodeEventsResponse2, - CanvasesListNodeEventsError, - ThrowOnError - >({ +) => + (options.client ?? client).get({ url: "/api/v1/canvases/{canvasId}/nodes/{nodeId}/events", ...options, }); -}; /** * Emit output event for canvas node + * * Emit output event for canvas node */ export const canvasesEmitNodeEvent = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post< - CanvasesEmitNodeEventResponse2, - CanvasesEmitNodeEventError, - ThrowOnError - >({ +) => + (options.client ?? client).post({ url: "/api/v1/canvases/{canvasId}/nodes/{nodeId}/events", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * List node executions + * * Returns a list of executions for a specific canvas node */ export const canvasesListNodeExecutions = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - CanvasesListNodeExecutionsResponse2, - CanvasesListNodeExecutionsError, - ThrowOnError - >({ +) => + (options.client ?? client).get({ url: "/api/v1/canvases/{canvasId}/nodes/{nodeId}/executions", ...options, }); -}; /** * Pause or resume node processing + * * Pauses or resumes processing for a canvas node while continuing to queue incoming items */ export const canvasesUpdateNodePause = ( options: Options, -) => { - return (options.client ?? _heyApiClient).patch< - CanvasesUpdateNodePauseResponse2, - CanvasesUpdateNodePauseError, - ThrowOnError - >({ +) => + (options.client ?? 
client).patch({ url: "/api/v1/canvases/{canvasId}/nodes/{nodeId}/pause", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * List items in a node's queue + * * Returns a list of items in a node's queue */ export const canvasesListNodeQueueItems = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - CanvasesListNodeQueueItemsResponse2, - CanvasesListNodeQueueItemsError, - ThrowOnError - >({ +) => + (options.client ?? client).get({ url: "/api/v1/canvases/{canvasId}/nodes/{nodeId}/queue", ...options, }); -}; /** * Delete item from a node's queue + * * Deletes a specific item in a node's queue */ export const canvasesDeleteNodeQueueItem = ( options: Options, -) => { - return (options.client ?? _heyApiClient).delete< - CanvasesDeleteNodeQueueItemResponse2, - CanvasesDeleteNodeQueueItemError, +) => + (options.client ?? client).delete< + CanvasesDeleteNodeQueueItemResponses, + CanvasesDeleteNodeQueueItemErrors, ThrowOnError - >({ - url: "/api/v1/canvases/{canvasId}/nodes/{nodeId}/queue/{itemId}", - ...options, - }); -}; + >({ url: "/api/v1/canvases/{canvasId}/nodes/{nodeId}/queue/{itemId}", ...options }); /** * Invoke trigger action + * * Invokes a custom action on a canvas node trigger */ export const canvasesInvokeNodeTriggerAction = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post< - CanvasesInvokeNodeTriggerActionResponse2, - CanvasesInvokeNodeTriggerActionError, +) => + (options.client ?? client).post< + CanvasesInvokeNodeTriggerActionResponses, + CanvasesInvokeNodeTriggerActionErrors, ThrowOnError >({ url: "/api/v1/canvases/{canvasId}/triggers/{nodeId}/actions/{actionName}", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Delete canvas + * * Deletes an existing canvas */ export const canvasesDeleteCanvas = ( options: Options, -) => { - return (options.client ?? 
_heyApiClient).delete< - CanvasesDeleteCanvasResponse2, - CanvasesDeleteCanvasError, - ThrowOnError - >({ +) => + (options.client ?? client).delete({ url: "/api/v1/canvases/{id}", ...options, }); -}; /** * Describe canvas + * * Returns a canvas by ID */ export const canvasesDescribeCanvas = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - CanvasesDescribeCanvasResponse2, - CanvasesDescribeCanvasError, - ThrowOnError - >({ +) => + (options.client ?? client).get({ url: "/api/v1/canvases/{id}", ...options, }); -}; /** * Update canvas + * * Updates an existing canvas */ export const canvasesUpdateCanvas = ( options: Options, -) => { - return (options.client ?? _heyApiClient).put({ +) => + (options.client ?? client).put({ url: "/api/v1/canvases/{id}", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * List components + * * Returns a list of all components */ export const componentsListComponents = ( options?: Options, -) => { - return (options?.client ?? _heyApiClient).get< - ComponentsListComponentsResponse2, - ComponentsListComponentsError, - ThrowOnError - >({ +) => + (options?.client ?? client).get({ url: "/api/v1/components", ...options, }); -}; /** * Describe component + * * Returns a component by its name */ export const componentsDescribeComponent = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - ComponentsDescribeComponentResponse2, - ComponentsDescribeComponentError, - ThrowOnError - >({ - url: "/api/v1/components/{name}", - ...options, - }); -}; +) => + (options.client ?? client).get( + { url: "/api/v1/components/{name}", ...options }, + ); /** * List component actions + * * Returns available actions for a component */ export const componentsListComponentActions = ( options: Options, -) => { - return (options.client ?? 
_heyApiClient).get< - ComponentsListComponentActionsResponse2, - ComponentsListComponentActionsError, +) => + (options.client ?? client).get< + ComponentsListComponentActionsResponses, + ComponentsListComponentActionsErrors, ThrowOnError - >({ - url: "/api/v1/components/{name}/actions", - ...options, - }); -}; + >({ url: "/api/v1/components/{name}/actions", ...options }); /** * List groups + * * Returns a list of groups within a domain */ export const groupsListGroups = ( options?: Options, -) => { - return (options?.client ?? _heyApiClient).get({ +) => + (options?.client ?? client).get({ url: "/api/v1/groups", ...options, }); -}; /** * Create group + * * Creates a new group within a domain with a specific role */ export const groupsCreateGroup = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post({ +) => + (options.client ?? client).post({ url: "/api/v1/groups", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Delete group + * * Deletes an existing group within a domain */ export const groupsDeleteGroup = ( options: Options, -) => { - return (options.client ?? _heyApiClient).delete({ +) => + (options.client ?? client).delete({ url: "/api/v1/groups/{groupName}", ...options, }); -}; /** * Get group + * * Returns details of a specific group within a domain */ export const groupsDescribeGroup = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get({ +) => + (options.client ?? client).get({ url: "/api/v1/groups/{groupName}", ...options, }); -}; /** * Update group + * * Updates an existing group within a domain */ export const groupsUpdateGroup = ( options: Options, -) => { - return (options.client ?? _heyApiClient).put({ +) => + (options.client ?? 
client).put({ url: "/api/v1/groups/{groupName}", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Get group users + * * Returns users that belong to a specific group within a domain */ export const groupsListGroupUsers = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get({ +) => + (options.client ?? client).get({ url: "/api/v1/groups/{groupName}/users", ...options, }); -}; /** * Add user to group + * * Adds a user to a group within a domain */ export const groupsAddUserToGroup = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post( - { - url: "/api/v1/groups/{groupName}/users", - ...options, - headers: { - "Content-Type": "application/json", - ...options?.headers, - }, +) => + (options.client ?? client).post({ + url: "/api/v1/groups/{groupName}/users", + ...options, + headers: { + "Content-Type": "application/json", + ...options.headers, }, - ); -}; + }); /** * Remove user from group + * * Removes a user from a group within a domain */ export const groupsRemoveUserFromGroup = ( options: Options, -) => { - return (options.client ?? _heyApiClient).patch< - GroupsRemoveUserFromGroupResponse2, - GroupsRemoveUserFromGroupError, - ThrowOnError - >({ +) => + (options.client ?? client).patch({ url: "/api/v1/groups/{groupName}/users/remove", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * List available integrations + * * List available integrations */ export const integrationsListIntegrations = ( options?: Options, -) => { - return (options?.client ?? _heyApiClient).get< - IntegrationsListIntegrationsResponse, - IntegrationsListIntegrationsError, +) => + (options?.client ?? 
client).get< + IntegrationsListIntegrationsResponses, + IntegrationsListIntegrationsErrors, ThrowOnError - >({ - url: "/api/v1/integrations", - ...options, - }); -}; + >({ url: "/api/v1/integrations", ...options }); /** * Accept an invite link + * * Accepts an organization invite link for the authenticated account */ export const organizationsAcceptInviteLink = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post< - OrganizationsAcceptInviteLinkResponse, - OrganizationsAcceptInviteLinkError, +) => + (options.client ?? client).post< + OrganizationsAcceptInviteLinkResponses, + OrganizationsAcceptInviteLinkErrors, ThrowOnError - >({ - url: "/api/v1/invite-links/{token}/accept", - ...options, - }); -}; + >({ url: "/api/v1/invite-links/{token}/accept", ...options }); /** * Get current user + * * Returns the currently authenticated user */ -export const meMe = (options?: Options) => { - return (options?.client ?? _heyApiClient).get({ - url: "/api/v1/me", - ...options, - }); -}; +export const meMe = (options?: Options) => + (options?.client ?? client).get({ url: "/api/v1/me", ...options }); /** * Regenerate API token + * * Regenerates the currently authencated user's API token */ export const meRegenerateToken = ( options?: Options, -) => { - return (options?.client ?? _heyApiClient).post({ +) => + (options?.client ?? client).post({ url: "/api/v1/me/token", ...options, }); -}; /** * Delete an organization + * * Deletes the specified organization (can be referenced by ID or name) */ export const organizationsDeleteOrganization = ( options: Options, -) => { - return (options.client ?? _heyApiClient).delete< - OrganizationsDeleteOrganizationResponse2, - OrganizationsDeleteOrganizationError, +) => + (options.client ?? 
client).delete< + OrganizationsDeleteOrganizationResponses, + OrganizationsDeleteOrganizationErrors, ThrowOnError - >({ - url: "/api/v1/organizations/{id}", - ...options, - }); -}; + >({ url: "/api/v1/organizations/{id}", ...options }); /** * Get organization details + * * Returns the details of a specific organization (can be referenced by ID or name) */ export const organizationsDescribeOrganization = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - OrganizationsDescribeOrganizationResponse2, - OrganizationsDescribeOrganizationError, +) => + (options.client ?? client).get< + OrganizationsDescribeOrganizationResponses, + OrganizationsDescribeOrganizationErrors, ThrowOnError - >({ - url: "/api/v1/organizations/{id}", - ...options, - }); -}; + >({ url: "/api/v1/organizations/{id}", ...options }); /** * Update an organization + * * Updates the specified organization (can be referenced by ID or name) */ export const organizationsUpdateOrganization = ( options: Options, -) => { - return (options.client ?? _heyApiClient).patch< - OrganizationsUpdateOrganizationResponse2, - OrganizationsUpdateOrganizationError, +) => + (options.client ?? client).patch< + OrganizationsUpdateOrganizationResponses, + OrganizationsUpdateOrganizationErrors, ThrowOnError >({ url: "/api/v1/organizations/{id}", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * List integrations in an organization + * * Returns a list of integrations in an organization */ export const organizationsListIntegrations = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - OrganizationsListIntegrationsResponse, - OrganizationsListIntegrationsError, +) => + (options.client ?? 
client).get< + OrganizationsListIntegrationsResponses, + OrganizationsListIntegrationsErrors, ThrowOnError - >({ - url: "/api/v1/organizations/{id}/integrations", - ...options, - }); -}; + >({ url: "/api/v1/organizations/{id}/integrations", ...options }); /** * Create organization integration + * * Create an organization integration */ export const organizationsCreateIntegration = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post< - OrganizationsCreateIntegrationResponse2, - OrganizationsCreateIntegrationError, +) => + (options.client ?? client).post< + OrganizationsCreateIntegrationResponses, + OrganizationsCreateIntegrationErrors, ThrowOnError >({ url: "/api/v1/organizations/{id}/integrations", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Delete organization integration + * * Deletes an integration from an organization */ export const organizationsDeleteIntegration = ( options: Options, -) => { - return (options.client ?? _heyApiClient).delete< - OrganizationsDeleteIntegrationResponse2, - OrganizationsDeleteIntegrationError, +) => + (options.client ?? client).delete< + OrganizationsDeleteIntegrationResponses, + OrganizationsDeleteIntegrationErrors, ThrowOnError - >({ - url: "/api/v1/organizations/{id}/integrations/{integrationId}", - ...options, - }); -}; + >({ url: "/api/v1/organizations/{id}/integrations/{integrationId}", ...options }); /** * Describe an integration in an organization + * * Returns details of a specific integration in an organization */ export const organizationsDescribeIntegration = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - OrganizationsDescribeIntegrationResponse2, - OrganizationsDescribeIntegrationError, +) => + (options.client ?? 
client).get< + OrganizationsDescribeIntegrationResponses, + OrganizationsDescribeIntegrationErrors, ThrowOnError - >({ - url: "/api/v1/organizations/{id}/integrations/{integrationId}", - ...options, - }); -}; + >({ url: "/api/v1/organizations/{id}/integrations/{integrationId}", ...options }); /** * Update integration + * * Updates the configuration for an organization integration */ export const organizationsUpdateIntegration = ( options: Options, -) => { - return (options.client ?? _heyApiClient).patch< - OrganizationsUpdateIntegrationResponse2, - OrganizationsUpdateIntegrationError, +) => + (options.client ?? client).patch< + OrganizationsUpdateIntegrationResponses, + OrganizationsUpdateIntegrationErrors, ThrowOnError >({ url: "/api/v1/organizations/{id}/integrations/{integrationId}", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * List integration resources + * * Lists resources for an integration */ export const organizationsListIntegrationResources = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - OrganizationsListIntegrationResourcesResponse2, - OrganizationsListIntegrationResourcesError, +) => + (options.client ?? client).get< + OrganizationsListIntegrationResourcesResponses, + OrganizationsListIntegrationResourcesErrors, ThrowOnError - >({ - url: "/api/v1/organizations/{id}/integrations/{integrationId}/resources", - ...options, - }); -}; + >({ url: "/api/v1/organizations/{id}/integrations/{integrationId}/resources", ...options }); /** * List organization invitations + * * Returns pending invitations for an organization */ export const organizationsListInvitations = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - OrganizationsListInvitationsResponse2, - OrganizationsListInvitationsError, +) => + (options.client ?? 
client).get< + OrganizationsListInvitationsResponses, + OrganizationsListInvitationsErrors, ThrowOnError - >({ - url: "/api/v1/organizations/{id}/invitations", - ...options, - }); -}; + >({ url: "/api/v1/organizations/{id}/invitations", ...options }); /** * Create an organization invitation + * * Invites a user to join an organization by email */ export const organizationsCreateInvitation = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post< - OrganizationsCreateInvitationResponse2, - OrganizationsCreateInvitationError, +) => + (options.client ?? client).post< + OrganizationsCreateInvitationResponses, + OrganizationsCreateInvitationErrors, ThrowOnError >({ url: "/api/v1/organizations/{id}/invitations", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Remove an organization invitation + * * Removes an organization invitation */ export const organizationsRemoveInvitation = ( options: Options, -) => { - return (options.client ?? _heyApiClient).delete< - OrganizationsRemoveInvitationResponse2, - OrganizationsRemoveInvitationError, +) => + (options.client ?? client).delete< + OrganizationsRemoveInvitationResponses, + OrganizationsRemoveInvitationErrors, ThrowOnError - >({ - url: "/api/v1/organizations/{id}/invitations/{invitationId}", - ...options, - }); -}; + >({ url: "/api/v1/organizations/{id}/invitations/{invitationId}", ...options }); /** * Get an organization invite link + * * Returns the invite link for an organization */ export const organizationsGetInviteLink = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - OrganizationsGetInviteLinkResponse2, - OrganizationsGetInviteLinkError, - ThrowOnError - >({ +) => + (options.client ?? 
client).get({ url: "/api/v1/organizations/{id}/invite-link", ...options, }); -}; /** * Update an organization invite link + * * Enables or disables the invite link for an organization */ export const organizationsUpdateInviteLink = ( options: Options, -) => { - return (options.client ?? _heyApiClient).patch< - OrganizationsUpdateInviteLinkResponse2, - OrganizationsUpdateInviteLinkError, +) => + (options.client ?? client).patch< + OrganizationsUpdateInviteLinkResponses, + OrganizationsUpdateInviteLinkErrors, ThrowOnError >({ url: "/api/v1/organizations/{id}/invite-link", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Reset an organization invite link + * * Generates a new invite link token for an organization */ export const organizationsResetInviteLink = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post< - OrganizationsResetInviteLinkResponse2, - OrganizationsResetInviteLinkError, +) => + (options.client ?? client).post< + OrganizationsResetInviteLinkResponses, + OrganizationsResetInviteLinkErrors, ThrowOnError - >({ - url: "/api/v1/organizations/{id}/invite-link/reset", - ...options, - }); -}; + >({ url: "/api/v1/organizations/{id}/invite-link/reset", ...options }); /** * Remove a user from an organization + * * Removes a user from an organization (can be referenced by ID or name) */ export const organizationsRemoveUser = ( options: Options, -) => { - return (options.client ?? _heyApiClient).delete< - OrganizationsRemoveUserResponse2, - OrganizationsRemoveUserError, - ThrowOnError - >({ +) => + (options.client ?? client).delete({ url: "/api/v1/organizations/{id}/users/{userId}", ...options, }); -}; /** * List roles + * * Returns available roles for a specific domain type with their permissions and inheritance */ export const rolesListRoles = ( options?: Options, -) => { - return (options?.client ?? _heyApiClient).get({ +) => + (options?.client ?? 
client).get({ url: "/api/v1/roles", ...options, }); -}; /** * Create role + * * Creates a new custom role with specified permissions */ export const rolesCreateRole = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post({ +) => + (options.client ?? client).post({ url: "/api/v1/roles", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Delete role + * * Deletes an existing custom role */ export const rolesDeleteRole = ( options: Options, -) => { - return (options.client ?? _heyApiClient).delete({ +) => + (options.client ?? client).delete({ url: "/api/v1/roles/{roleName}", ...options, }); -}; /** * Describe role + * * Returns detailed information about a specific role including permissions and inheritance */ export const rolesDescribeRole = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get({ +) => + (options.client ?? client).get({ url: "/api/v1/roles/{roleName}", ...options, }); -}; /** * Update role + * * Updates an existing custom role with new permissions */ export const rolesUpdateRole = ( options: Options, -) => { - return (options.client ?? _heyApiClient).put({ +) => + (options.client ?? client).put({ url: "/api/v1/roles/{roleName}", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Assign role + * * Assigns a role to a user within a domain */ export const rolesAssignRole = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post({ +) => + (options.client ?? client).post({ url: "/api/v1/roles/{roleName}/users", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * List secrets + * * Returns the list of secrets */ export const secretsListSecrets = ( options?: Options, -) => { - return (options?.client ?? _heyApiClient).get({ +) => + (options?.client ?? 
client).get({ url: "/api/v1/secrets", ...options, }); -}; /** * Create a new secret + * * Creates a new secret */ export const secretsCreateSecret = ( options: Options, -) => { - return (options.client ?? _heyApiClient).post({ +) => + (options.client ?? client).post({ url: "/api/v1/secrets", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Deletes a secret + * * Deletes the specified secret */ export const secretsDeleteSecret = ( options: Options, -) => { - return (options.client ?? _heyApiClient).delete( - { - url: "/api/v1/secrets/{idOrName}", - ...options, - }, - ); -}; +) => + (options.client ?? client).delete({ + url: "/api/v1/secrets/{idOrName}", + ...options, + }); /** * Get secret details + * * Returns the details of a specific secret */ export const secretsDescribeSecret = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - SecretsDescribeSecretResponse2, - SecretsDescribeSecretError, - ThrowOnError - >({ +) => + (options.client ?? client).get({ url: "/api/v1/secrets/{idOrName}", ...options, }); -}; /** * Updates a secret + * * Updates the specified secret */ export const secretsUpdateSecret = ( options: Options, -) => { - return (options.client ?? _heyApiClient).patch({ +) => + (options.client ?? client).patch({ url: "/api/v1/secrets/{idOrName}", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Remove a key from a secret + * * Removes one key from the secret. Secret must have at least one key remaining. */ export const secretsDeleteSecretKey = ( options: Options, -) => { - return (options.client ?? _heyApiClient).delete< - SecretsDeleteSecretKeyResponse2, - SecretsDeleteSecretKeyError, - ThrowOnError - >({ +) => + (options.client ?? 
client).delete({ url: "/api/v1/secrets/{idOrName}/keys/{keyName}", ...options, }); -}; /** * Set or overwrite a single key in a secret + * * Sets the value for one key. Creates the key if missing, overwrites if present. */ export const secretsSetSecretKey = ( options: Options, -) => { - return (options.client ?? _heyApiClient).put({ +) => + (options.client ?? client).put({ url: "/api/v1/secrets/{idOrName}/keys/{keyName}", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * Update secret name + * * Updates only the name of the secret. Name must be unique within the domain. */ export const secretsUpdateSecretName = ( options: Options, -) => { - return (options.client ?? _heyApiClient).patch< - SecretsUpdateSecretNameResponse2, - SecretsUpdateSecretNameError, - ThrowOnError - >({ +) => + (options.client ?? client).patch({ url: "/api/v1/secrets/{idOrName}/name", ...options, headers: { "Content-Type": "application/json", - ...options?.headers, + ...options.headers, }, }); -}; /** * List triggers + * * Returns a list of all available triggers */ export const triggersListTriggers = ( options?: Options, -) => { - return (options?.client ?? _heyApiClient).get( - { - url: "/api/v1/triggers", - ...options, - }, - ); -}; +) => + (options?.client ?? client).get({ + url: "/api/v1/triggers", + ...options, + }); /** * Describe trigger + * * Returns a trigger by its name */ export const triggersDescribeTrigger = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - TriggersDescribeTriggerResponse2, - TriggersDescribeTriggerError, - ThrowOnError - >({ +) => + (options.client ?? client).get({ url: "/api/v1/triggers/{name}", ...options, }); -}; /** * List users + * * Returns all users that have roles within a domain */ export const usersListUsers = ( options?: Options, -) => { - return (options?.client ?? _heyApiClient).get({ +) => + (options?.client ?? 
client).get({ url: "/api/v1/users", ...options, }); -}; /** * List user permissions + * * Returns all permissions a user has within a specific domain */ export const usersListUserPermissions = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - UsersListUserPermissionsResponse2, - UsersListUserPermissionsError, - ThrowOnError - >({ +) => + (options.client ?? client).get({ url: "/api/v1/users/{userId}/permissions", ...options, }); -}; /** * Get user roles + * * Returns the roles a user has within a specific domain */ export const usersListUserRoles = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get({ +) => + (options.client ?? client).get({ url: "/api/v1/users/{userId}/roles", ...options, }); -}; /** * List widgets + * * Returns a list of all available widgets */ export const widgetsListWidgets = ( options?: Options, -) => { - return (options?.client ?? _heyApiClient).get({ +) => + (options?.client ?? client).get({ url: "/api/v1/widgets", ...options, }); -}; /** * Describe widget + * * Returns a widget by its name */ export const widgetsDescribeWidget = ( options: Options, -) => { - return (options.client ?? _heyApiClient).get< - WidgetsDescribeWidgetResponse2, - WidgetsDescribeWidgetError, - ThrowOnError - >({ +) => + (options.client ?? 
client).get({ url: "/api/v1/widgets/{name}", ...options, }); -}; diff --git a/web_src/src/api-client/types.gen.ts b/web_src/src/api-client/types.gen.ts index ce0ab2ff89..fc25d1716a 100644 --- a/web_src/src/api-client/types.gen.ts +++ b/web_src/src/api-client/types.gen.ts @@ -1,7 +1,17 @@ // This file is auto-generated by @hey-api/openapi-ts +export type ClientOptions = { + baseUrl: `http://${string}` | `https://${string}` | (string & {}); +}; + +/** + * Enums + */ export type AuthorizationDomainType = "DOMAIN_TYPE_UNSPECIFIED" | "DOMAIN_TYPE_ORGANIZATION"; +/** + * Common data structures + */ export type AuthorizationPermission = { resource?: string; action?: string; @@ -3208,7 +3218,3 @@ export type WidgetsDescribeWidgetResponses = { }; export type WidgetsDescribeWidgetResponse2 = WidgetsDescribeWidgetResponses[keyof WidgetsDescribeWidgetResponses]; - -export type ClientOptions = { - baseUrl: `http://${string}` | `https://${string}` | (string & {}); -}; From b710808d733fc98c35d0a1574ab365860dda01ec Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Sun, 8 Feb 2026 17:34:44 -0300 Subject: [PATCH 023/160] feat: restrict event payload size emitted by components (#2971) Next one after https://github.com/superplanehq/superplane/pull/2918. Here, we restrict the size of the payloads emitted by components and triggers through Emit(). This protects the storage layer of the engine against improper implementations. We start with a 32k limit. 
Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- pkg/workers/contexts/common.go | 7 +++ pkg/workers/contexts/event_context.go | 25 +++++--- pkg/workers/contexts/event_context_test.go | 47 +++++++++++++++ .../contexts/execution_state_context.go | 24 ++++++-- .../contexts/execution_state_context_test.go | 57 +++++++++++++++++++ test/support/support.go | 14 +++++ 6 files changed, 160 insertions(+), 14 deletions(-) create mode 100644 pkg/workers/contexts/common.go create mode 100644 pkg/workers/contexts/event_context_test.go create mode 100644 pkg/workers/contexts/execution_state_context_test.go diff --git a/pkg/workers/contexts/common.go b/pkg/workers/contexts/common.go new file mode 100644 index 0000000000..92661cd4b2 --- /dev/null +++ b/pkg/workers/contexts/common.go @@ -0,0 +1,7 @@ +package contexts + +/* + * DefaultMaxPayloadSize is used to enforce reasonably-sized + * event payloads from components and trigger implementations. + */ +const DefaultMaxPayloadSize = 32 * 1024 diff --git a/pkg/workers/contexts/event_context.go b/pkg/workers/contexts/event_context.go index 4c82129848..f8304db788 100644 --- a/pkg/workers/contexts/event_context.go +++ b/pkg/workers/contexts/event_context.go @@ -1,45 +1,52 @@ package contexts import ( + "encoding/json" "fmt" "strings" "time" - "github.com/mitchellh/mapstructure" "github.com/superplanehq/superplane/pkg/models" "gorm.io/datatypes" "gorm.io/gorm" ) type EventContext struct { - tx *gorm.DB - node *models.CanvasNode + tx *gorm.DB + node *models.CanvasNode + maxPayloadSize int } func NewEventContext(tx *gorm.DB, node *models.CanvasNode) *EventContext { - return &EventContext{tx: tx, node: node} + return &EventContext{tx: tx, node: node, maxPayloadSize: DefaultMaxPayloadSize} } func (s *EventContext) Emit(payloadType string, payload any) error { - var v any - structuredPayload := map[string]any{ "type": payloadType, "timestamp": time.Now(), "data": payload, } - err := mapstructure.Decode(structuredPayload, 
&v) + data, err := json.Marshal(structuredPayload) if err != nil { - return err + return fmt.Errorf("failed to marshal event payload: %w", err) + } + + if len(data) > s.maxPayloadSize { + return fmt.Errorf("event payload too large: %d bytes (max %d)", len(data), s.maxPayloadSize) } now := time.Now() + + // + // We use RawMessage here to avoid a second marshal when GORM persists the JSONType. + // event := models.CanvasEvent{ WorkflowID: s.node.WorkflowID, NodeID: s.node.NodeID, Channel: "default", - Data: datatypes.NewJSONType(v), + Data: datatypes.NewJSONType[any](json.RawMessage(data)), State: models.CanvasEventStatePending, CreatedAt: &now, } diff --git a/pkg/workers/contexts/event_context_test.go b/pkg/workers/contexts/event_context_test.go new file mode 100644 index 0000000000..259967ac42 --- /dev/null +++ b/pkg/workers/contexts/event_context_test.go @@ -0,0 +1,47 @@ +package contexts + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/database" + "github.com/superplanehq/superplane/pkg/models" + "github.com/superplanehq/superplane/test/support" + "gorm.io/datatypes" +) + +func Test__EventContext__Emit(t *testing.T) { + r := support.Setup(t) + defer r.Close() + + triggerNodeID := "trigger-1" + canvas, nodes := support.CreateCanvas( + t, + r.Organization.ID, + r.User, + []models.CanvasNode{ + { + NodeID: triggerNodeID, + Name: triggerNodeID, + Type: models.NodeTypeTrigger, + Ref: datatypes.NewJSONType(models.NodeRef{Trigger: &models.TriggerRef{Name: "start"}}), + Configuration: datatypes.NewJSONType(map[string]any{}), + }, + }, + nil, + ) + + t.Run("rejects large payload", func(t *testing.T) { + ctx := NewEventContext(database.Conn(), &nodes[0]) + largePayload := map[string]any{ + "value": strings.Repeat("a", DefaultMaxPayloadSize+100), + } + + err := ctx.Emit("test.payload", largePayload) + require.Error(t, err) + assert.Contains(t, err.Error(), "event 
payload too large") + support.VerifyCanvasEventsCount(t, canvas.ID, 0) + }) +} diff --git a/pkg/workers/contexts/execution_state_context.go b/pkg/workers/contexts/execution_state_context.go index cc6c692efb..f2db1062e6 100644 --- a/pkg/workers/contexts/execution_state_context.go +++ b/pkg/workers/contexts/execution_state_context.go @@ -1,6 +1,8 @@ package contexts import ( + "encoding/json" + "fmt" "time" "github.com/superplanehq/superplane/pkg/models" @@ -8,12 +10,13 @@ import ( ) type ExecutionStateContext struct { - execution *models.CanvasNodeExecution - tx *gorm.DB + execution *models.CanvasNodeExecution + tx *gorm.DB + maxPayloadSize int } func NewExecutionStateContext(tx *gorm.DB, execution *models.CanvasNodeExecution) *ExecutionStateContext { - return &ExecutionStateContext{tx: tx, execution: execution} + return &ExecutionStateContext{tx: tx, execution: execution, maxPayloadSize: DefaultMaxPayloadSize} } func (s *ExecutionStateContext) IsFinished() bool { @@ -35,11 +38,22 @@ func (s *ExecutionStateContext) Emit(channel, payloadType string, payloads []any } for _, payload := range payloads { - outputs[channel] = append(outputs[channel], map[string]any{ + event := map[string]any{ "type": payloadType, "timestamp": time.Now(), "data": payload, - }) + } + + data, err := json.Marshal(event) + if err != nil { + return fmt.Errorf("failed to marshal payload: %w", err) + } + + if len(data) > s.maxPayloadSize { + return fmt.Errorf("event payload too large: %d bytes (max %d)", len(data), s.maxPayloadSize) + } + + outputs[channel] = append(outputs[channel], json.RawMessage(data)) } _, err := s.execution.PassInTransaction(s.tx, outputs) diff --git a/pkg/workers/contexts/execution_state_context_test.go b/pkg/workers/contexts/execution_state_context_test.go new file mode 100644 index 0000000000..a0cdd522fb --- /dev/null +++ b/pkg/workers/contexts/execution_state_context_test.go @@ -0,0 +1,57 @@ +package contexts + +import ( + "strings" + "testing" + + 
"github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/database" + "github.com/superplanehq/superplane/pkg/models" + "github.com/superplanehq/superplane/test/support" + "gorm.io/datatypes" +) + +func Test__ExecutionStateContext__Emit(t *testing.T) { + r := support.Setup(t) + defer r.Close() + + triggerNodeID := "trigger-1" + componentNodeID := "component-1" + canvas, _ := support.CreateCanvas( + t, + r.Organization.ID, + r.User, + []models.CanvasNode{ + { + NodeID: triggerNodeID, + Name: triggerNodeID, + Type: models.NodeTypeTrigger, + Ref: datatypes.NewJSONType(models.NodeRef{Trigger: &models.TriggerRef{Name: "start"}}), + }, + { + NodeID: componentNodeID, + Name: componentNodeID, + Type: models.NodeTypeComponent, + Ref: datatypes.NewJSONType(models.NodeRef{Component: &models.ComponentRef{Name: "noop"}}), + }, + }, + []models.Edge{ + {SourceID: triggerNodeID, TargetID: componentNodeID, Channel: "default"}, + }, + ) + + t.Run("rejects large payload", func(t *testing.T) { + rootData := map[string]any{"root": "event"} + rootEvent := support.EmitCanvasEventForNodeWithData(t, canvas.ID, triggerNodeID, "default", nil, rootData) + execution := support.CreateCanvasNodeExecution(t, canvas.ID, componentNodeID, rootEvent.ID, rootEvent.ID, nil) + + ctx := NewExecutionStateContext(database.Conn(), execution) + largePayload := strings.Repeat("a", DefaultMaxPayloadSize+100) + + err := ctx.Emit("default", "test.payload", []any{largePayload}) + require.Error(t, err) + assert.Contains(t, err.Error(), "event payload too large") + support.VerifyCanvasNodeEventsCount(t, canvas.ID, componentNodeID, 0) + }) +} diff --git a/test/support/support.go b/test/support/support.go index 151db07fa1..1d06772021 100644 --- a/test/support/support.go +++ b/test/support/support.go @@ -400,6 +400,20 @@ func VerifyCanvasEventsCount(t *testing.T, canvasID uuid.UUID, expected int) { require.Equal(t, expected, int(actual)) } +func 
VerifyCanvasNodeEventsCount(t *testing.T, canvasID uuid.UUID, nodeID string, expected int) { + var actual int64 + + err := database.Conn(). + Model(&models.CanvasEvent{}). + Where("workflow_id = ?", canvasID). + Where("node_id = ?", nodeID). + Count(&actual). + Error + + require.NoError(t, err) + require.Equal(t, expected, int(actual)) +} + func VerifyNodeExecutionsCount(t *testing.T, workflowID uuid.UUID, expected int) { var actual int64 From 85edb9fa7c2cc985c7ff9696d90b93e3618c2e06 Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Sun, 8 Feb 2026 17:42:15 -0300 Subject: [PATCH 024/160] chore: refactor NodeRef structure in Integrations API (#2972) We are still using `workflow_*` in the integration object and also in the integrations page. Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- api/swagger/superplane.swagger.json | 4 +- .../organizations/create_integration.go | 12 ++-- .../organizations/list_integrations.go | 2 +- .../organizations/update_integration.go | 2 +- pkg/models/integration.go | 16 ++--- .../model_integration_node_ref.go | 72 +++++++++---------- pkg/protos/organizations/organizations.pb.go | 24 +++---- protos/organizations.proto | 4 +- web_src/src/api-client/types.gen.ts | 4 +- .../settings/IntegrationDetails.tsx | 24 +++---- 10 files changed, 82 insertions(+), 82 deletions(-) diff --git a/api/swagger/superplane.swagger.json b/api/swagger/superplane.swagger.json index b64aec58ee..5599bd9dd0 100644 --- a/api/swagger/superplane.swagger.json +++ b/api/swagger/superplane.swagger.json @@ -4399,10 +4399,10 @@ "IntegrationNodeRef": { "type": "object", "properties": { - "workflowId": { + "canvasId": { "type": "string" }, - "workflowName": { + "canvasName": { "type": "string" }, "nodeId": { diff --git a/pkg/grpc/actions/organizations/create_integration.go b/pkg/grpc/actions/organizations/create_integration.go index 70b721240c..92c6726d14 100644 --- a/pkg/grpc/actions/organizations/create_integration.go +++ 
b/pkg/grpc/actions/organizations/create_integration.go @@ -86,7 +86,7 @@ func CreateIntegration(ctx context.Context, registry *registry.Registry, oidcPro } } - proto, err := serializeIntegration(registry, newIntegration, []models.WorkflowNodeReference{}) + proto, err := serializeIntegration(registry, newIntegration, []models.CanvasNodeReference{}) if err != nil { return nil, status.Errorf(codes.Internal, "failed to serialize integration: %v", err) } @@ -96,7 +96,7 @@ func CreateIntegration(ctx context.Context, registry *registry.Registry, oidcPro }, nil } -func serializeIntegration(registry *registry.Registry, instance *models.Integration, nodeRefs []models.WorkflowNodeReference) (*pb.Integration, error) { +func serializeIntegration(registry *registry.Registry, instance *models.Integration, nodeRefs []models.CanvasNodeReference) (*pb.Integration, error) { integration, err := registry.GetIntegration(instance.AppName) if err != nil { return nil, err @@ -144,10 +144,10 @@ func serializeIntegration(registry *registry.Registry, instance *models.Integrat for _, nodeRef := range nodeRefs { proto.Status.UsedIn = append(proto.Status.UsedIn, &pb.Integration_NodeRef{ - WorkflowId: nodeRef.WorkflowID.String(), - WorkflowName: nodeRef.WorkflowName, - NodeId: nodeRef.NodeID, - NodeName: nodeRef.NodeName, + CanvasId: nodeRef.CanvasID.String(), + CanvasName: nodeRef.CanvasName, + NodeId: nodeRef.NodeID, + NodeName: nodeRef.NodeName, }) } diff --git a/pkg/grpc/actions/organizations/list_integrations.go b/pkg/grpc/actions/organizations/list_integrations.go index aa5c7cfe4c..e579d78bc2 100644 --- a/pkg/grpc/actions/organizations/list_integrations.go +++ b/pkg/grpc/actions/organizations/list_integrations.go @@ -17,7 +17,7 @@ func ListIntegrations(ctx context.Context, registry *registry.Registry, orgID st protos := []*pb.Integration{} for _, integration := range integrations { - proto, err := serializeIntegration(registry, &integration, []models.WorkflowNodeReference{}) + proto, err := 
serializeIntegration(registry, &integration, []models.CanvasNodeReference{}) if err != nil { return nil, err } diff --git a/pkg/grpc/actions/organizations/update_integration.go b/pkg/grpc/actions/organizations/update_integration.go index fdba80d3fe..b8437c0bb5 100644 --- a/pkg/grpc/actions/organizations/update_integration.go +++ b/pkg/grpc/actions/organizations/update_integration.go @@ -83,7 +83,7 @@ func UpdateIntegration(ctx context.Context, registry *registry.Registry, oidcPro return nil, status.Error(codes.Internal, "failed to save integration") } - proto, err := serializeIntegration(registry, instance, []models.WorkflowNodeReference{}) + proto, err := serializeIntegration(registry, instance, []models.CanvasNodeReference{}) if err != nil { log.Errorf("failed to serialize integration %s: %v", instance.ID, err) return nil, status.Error(codes.Internal, "failed to serialize integration") diff --git a/pkg/models/integration.go b/pkg/models/integration.go index 67d7d4e128..d033ac7232 100644 --- a/pkg/models/integration.go +++ b/pkg/models/integration.go @@ -113,19 +113,19 @@ func ListUnscopedIntegrationWebhooks(tx *gorm.DB, integrationID uuid.UUID) ([]We return webhooks, nil } -type WorkflowNodeReference struct { - WorkflowID uuid.UUID - WorkflowName string - NodeID string - NodeName string +type CanvasNodeReference struct { + CanvasID uuid.UUID + CanvasName string + NodeID string + NodeName string } -func ListIntegrationNodeReferences(integrationID uuid.UUID) ([]WorkflowNodeReference, error) { - var nodeReferences []WorkflowNodeReference +func ListIntegrationNodeReferences(integrationID uuid.UUID) ([]CanvasNodeReference, error) { + var nodeReferences []CanvasNodeReference err := database.Conn(). Table("workflow_nodes AS wn"). Joins("JOIN workflows AS w ON w.id = wn.workflow_id"). - Select("w.id as workflow_id, w.name as workflow_name, wn.node_id as node_id, wn.name as node_name"). 
+ Select("w.id as canvas_id, w.name as canvas_name, wn.node_id as node_id, wn.name as node_name"). Where("wn.app_installation_id = ?", integrationID). Where("wn.deleted_at IS NULL"). Find(&nodeReferences). diff --git a/pkg/openapi_client/model_integration_node_ref.go b/pkg/openapi_client/model_integration_node_ref.go index edd34dea25..bcca759c26 100644 --- a/pkg/openapi_client/model_integration_node_ref.go +++ b/pkg/openapi_client/model_integration_node_ref.go @@ -20,10 +20,10 @@ var _ MappedNullable = &IntegrationNodeRef{} // IntegrationNodeRef struct for IntegrationNodeRef type IntegrationNodeRef struct { - WorkflowId *string `json:"workflowId,omitempty"` - WorkflowName *string `json:"workflowName,omitempty"` - NodeId *string `json:"nodeId,omitempty"` - NodeName *string `json:"nodeName,omitempty"` + CanvasId *string `json:"canvasId,omitempty"` + CanvasName *string `json:"canvasName,omitempty"` + NodeId *string `json:"nodeId,omitempty"` + NodeName *string `json:"nodeName,omitempty"` } // NewIntegrationNodeRef instantiates a new IntegrationNodeRef object @@ -43,68 +43,68 @@ func NewIntegrationNodeRefWithDefaults() *IntegrationNodeRef { return &this } -// GetWorkflowId returns the WorkflowId field value if set, zero value otherwise. -func (o *IntegrationNodeRef) GetWorkflowId() string { - if o == nil || IsNil(o.WorkflowId) { +// GetCanvasId returns the CanvasId field value if set, zero value otherwise. +func (o *IntegrationNodeRef) GetCanvasId() string { + if o == nil || IsNil(o.CanvasId) { var ret string return ret } - return *o.WorkflowId + return *o.CanvasId } -// GetWorkflowIdOk returns a tuple with the WorkflowId field value if set, nil otherwise +// GetCanvasIdOk returns a tuple with the CanvasId field value if set, nil otherwise // and a boolean to check if the value has been set. 
-func (o *IntegrationNodeRef) GetWorkflowIdOk() (*string, bool) { - if o == nil || IsNil(o.WorkflowId) { +func (o *IntegrationNodeRef) GetCanvasIdOk() (*string, bool) { + if o == nil || IsNil(o.CanvasId) { return nil, false } - return o.WorkflowId, true + return o.CanvasId, true } -// HasWorkflowId returns a boolean if a field has been set. -func (o *IntegrationNodeRef) HasWorkflowId() bool { - if o != nil && !IsNil(o.WorkflowId) { +// HasCanvasId returns a boolean if a field has been set. +func (o *IntegrationNodeRef) HasCanvasId() bool { + if o != nil && !IsNil(o.CanvasId) { return true } return false } -// SetWorkflowId gets a reference to the given string and assigns it to the WorkflowId field. -func (o *IntegrationNodeRef) SetWorkflowId(v string) { - o.WorkflowId = &v +// SetCanvasId gets a reference to the given string and assigns it to the CanvasId field. +func (o *IntegrationNodeRef) SetCanvasId(v string) { + o.CanvasId = &v } -// GetWorkflowName returns the WorkflowName field value if set, zero value otherwise. -func (o *IntegrationNodeRef) GetWorkflowName() string { - if o == nil || IsNil(o.WorkflowName) { +// GetCanvasName returns the CanvasName field value if set, zero value otherwise. +func (o *IntegrationNodeRef) GetCanvasName() string { + if o == nil || IsNil(o.CanvasName) { var ret string return ret } - return *o.WorkflowName + return *o.CanvasName } -// GetWorkflowNameOk returns a tuple with the WorkflowName field value if set, nil otherwise +// GetCanvasNameOk returns a tuple with the CanvasName field value if set, nil otherwise // and a boolean to check if the value has been set. -func (o *IntegrationNodeRef) GetWorkflowNameOk() (*string, bool) { - if o == nil || IsNil(o.WorkflowName) { +func (o *IntegrationNodeRef) GetCanvasNameOk() (*string, bool) { + if o == nil || IsNil(o.CanvasName) { return nil, false } - return o.WorkflowName, true + return o.CanvasName, true } -// HasWorkflowName returns a boolean if a field has been set. 
-func (o *IntegrationNodeRef) HasWorkflowName() bool { - if o != nil && !IsNil(o.WorkflowName) { +// HasCanvasName returns a boolean if a field has been set. +func (o *IntegrationNodeRef) HasCanvasName() bool { + if o != nil && !IsNil(o.CanvasName) { return true } return false } -// SetWorkflowName gets a reference to the given string and assigns it to the WorkflowName field. -func (o *IntegrationNodeRef) SetWorkflowName(v string) { - o.WorkflowName = &v +// SetCanvasName gets a reference to the given string and assigns it to the CanvasName field. +func (o *IntegrationNodeRef) SetCanvasName(v string) { + o.CanvasName = &v } // GetNodeId returns the NodeId field value if set, zero value otherwise. @@ -181,11 +181,11 @@ func (o IntegrationNodeRef) MarshalJSON() ([]byte, error) { func (o IntegrationNodeRef) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if !IsNil(o.WorkflowId) { - toSerialize["workflowId"] = o.WorkflowId + if !IsNil(o.CanvasId) { + toSerialize["canvasId"] = o.CanvasId } - if !IsNil(o.WorkflowName) { - toSerialize["workflowName"] = o.WorkflowName + if !IsNil(o.CanvasName) { + toSerialize["canvasName"] = o.CanvasName } if !IsNil(o.NodeId) { toSerialize["nodeId"] = o.NodeId diff --git a/pkg/protos/organizations/organizations.pb.go b/pkg/protos/organizations/organizations.pb.go index 1e2fb35dec..d8ea2bec63 100644 --- a/pkg/protos/organizations/organizations.pb.go +++ b/pkg/protos/organizations/organizations.pb.go @@ -2388,8 +2388,8 @@ func (x *Integration_Status) GetUsedIn() []*Integration_NodeRef { type Integration_NodeRef struct { state protoimpl.MessageState `protogen:"open.v1"` - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - WorkflowName string `protobuf:"bytes,2,opt,name=workflow_name,json=workflowName,proto3" json:"workflow_name,omitempty"` + CanvasId string `protobuf:"bytes,1,opt,name=canvas_id,json=canvasId,proto3" json:"canvas_id,omitempty"` + 
CanvasName string `protobuf:"bytes,2,opt,name=canvas_name,json=canvasName,proto3" json:"canvas_name,omitempty"` NodeId string `protobuf:"bytes,3,opt,name=node_id,json=nodeId,proto3" json:"node_id,omitempty"` NodeName string `protobuf:"bytes,4,opt,name=node_name,json=nodeName,proto3" json:"node_name,omitempty"` unknownFields protoimpl.UnknownFields @@ -2426,16 +2426,16 @@ func (*Integration_NodeRef) Descriptor() ([]byte, []int) { return file_organizations_proto_rawDescGZIP(), []int{36, 3} } -func (x *Integration_NodeRef) GetWorkflowId() string { +func (x *Integration_NodeRef) GetCanvasId() string { if x != nil { - return x.WorkflowId + return x.CanvasId } return "" } -func (x *Integration_NodeRef) GetWorkflowName() string { +func (x *Integration_NodeRef) GetCanvasName() string { if x != nil { - return x.WorkflowName + return x.CanvasName } return "" } @@ -2574,7 +2574,7 @@ const file_organizations_proto_rawDesc = "" + "\x18DeleteIntegrationRequest\x12\x0e\n" + "\x02id\x18\x01 \x01(\tR\x02id\x12%\n" + "\x0eintegration_id\x18\x02 \x01(\tR\rintegrationId\"\x1b\n" + - "\x19DeleteIntegrationResponse\"\x9b\a\n" + + "\x19DeleteIntegrationResponse\"\x92\a\n" + "\vIntegration\x12J\n" + "\bmetadata\x18\x01 \x01(\v2..Superplane.Organizations.Integration.MetadataR\bmetadata\x12>\n" + "\x04spec\x18\x02 \x01(\v2*.Superplane.Organizations.Integration.SpecR\x04spec\x12D\n" + @@ -2594,11 +2594,11 @@ const file_organizations_proto_rawDesc = "" + "\x11state_description\x18\x02 \x01(\tR\x10stateDescription\x123\n" + "\bmetadata\x18\x03 \x01(\v2\x17.google.protobuf.StructR\bmetadata\x12N\n" + "\x0ebrowser_action\x18\x04 \x01(\v2'.Superplane.Organizations.BrowserActionR\rbrowserAction\x12F\n" + - "\aused_in\x18\x05 \x03(\v2-.Superplane.Organizations.Integration.NodeRefR\x06usedIn\x1a\x85\x01\n" + - "\aNodeRef\x12\x1f\n" + - "\vworkflow_id\x18\x01 \x01(\tR\n" + - "workflowId\x12#\n" + - "\rworkflow_name\x18\x02 \x01(\tR\fworkflowName\x12\x17\n" + + "\aused_in\x18\x05 
\x03(\v2-.Superplane.Organizations.Integration.NodeRefR\x06usedIn\x1a}\n" + + "\aNodeRef\x12\x1b\n" + + "\tcanvas_id\x18\x01 \x01(\tR\bcanvasId\x12\x1f\n" + + "\vcanvas_name\x18\x02 \x01(\tR\n" + + "canvasName\x12\x17\n" + "\anode_id\x18\x03 \x01(\tR\x06nodeId\x12\x1b\n" + "\tnode_name\x18\x04 \x01(\tR\bnodeName\"\xf4\x01\n" + "\rBrowserAction\x12\x10\n" + diff --git a/protos/organizations.proto b/protos/organizations.proto index 150024da8f..454cfc290f 100644 --- a/protos/organizations.proto +++ b/protos/organizations.proto @@ -410,8 +410,8 @@ message Integration { } message NodeRef { - string workflow_id = 1; - string workflow_name = 2; + string canvas_id = 1; + string canvas_name = 2; string node_id = 3; string node_name = 4; } diff --git a/web_src/src/api-client/types.gen.ts b/web_src/src/api-client/types.gen.ts index fc25d1716a..be271040a7 100644 --- a/web_src/src/api-client/types.gen.ts +++ b/web_src/src/api-client/types.gen.ts @@ -577,8 +577,8 @@ export type GroupsUpdateGroupResponse = { }; export type IntegrationNodeRef = { - workflowId?: string; - workflowName?: string; + canvasId?: string; + canvasName?: string; nodeId?: string; nodeName?: string; }; diff --git a/web_src/src/pages/organization/settings/IntegrationDetails.tsx b/web_src/src/pages/organization/settings/IntegrationDetails.tsx index 2c77967219..8711311156 100644 --- a/web_src/src/pages/organization/settings/IntegrationDetails.tsx +++ b/web_src/src/pages/organization/settings/IntegrationDetails.tsx @@ -53,22 +53,22 @@ export function IntegrationDetails({ organizationId }: IntegrationDetailsProps) const workflowGroups = useMemo(() => { if (!integration?.status?.usedIn) return []; - const groups = new Map }>(); + const groups = new Map }>(); integration.status.usedIn.forEach((nodeRef) => { - const workflowId = nodeRef.workflowId || ""; - const workflowName = nodeRef.workflowName || workflowId; + const canvasId = nodeRef.canvasId || ""; + const canvasName = nodeRef.canvasName || canvasId; const 
nodeId = nodeRef.nodeId || ""; const nodeName = nodeRef.nodeName || nodeId; - if (!groups.has(workflowId)) { - groups.set(workflowId, { workflowName, nodes: [] }); + if (!groups.has(canvasId)) { + groups.set(canvasId, { canvasName, nodes: [] }); } - groups.get(workflowId)?.nodes.push({ nodeId, nodeName }); + groups.get(canvasId)?.nodes.push({ nodeId, nodeName }); }); - return Array.from(groups.entries()).map(([workflowId, data]) => ({ - workflowId, - workflowName: data.workflowName, + return Array.from(groups.entries()).map(([canvasId, data]) => ({ + canvasId, + canvasName: data.canvasName, nodes: data.nodes, })); }, [integration?.status?.usedIn]); @@ -248,13 +248,13 @@ export function IntegrationDetails({ organizationId }: IntegrationDetailsProps)
{workflowGroups.map((group) => ( - ))} -
- - ) : ( -

- This integration is not used in any workflow yet. -

- )} - - + {integration?.status?.browserAction && ( + + )} - {/* Danger Zone */} -
-
-

Danger Zone

-

- Once you delete this integration, all its data will be permanently deleted. This action cannot be - undone. -

+
+
+

Configuration

+ {integrationDef?.configuration && integrationDef.configuration.length > 0 ? ( - +
+ {integrationDef.configuration.map((field: ConfigurationField) => ( + setConfigValues({ ...configValues, [field.name!]: value })} + allValues={configValues} + domainId={organizationId} + domainType="DOMAIN_TYPE_ORGANIZATION" + organizationId={organizationId} + appInstallationId={integration?.metadata?.id} + /> + ))} + +
+ + {updateMutation.isError && ( + Failed to update configuration + )} +
+
-
+ ) : ( +

No configuration fields available.

+ )}
- +
- -
-
- {integration?.status?.browserAction && ( - - )} +
+
+

Integration Details

+
+
+

Integration ID

+

{integration.metadata?.id}

+
+
+
+
- {integrationDef?.configuration && integrationDef.configuration.length > 0 ? ( - -
- {integrationDef.configuration.map((field: ConfigurationField) => ( - setConfigValues({ ...configValues, [field.name!]: value })} - allValues={configValues} - domainId={organizationId} - domainType="DOMAIN_TYPE_ORGANIZATION" - organizationId={organizationId} - appInstallationId={integration?.metadata?.id} - /> - ))} + {/* Used By */} +
+
+

Used By

+ {workflowGroups.length > 0 ? ( + <> +

+ This integration is currently used in the following canvases: +

+
+ {workflowGroups.map((group) => ( + + ))} +
+ + ) : ( +

+ This integration is not used in any workflow yet. +

+ )} +
+
-
- - {updateMutation.isSuccess && ( - - Configuration updated successfully! - - )} - {updateMutation.isError && ( - Failed to update configuration - )} -
- -
- ) : ( -

No configuration fields available.

- )} -
+ {/* Danger Zone */} +
+
+

Danger Zone

+

+ Once you delete this integration, all its data will be permanently deleted. This action cannot be undone. +

+ + +
- - +
+
{/* Delete Confirmation Modal */} {showDeleteConfirm && ( From 65ea989bbcbccd9e9ddfe3162b467426c3ee9e81 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pedro=20Le=C3=A3o?= <60622592+forestileao@users.noreply.github.com> Date: Mon, 9 Feb 2026 17:45:50 -0300 Subject: [PATCH 030/160] feat: render integration base + 2 components (#2905) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Add a new **Render** integration to SuperPlane with: - API key connection (optional workspace ID) - `On Event` trigger from Render webhooks - `Trigger Deploy` action for Render services ## What’s included - Backend integration (`pkg/integrations/render/*`) - Render API client (services, owners, webhooks, deploy trigger) - Automatic webhook provisioning/cleanup through Render API - Webhook signature verification (`webhook-id`, `webhook-timestamp`, `webhook-signature`) - Frontend mappers + icon + registry wiring for Render components/triggers - New docs page: `docs/components/Render.mdx` - Sidebar order updates for docs components Video showing the implementation https://github.com/user-attachments/assets/41c75657-9f1f-422c-8741-3db31480be32 --------- Signed-off-by: Pedro F. 
Leao Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Render.mdx | 173 ++++ pkg/core/integration.go | 6 + pkg/core/trigger.go | 5 + pkg/integrations/github/webhook_handler.go | 4 + pkg/integrations/pagerduty/webhook_handler.go | 4 + pkg/integrations/render/client.go | 583 +++++++++++++ pkg/integrations/render/common.go | 808 ++++++++++++++++++ pkg/integrations/render/common_test.go | 43 + pkg/integrations/render/deploy.go | 513 +++++++++++ pkg/integrations/render/deploy_test.go | 399 +++++++++ pkg/integrations/render/example.go | 50 ++ .../render/example_data_on_build.json | 12 + .../render/example_data_on_deploy.json | 12 + .../render/example_output_deploy.json | 11 + pkg/integrations/render/on_build.go | 119 +++ pkg/integrations/render/on_build_test.go | 198 +++++ pkg/integrations/render/on_deploy.go | 128 +++ pkg/integrations/render/on_deploy_test.go | 402 +++++++++ pkg/integrations/render/render.go | 262 ++++++ pkg/integrations/render/render_test.go | 513 +++++++++++ pkg/integrations/render/webhook_handler.go | 394 +++++++++ pkg/integrations/rootly/webhook_handler.go | 4 + pkg/integrations/semaphore/webhook_handler.go | 4 + pkg/integrations/sendgrid/webhook_handler.go | 4 + pkg/public/server.go | 24 + pkg/registry/webhook_handler.go | 11 + pkg/registry/webhook_handler_test.go | 15 + pkg/server/server.go | 1 + pkg/workers/contexts/integration_context.go | 30 + .../contexts/integration_context_test.go | 104 +++ test/support/application.go | 16 +- .../src/assets/icons/integrations/render.svg | 13 + web_src/src/pages/workflowv2/index.tsx | 2 +- .../mappers/aws/lambda/run_function.ts | 1 + web_src/src/pages/workflowv2/mappers/index.ts | 8 + .../pages/workflowv2/mappers/render/common.ts | 167 ++++ .../pages/workflowv2/mappers/render/deploy.ts | 142 +++ .../pages/workflowv2/mappers/render/index.ts | 17 + .../workflowv2/mappers/render/on_build.ts | 4 + .../workflowv2/mappers/render/on_deploy.ts | 4 + web_src/src/pages/workflowv2/mappers/types.ts | 1 + 
web_src/src/pages/workflowv2/utils.ts | 13 +- .../src/ui/BuildingBlocksSidebar/index.tsx | 3 + .../ui/componentSidebar/integrationIcons.tsx | 3 + 44 files changed, 5220 insertions(+), 10 deletions(-) create mode 100644 docs/components/Render.mdx create mode 100644 pkg/integrations/render/client.go create mode 100644 pkg/integrations/render/common.go create mode 100644 pkg/integrations/render/common_test.go create mode 100644 pkg/integrations/render/deploy.go create mode 100644 pkg/integrations/render/deploy_test.go create mode 100644 pkg/integrations/render/example.go create mode 100644 pkg/integrations/render/example_data_on_build.json create mode 100644 pkg/integrations/render/example_data_on_deploy.json create mode 100644 pkg/integrations/render/example_output_deploy.json create mode 100644 pkg/integrations/render/on_build.go create mode 100644 pkg/integrations/render/on_build_test.go create mode 100644 pkg/integrations/render/on_deploy.go create mode 100644 pkg/integrations/render/on_deploy_test.go create mode 100644 pkg/integrations/render/render.go create mode 100644 pkg/integrations/render/render_test.go create mode 100644 pkg/integrations/render/webhook_handler.go create mode 100644 web_src/src/assets/icons/integrations/render.svg create mode 100644 web_src/src/pages/workflowv2/mappers/render/common.ts create mode 100644 web_src/src/pages/workflowv2/mappers/render/deploy.ts create mode 100644 web_src/src/pages/workflowv2/mappers/render/index.ts create mode 100644 web_src/src/pages/workflowv2/mappers/render/on_build.ts create mode 100644 web_src/src/pages/workflowv2/mappers/render/on_deploy.ts diff --git a/docs/components/Render.mdx b/docs/components/Render.mdx new file mode 100644 index 0000000000..c4b52c3085 --- /dev/null +++ b/docs/components/Render.mdx @@ -0,0 +1,173 @@ +--- +title: "Render" +--- + +Deploy and manage Render services, and react to Render deploy/build events + +## Triggers + + + + + + +import { CardGrid, LinkCard } from 
"@astrojs/starlight/components"; + +## Actions + + + + + +## Instructions + +1. **API Key:** Create it in [Render Account Settings -> API Keys](https://dashboard.render.com/u/settings#api-keys). +2. **Workspace (optional):** Use your Render workspace ID (`usr-...` or `tea-...`) or workspace name. Leave empty to use the first workspace available to the API key. +3. **Workspace Plan:** Select **Professional** or **Organization / Enterprise** (used to choose webhook strategy). +4. **Auth:** SuperPlane sends requests to [Render API v1](https://api.render.com/v1/) using `Authorization: Bearer `. +5. **Webhooks:** SuperPlane configures Render webhooks automatically via the [Render Webhooks API](https://render.com/docs/webhooks). No manual setup is required. +6. **Troubleshooting:** Check [Render Dashboard -> Integrations -> Webhooks](https://dashboard.render.com/) and the [Render webhook docs](https://render.com/docs/webhooks). + +Note: **Plan requirement:** Render webhooks require a Professional plan or higher. + + + +## On Build + +The On Build trigger emits build-related Render events for one selected service. + +### Use Cases + +- **Build failure alerts**: Notify your team when builds fail +- **Build success hooks**: Trigger follow-up automation after successful builds + +### Configuration + +- **Service**: Required Render service. +- **Event Types**: Build event states to listen for. Defaults to `build_ended`. + +### Webhook Verification + +Render webhooks are validated using the secret generated when SuperPlane creates the webhook via the Render API. Verification checks: +- `webhook-id` +- `webhook-timestamp` +- `webhook-signature` (`v1,`) + +### Event Data + +The default output emits payload data fields like `buildId`, `eventId`, `serviceId`, `serviceName`, and `status` (when present). 
+ +### Example Data + +```json +{ + "data": { + "buildId": "bld-cukouhrtq21c73e9scng", + "createdAt": "2026-02-05T16:00:00.000000Z", + "eventId": "evj-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "failed" + }, + "timestamp": "2026-02-05T16:00:01.000000Z", + "type": "render.build.ended" +} +``` + + + +## On Deploy + +The On Deploy trigger emits deploy-related Render events for one selected service. + +### Use Cases + +- **Deploy notifications**: Notify Slack or PagerDuty when deploys succeed/fail +- **Post-deploy automation**: Trigger smoke tests after successful deploy completion events +- **Release orchestration**: Trigger downstream workflows when deploy stages change + +### Configuration + +- **Service**: Required Render service. +- **Event Types**: Deploy event states to listen for. Defaults to `deploy_ended`. + +### Webhook Verification + +Render webhooks are validated using the secret generated when SuperPlane creates the webhook via the Render API. Verification checks: +- `webhook-id` +- `webhook-timestamp` +- `webhook-signature` (`v1,`) + +### Event Data + +The default output emits payload data fields like `deployId`, `eventId`, `serviceId`, `serviceName`, and `status` (when present). + +### Example Data + +```json +{ + "data": { + "createdAt": "2026-02-05T16:00:00.000000Z", + "deployId": "dep-cukouhrtq21c73e9scng", + "eventId": "evj-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "succeeded" + }, + "timestamp": "2026-02-05T16:00:01.000000Z", + "type": "render.deploy.ended" +} +``` + + + +## Deploy + +The Deploy component starts a new deploy for a Render service and waits for it to complete. 
+ +### Use Cases + +- **Merge to deploy**: Trigger production deploys after a successful GitHub merge and CI pass +- **Scheduled redeploys**: Redeploy staging services on schedules or external content changes +- **Chained deploys**: Deploy service B when service A finishes successfully + +### How It Works + +1. Triggers a new deploy for the selected Render service via the Render API +2. Waits for the deploy to complete (via deploy_ended webhook and optional polling fallback) +3. Routes execution based on deploy outcome: + - **Success channel**: Deploy completed successfully + - **Failed channel**: Deploy failed or was cancelled + +### Configuration + +- **Service**: Render service to deploy +- **Clear Cache**: Clear build cache before deploying + +### Output Channels + +- **Success**: Emitted when the deploy completes successfully +- **Failed**: Emitted when the deploy fails or is cancelled + +### Notes + +- Uses the existing integration webhook for deploy_ended events (same as On Deploy trigger) +- Falls back to polling if the webhook does not arrive +- Requires a Render API key configured on the integration + +### Example Output + +```json +{ + "data": { + "createdAt": "2026-02-05T16:10:00.000000Z", + "deployId": "dep-cukouhrtq21c73e9scng", + "finishedAt": "2026-02-05T16:15:00.000000Z", + "serviceId": "srv-cukouhrtq21c73e9scng", + "status": "succeeded" + }, + "timestamp": "2026-02-05T16:15:00.000000Z", + "type": "render.deploy.finished" +} +``` + diff --git a/pkg/core/integration.go b/pkg/core/integration.go index 9c61833dfd..11afc97cce 100644 --- a/pkg/core/integration.go +++ b/pkg/core/integration.go @@ -101,6 +101,12 @@ type WebhookHandler interface { * Compare two webhook configurations to see if they are the same. */ CompareConfig(a, b any) (bool, error) + + /* + * Merge an existing webhook configuration with a requested one. + * Return changed=false when no update is needed. 
+ */ + Merge(current, requested any) (merged any, changed bool, err error) } type WebhookHandlerContext struct { diff --git a/pkg/core/trigger.go b/pkg/core/trigger.go index 7d14a270d4..7f4ef72e65 100644 --- a/pkg/core/trigger.go +++ b/pkg/core/trigger.go @@ -117,12 +117,17 @@ type WebhookRequestContext struct { Configuration any Webhook NodeWebhookContext Events EventContext + Integration IntegrationContext // // Return an execution context for a given execution, // through a referencing key-value pair. // FindExecutionByKV func(key string, value string) (*ExecutionContext, error) + + // Do not make HTTP calls as part of handling the webhook. This is useful for + // retrieving more data that is not part of the webhook payload. + HTTP HTTPContext } type NodeWebhookContext interface { diff --git a/pkg/integrations/github/webhook_handler.go b/pkg/integrations/github/webhook_handler.go index c3c7d0c06f..22378cfcc2 100644 --- a/pkg/integrations/github/webhook_handler.go +++ b/pkg/integrations/github/webhook_handler.go @@ -69,6 +69,10 @@ func (h *GitHubWebhookHandler) CompareConfig(a, b any) (bool, error) { return true, nil } +func (h *GitHubWebhookHandler) Merge(current, requested any) (any, bool, error) { + return current, false, nil +} + func (h *GitHubWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { metadata := Metadata{} err := mapstructure.Decode(ctx.Integration.GetMetadata(), &metadata) diff --git a/pkg/integrations/pagerduty/webhook_handler.go b/pkg/integrations/pagerduty/webhook_handler.go index e0e43842bc..fcf5ac1531 100644 --- a/pkg/integrations/pagerduty/webhook_handler.go +++ b/pkg/integrations/pagerduty/webhook_handler.go @@ -75,6 +75,10 @@ func (h *PagerDutyWebhookHandler) CompareConfig(a, b any) (bool, error) { return true, nil } +func (h *PagerDutyWebhookHandler) Merge(current, requested any) (any, bool, error) { + return current, false, nil +} + func (h *PagerDutyWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { 
client, err := NewClient(ctx.HTTP, ctx.Integration) if err != nil { diff --git a/pkg/integrations/render/client.go b/pkg/integrations/render/client.go new file mode 100644 index 0000000000..4a0f722fa1 --- /dev/null +++ b/pkg/integrations/render/client.go @@ -0,0 +1,583 @@ +package render + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/superplanehq/superplane/pkg/core" +) + +const defaultRenderBaseURL = "https://api.render.com/v1" + +type Client struct { + APIKey string + BaseURL string + http core.HTTPContext +} + +type APIError struct { + StatusCode int + Body string +} + +func (e *APIError) Error() string { + return fmt.Sprintf("request failed with %d: %s", e.StatusCode, e.Body) +} + +type Workspace struct { + ID string `json:"id"` + Name string `json:"name"` +} + +type workspaceWithCursor struct { + Cursor string `json:"cursor"` + // Render docs call this a workspace, but the API response uses the legacy "owner" key. + Workspace Workspace `json:"owner"` +} + +type Service struct { + ID string `json:"id"` + Name string `json:"name"` +} + +type serviceWithCursor struct { + Cursor string `json:"cursor"` + Service Service `json:"service"` +} + +type Webhook struct { + ID string `json:"id"` + WorkspaceID string `json:"ownerId"` + Name string `json:"name"` + URL string `json:"url"` + Enabled bool `json:"enabled"` + EventFilter []string `json:"eventFilter"` + Secret string `json:"secret"` +} + +type webhookWithCursor struct { + Cursor string `json:"cursor"` + Webhook Webhook `json:"webhook"` +} + +type CreateWebhookRequest struct { + WorkspaceID string `json:"ownerId"` + Name string `json:"name"` + URL string `json:"url"` + Enabled bool `json:"enabled"` + EventFilter []string `json:"eventFilter"` +} + +type UpdateWebhookRequest struct { + Name string `json:"name,omitempty"` + URL string `json:"url,omitempty"` + Enabled bool `json:"enabled"` + EventFilter []string `json:"eventFilter,omitempty"` +} + +type 
deployRequest struct { + ClearCache string `json:"clearCache"` +} + +type triggerDeployResponse struct { + Deploy DeployResponse `json:"deploy"` +} + +type DeployResponse struct { + ID string `json:"id"` + Status string `json:"status"` + CreatedAt string `json:"createdAt"` + FinishedAt string `json:"finishedAt"` +} + +type EventResponse struct { + ID string `json:"id"` + Timestamp string `json:"timestamp"` + ServiceID string `json:"serviceId"` + Type string `json:"type"` + Details EventResponseDetails `json:"details"` +} + +type EventResponseDetails interface{} + +type EventDeployResponseDetails struct { + DeployID string `json:"deployId"` +} + +type EventBuildResponseDetails struct { + BuildID string `json:"buildId"` +} + +type EventUnknownResponseDetails struct{} + +type EventResponseResourceDetails struct { + ID string `json:"id"` +} + +type eventResponsePayload struct { + ID string `json:"id"` + Timestamp string `json:"timestamp"` + ServiceID string `json:"serviceId"` + Type string `json:"type"` + Details json.RawMessage `json:"details"` +} + +type eventResponseDetailsEnvelope struct { + DeployID string `json:"deployId"` + BuildID string `json:"buildId"` + ID string `json:"id"` + Deploy *EventResponseResourceDetails `json:"deploy"` + Build *EventResponseResourceDetails `json:"build"` +} + +func (r *EventResponse) UnmarshalJSON(data []byte) error { + payload := eventResponsePayload{} + if err := json.Unmarshal(data, &payload); err != nil { + return err + } + + r.ID = payload.ID + r.Timestamp = payload.Timestamp + r.ServiceID = payload.ServiceID + r.Type = payload.Type + r.Details = nil + + if len(payload.Details) == 0 { + return nil + } + + var detailsValue any + if err := json.Unmarshal(payload.Details, &detailsValue); err != nil { + return err + } + if detailsValue == nil { + return nil + } + + details := eventResponseDetailsEnvelope{} + if err := json.Unmarshal(payload.Details, &details); err != nil { + return err + } + + if deployID := 
resolveDeployID(details, payload.Type); deployID != "" { + r.Details = EventDeployResponseDetails{DeployID: deployID} + return nil + } + + if buildID := resolveBuildID(details, payload.Type); buildID != "" { + r.Details = EventBuildResponseDetails{BuildID: buildID} + return nil + } + + r.Details = EventUnknownResponseDetails{} + + return nil +} + +func resolveDeployID(details eventResponseDetailsEnvelope, eventType string) string { + if details.DeployID != "" { + return details.DeployID + } + + if details.Deploy != nil && details.Deploy.ID != "" { + return details.Deploy.ID + } + + if details.ID != "" && looksLikeDeployEventType(strings.ToLower(eventType)) { + return details.ID + } + + return "" +} + +func resolveBuildID(details eventResponseDetailsEnvelope, eventType string) string { + if details.BuildID != "" { + return details.BuildID + } + + if details.Build != nil && details.Build.ID != "" { + return details.Build.ID + } + + if details.ID != "" && looksLikeBuildEventType(strings.ToLower(eventType)) { + return details.ID + } + + return "" +} + +func looksLikeDeployEventType(eventType string) bool { + return strings.Contains(eventType, "deploy") +} + +func looksLikeBuildEventType(eventType string) bool { + return strings.Contains(eventType, "build") +} + +func NewClient(httpClient core.HTTPContext, ctx core.IntegrationContext) (*Client, error) { + if ctx == nil { + return nil, fmt.Errorf("no integration context") + } + + apiKey, err := ctx.GetConfig("apiKey") + if err != nil { + return nil, err + } + + trimmedAPIKey := strings.TrimSpace(string(apiKey)) + if trimmedAPIKey == "" { + return nil, fmt.Errorf("apiKey is required") + } + + return &Client{ + APIKey: trimmedAPIKey, + BaseURL: defaultRenderBaseURL, + http: httpClient, + }, nil +} + +func (c *Client) Verify() error { + query := url.Values{} + query.Set("limit", "1") + _, _, err := c.execRequestWithResponse(http.MethodGet, "/services", query, nil) + return err +} + +func (c *Client) ListWorkspaces() 
([]Workspace, error) { + query := url.Values{} + query.Set("limit", "100") + + _, body, err := c.execRequestWithResponse(http.MethodGet, "/owners", query, nil) + if err != nil { + return nil, err + } + + return parseWorkspaces(body) +} + +func (c *Client) ListServices(workspaceID string) ([]Service, error) { + query := url.Values{} + query.Set("limit", "100") + if strings.TrimSpace(workspaceID) != "" { + query.Set("ownerId", strings.TrimSpace(workspaceID)) + } + + _, body, err := c.execRequestWithResponse(http.MethodGet, "/services", query, nil) + if err != nil { + return nil, err + } + + return parseServices(body) +} + +func (c *Client) ListWebhooks(workspaceID string) ([]Webhook, error) { + if workspaceID == "" { + return nil, fmt.Errorf("workspaceID is required") + } + + query := url.Values{} + query.Set("ownerId", workspaceID) + query.Set("limit", "100") + + _, body, err := c.execRequestWithResponse(http.MethodGet, "/webhooks", query, nil) + if err != nil { + return nil, err + } + + return parseWebhooks(body) +} + +func (c *Client) GetWebhook(webhookID string) (*Webhook, error) { + if webhookID == "" { + return nil, fmt.Errorf("webhookID is required") + } + + _, body, err := c.execRequestWithResponse(http.MethodGet, "/webhooks/"+url.PathEscape(webhookID), nil, nil) + if err != nil { + return nil, err + } + + return parseWebhook(body) +} + +func (c *Client) CreateWebhook(request CreateWebhookRequest) (*Webhook, error) { + if request.WorkspaceID == "" { + return nil, fmt.Errorf("workspaceID is required") + } + if request.URL == "" { + return nil, fmt.Errorf("url is required") + } + if request.Name == "" { + return nil, fmt.Errorf("name is required") + } + + _, body, err := c.execRequestWithResponse(http.MethodPost, "/webhooks", nil, request) + if err != nil { + return nil, err + } + + return parseWebhook(body) +} + +func (c *Client) UpdateWebhook(webhookID string, request UpdateWebhookRequest) (*Webhook, error) { + if webhookID == "" { + return nil, 
fmt.Errorf("webhookID is required") + } + + _, body, err := c.execRequestWithResponse( + http.MethodPatch, + "/webhooks/"+url.PathEscape(webhookID), + nil, + request, + ) + if err != nil { + return nil, err + } + + return parseWebhook(body) +} + +func (c *Client) DeleteWebhook(webhookID string) error { + if webhookID == "" { + return fmt.Errorf("webhookID is required") + } + + _, _, err := c.execRequestWithResponse(http.MethodDelete, "/webhooks/"+url.PathEscape(webhookID), nil, nil) + return err +} + +func (c *Client) TriggerDeploy(serviceID string, clearCache bool) (DeployResponse, error) { + if serviceID == "" { + return DeployResponse{}, fmt.Errorf("serviceID is required") + } + + clearCacheValue := "do_not_clear" + if clearCache { + clearCacheValue = "clear" + } + + _, body, err := c.execRequestWithResponse( + http.MethodPost, + "/services/"+url.PathEscape(serviceID)+"/deploys", + nil, + deployRequest{ClearCache: clearCacheValue}, + ) + if err != nil { + return DeployResponse{}, err + } + + wrappedResponse := triggerDeployResponse{} + if err := json.Unmarshal(body, &wrappedResponse); err == nil && wrappedResponse.Deploy.ID != "" { + return wrappedResponse.Deploy, nil + } + + deployResponse := DeployResponse{} + if err := json.Unmarshal(body, &deployResponse); err != nil { + return DeployResponse{}, fmt.Errorf("failed to unmarshal deploy response: %w", err) + } + + return deployResponse, nil +} + +func (c *Client) GetDeploy(serviceID string, deployID string) (DeployResponse, error) { + if serviceID == "" { + return DeployResponse{}, fmt.Errorf("serviceID is required") + } + if deployID == "" { + return DeployResponse{}, fmt.Errorf("deployID is required") + } + + _, body, err := c.execRequestWithResponse( + http.MethodGet, + "/services/"+url.PathEscape(serviceID)+"/deploys/"+url.PathEscape(deployID), + nil, + nil, + ) + if err != nil { + return DeployResponse{}, err + } + + deployResponse := DeployResponse{} + if err := json.Unmarshal(body, &deployResponse); err 
!= nil { + return DeployResponse{}, fmt.Errorf("failed to unmarshal deploy response: %w", err) + } + return deployResponse, nil +} + +func (c *Client) GetEvent(eventID string) (EventResponse, error) { + if eventID == "" { + return EventResponse{}, fmt.Errorf("eventID is required") + } + + _, body, err := c.execRequestWithResponse( + http.MethodGet, + "/events/"+url.PathEscape(eventID), + nil, + nil, + ) + if err != nil { + return EventResponse{}, err + } + + response := EventResponse{} + if err := json.Unmarshal(body, &response); err != nil { + return EventResponse{}, fmt.Errorf("failed to unmarshal event response: %w", err) + } + + return response, nil +} + +func parseWorkspaces(body []byte) ([]Workspace, error) { + withCursor := []workspaceWithCursor{} + if err := json.Unmarshal(body, &withCursor); err == nil && len(withCursor) > 0 { + return parseWorkspacesWithCursor(withCursor), nil + } + + plainWorkspaces := []Workspace{} + if err := json.Unmarshal(body, &plainWorkspaces); err != nil { + return nil, fmt.Errorf("failed to unmarshal workspaces response: %w", err) + } + + return plainWorkspaces, nil +} + +func parseServices(body []byte) ([]Service, error) { + withCursor := []serviceWithCursor{} + if err := json.Unmarshal(body, &withCursor); err == nil && len(withCursor) > 0 { + return parseServicesWithCursor(withCursor), nil + } + + plainServices := []Service{} + if err := json.Unmarshal(body, &plainServices); err != nil { + return nil, fmt.Errorf("failed to unmarshal services response: %w", err) + } + + return plainServices, nil +} + +func parseWebhooks(body []byte) ([]Webhook, error) { + withCursor := []webhookWithCursor{} + if err := json.Unmarshal(body, &withCursor); err == nil && len(withCursor) > 0 { + return parseWebhooksWithCursor(withCursor), nil + } + + plainWebhooks := []Webhook{} + if err := json.Unmarshal(body, &plainWebhooks); err != nil { + return nil, fmt.Errorf("failed to unmarshal webhooks response: %w", err) + } + + return plainWebhooks, nil +} 
+ +func parseWebhook(body []byte) (*Webhook, error) { + webhook := Webhook{} + if err := json.Unmarshal(body, &webhook); err == nil && webhook.ID != "" { + return &webhook, nil + } + + wrapper := struct { + Webhook Webhook `json:"webhook"` + }{} + if err := json.Unmarshal(body, &wrapper); err != nil { + return nil, fmt.Errorf("failed to unmarshal webhook response: %w", err) + } + + if wrapper.Webhook.ID == "" { + return nil, fmt.Errorf("webhook id is missing in response") + } + + return &wrapper.Webhook, nil +} + +func parseWorkspacesWithCursor(withCursor []workspaceWithCursor) []Workspace { + workspaces := make([]Workspace, 0, len(withCursor)) + for _, item := range withCursor { + if item.Workspace.ID == "" { + continue + } + + workspaces = append(workspaces, item.Workspace) + } + + return workspaces +} + +func parseServicesWithCursor(withCursor []serviceWithCursor) []Service { + services := make([]Service, 0, len(withCursor)) + for _, item := range withCursor { + if item.Service.ID == "" { + continue + } + + services = append(services, item.Service) + } + + return services +} + +func parseWebhooksWithCursor(withCursor []webhookWithCursor) []Webhook { + webhooks := make([]Webhook, 0, len(withCursor)) + for _, item := range withCursor { + if item.Webhook.ID == "" { + continue + } + + webhooks = append(webhooks, item.Webhook) + } + + return webhooks +} + +func (c *Client) execRequestWithResponse( + method string, + path string, + query url.Values, + payload any, +) (*http.Response, []byte, error) { + endpoint := c.BaseURL + path + if len(query) > 0 { + endpoint += "?" 
+ query.Encode() + } + + var body io.Reader + if payload != nil { + encodedBody, err := json.Marshal(payload) + if err != nil { + return nil, nil, fmt.Errorf("failed to marshal request: %w", err) + } + body = bytes.NewReader(encodedBody) + } + + req, err := http.NewRequest(method, endpoint, body) + if err != nil { + return nil, nil, fmt.Errorf("failed to build request: %w", err) + } + + req.Header.Set("Authorization", "Bearer "+c.APIKey) + req.Header.Set("Accept", "application/json") + if payload != nil { + req.Header.Set("Content-Type", "application/json") + } + + res, err := c.http.Do(req) + if err != nil { + return nil, nil, fmt.Errorf("request failed: %w", err) + } + defer res.Body.Close() + + responseBody, err := io.ReadAll(res.Body) + if err != nil { + return nil, nil, fmt.Errorf("failed to read response body: %w", err) + } + + if res.StatusCode < http.StatusOK || res.StatusCode >= http.StatusMultipleChoices { + return nil, nil, &APIError{StatusCode: res.StatusCode, Body: string(responseBody)} + } + + return res, responseBody, nil +} diff --git a/pkg/integrations/render/common.go b/pkg/integrations/render/common.go new file mode 100644 index 0000000000..7c95ccec4b --- /dev/null +++ b/pkg/integrations/render/common.go @@ -0,0 +1,808 @@ +package render + +import ( + "crypto/hmac" + "crypto/sha256" + "encoding/base64" + "encoding/json" + "fmt" + "net/http" + "slices" + "sort" + "strconv" + "strings" + "time" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type OnResourceEventConfiguration struct { + Service string `json:"service" mapstructure:"service"` + EventTypes []string `json:"eventTypes" mapstructure:"eventTypes"` +} + +type ServiceMetadata struct { + ID string `json:"id"` + Name string `json:"name"` +} + +type OnResourceEventMetadata struct { + Service *ServiceMetadata `json:"service"` +} + +type WebhookConfiguration struct { + Strategy string 
`json:"strategy" mapstructure:"strategy"` + ResourceType string `json:"resourceType,omitempty" mapstructure:"resourceType"` + EventTypes []string `json:"eventTypes,omitempty" mapstructure:"eventTypes"` +} + +const ( + workspacePlanProfessional = "professional" + workspacePlanOrganization = "organization" + + webhookStrategyIntegration = "integration" + webhookStrategyResourceType = "resource_type" + + webhookResourceTypeDeploy = "deploy" + webhookResourceTypeBuild = "build" + + webhookTimestampMaxSkew = 5 * time.Minute +) + +func webhookConfigurationForResource( + integration core.IntegrationContext, + resourceType string, + eventTypes []string, +) WebhookConfiguration { + metadata := Metadata{} + if err := mapstructure.Decode(integration.GetMetadata(), &metadata); err != nil { + return WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + EventTypes: normalizeWebhookEventTypes(eventTypes), + } + } + + workspacePlan := metadata.workspacePlan() + if workspacePlan != workspacePlanOrganization { + return WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + EventTypes: normalizeWebhookEventTypes(eventTypes), + } + } + + normalizedResourceType := strings.ToLower(strings.TrimSpace(resourceType)) + switch normalizedResourceType { + case webhookResourceTypeDeploy, webhookResourceTypeBuild: + return WebhookConfiguration{ + Strategy: webhookStrategyResourceType, + ResourceType: normalizedResourceType, + EventTypes: normalizeWebhookEventTypes(eventTypes), + } + default: + return WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + EventTypes: normalizeWebhookEventTypes(eventTypes), + } + } +} + +func decodeWebhookConfiguration(configuration any) (WebhookConfiguration, error) { + webhookConfiguration := WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + } + + if configuration == nil { + return webhookConfiguration, nil + } + + if err := mapstructure.Decode(configuration, &webhookConfiguration); err != nil { + return 
WebhookConfiguration{}, err + } + + return normalizeWebhookConfiguration(webhookConfiguration), nil +} + +func normalizeWebhookConfiguration(configuration WebhookConfiguration) WebhookConfiguration { + normalizedConfiguration := WebhookConfiguration{ + Strategy: strings.ToLower(strings.TrimSpace(configuration.Strategy)), + ResourceType: strings.ToLower(strings.TrimSpace(configuration.ResourceType)), + EventTypes: normalizeWebhookEventTypes(configuration.EventTypes), + } + + if normalizedConfiguration.Strategy == "" { + if normalizedConfiguration.ResourceType == "" { + normalizedConfiguration.Strategy = webhookStrategyIntegration + } else { + normalizedConfiguration.Strategy = webhookStrategyResourceType + } + } + + if normalizedConfiguration.Strategy != webhookStrategyResourceType { + return WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + EventTypes: normalizedConfiguration.EventTypes, + } + } + + switch normalizedConfiguration.ResourceType { + case webhookResourceTypeDeploy, webhookResourceTypeBuild: + return normalizedConfiguration + default: + return WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + EventTypes: normalizedConfiguration.EventTypes, + } + } +} + +func webhookName(configuration WebhookConfiguration) string { + configuration = normalizeWebhookConfiguration(configuration) + if configuration.Strategy == webhookStrategyResourceType && + configuration.ResourceType == webhookResourceTypeDeploy { + return "SuperPlane Deploy" + } + + if configuration.Strategy == webhookStrategyResourceType && + configuration.ResourceType == webhookResourceTypeBuild { + return "SuperPlane Build" + } + + return "SuperPlane" +} + +func webhookEventFilter(configuration WebhookConfiguration) []string { + configuration = normalizeWebhookConfiguration(configuration) + allowedEventTypes := allowedEventTypesForWebhook(configuration) + requestedEventTypes := filterAllowedEventTypes(configuration.EventTypes, allowedEventTypes) + if 
len(requestedEventTypes) > 0 { + return requestedEventTypes + } + + defaultEventTypes := defaultEventTypesForWebhook(configuration) + if len(defaultEventTypes) > 0 { + return defaultEventTypes + } + + return allowedEventTypes +} + +func combineDeployAndBuildEventTypes(deploy, build []string) []string { + out := make([]string, 0, len(deploy)+len(build)) + out = append(out, deploy...) + out = append(out, build...) + return normalizeWebhookEventTypes(out) +} + +func allowedEventTypesForWebhook(configuration WebhookConfiguration) []string { + if configuration.Strategy == webhookStrategyResourceType { + switch configuration.ResourceType { + case webhookResourceTypeDeploy: + return deployAllowedEventTypes + case webhookResourceTypeBuild: + return buildAllowedEventTypes + } + } + return combineDeployAndBuildEventTypes(deployAllowedEventTypes, buildAllowedEventTypes) +} + +func defaultEventTypesForWebhook(configuration WebhookConfiguration) []string { + if configuration.Strategy == webhookStrategyResourceType { + switch configuration.ResourceType { + case webhookResourceTypeDeploy: + return normalizeWebhookEventTypes(deployDefaultEventTypes) + case webhookResourceTypeBuild: + return normalizeWebhookEventTypes(buildDefaultEventTypes) + } + } + return combineDeployAndBuildEventTypes(deployDefaultEventTypes, buildDefaultEventTypes) +} + +func webhookConfigurationsEqual(a, b WebhookConfiguration) bool { + normalizedA := normalizeWebhookConfiguration(a) + normalizedB := normalizeWebhookConfiguration(b) + + return normalizedA.Strategy == normalizedB.Strategy && + normalizedA.ResourceType == normalizedB.ResourceType && + slices.Equal(normalizedA.EventTypes, normalizedB.EventTypes) +} + +func onResourceEventConfigurationFields( + eventTypeOptions []configuration.FieldOption, + defaultEventTypes []string, +) []configuration.Field { + return []configuration.Field{ + { + Name: "service", + Label: "Service", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + 
TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "service", + }, + }, + Description: "Render service to listen to", + }, + { + Name: "eventTypes", + Label: "Event Types", + Type: configuration.FieldTypeMultiSelect, + Required: false, + Default: defaultEventTypes, + Description: "Render event types to listen for", + TypeOptions: &configuration.TypeOptions{ + MultiSelect: &configuration.MultiSelectTypeOptions{ + Options: eventTypeOptions, + }, + }, + }, + } +} + +func decodeOnResourceEventConfiguration(configuration any) (OnResourceEventConfiguration, error) { + config := OnResourceEventConfiguration{} + if err := mapstructure.Decode(configuration, &config); err != nil { + return config, err + } + + config.Service = strings.TrimSpace(config.Service) + config.EventTypes = normalizeWebhookEventTypes(config.EventTypes) + return config, nil +} + +func ensureServiceInMetadata(ctx core.TriggerContext, config OnResourceEventConfiguration) error { + serviceValue := strings.TrimSpace(config.Service) + if serviceValue == "" { + return fmt.Errorf("service is required") + } + + nodeMetadata := OnResourceEventMetadata{} + if err := mapstructure.Decode(ctx.Metadata.Get(), &nodeMetadata); err != nil { + return fmt.Errorf("failed to decode node metadata: %w", err) + } + + if nodeMetadata.Service != nil && + (nodeMetadata.Service.ID == serviceValue || nodeMetadata.Service.Name == serviceValue) { + return nil + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + workspaceID, err := workspaceIDForIntegration(client, ctx.Integration) + if err != nil { + return err + } + + services, err := client.ListServices(workspaceID) + if err != nil { + return fmt.Errorf("failed to list Render services: %w", err) + } + + service := findService(services, serviceValue) + if service == nil { + return fmt.Errorf("service %s is not accessible with this API key", serviceValue) + } + + return 
ctx.Metadata.Set(OnResourceEventMetadata{ + Service: &ServiceMetadata{ + ID: service.ID, + Name: service.Name, + }, + }) +} + +func findService(services []Service, value string) *Service { + trimmedValue := strings.TrimSpace(value) + if trimmedValue == "" { + return nil + } + + idIndex := slices.IndexFunc(services, func(service Service) bool { + return strings.TrimSpace(service.ID) == trimmedValue + }) + if idIndex >= 0 { + return &services[idIndex] + } + + nameIndex := slices.IndexFunc(services, func(service Service) bool { + return strings.EqualFold(strings.TrimSpace(service.Name), trimmedValue) + }) + if nameIndex < 0 { + return nil + } + + return &services[nameIndex] +} + +func handleOnResourceEventWebhook( + ctx core.WebhookRequestContext, + config OnResourceEventConfiguration, + allowedEventTypes []string, + defaultEventTypes []string, + requiredResourceIDField string, +) (int, error) { + if err := verifyWebhookSignature(ctx); err != nil { + return http.StatusForbidden, err + } + + payload := map[string]any{} + if err := json.Unmarshal(ctx.Body, &payload); err != nil { + return http.StatusBadRequest, fmt.Errorf("error parsing request body: %w", err) + } + + eventType := readString(payload["type"]) + if eventType == "" { + return http.StatusBadRequest, fmt.Errorf("missing event type") + } + + if !slices.Contains(allowedEventTypes, eventType) { + return http.StatusOK, nil + } + + data := readMap(payload["data"]) + if readString(data["serviceId"]) == "" { + serviceID := readString(payload["serviceId"]) + if serviceID != "" { + data["serviceId"] = serviceID + } + } + + eventID := eventIDFromWebhookPayload(payload, data) + resolvedEvent, err := resolveWebhookEvent(ctx, eventID) + if err == nil { + data = mergeWebhookEventData(data, eventID, resolvedEvent) + } + + serviceID := readString(data["serviceId"]) + if config.Service == "" || serviceID == "" || config.Service != serviceID { + return http.StatusOK, nil + } + + selectedEventTypes := 
filterAllowedEventTypes(config.EventTypes, allowedEventTypes) + if len(selectedEventTypes) == 0 { + selectedEventTypes = defaultEventTypes + } + + if !slices.Contains(selectedEventTypes, eventType) { + return http.StatusOK, nil + } + + ensureResourceIDField(data, eventID, requiredResourceIDField) + removeAmbiguousIDField(data, eventID, requiredResourceIDField) + + if err := ctx.Events.Emit(payloadType(eventType), data); err != nil { + return http.StatusInternalServerError, fmt.Errorf("error emitting event: %w", err) + } + + return http.StatusOK, nil +} + +func eventIDFromWebhookPayload(payload map[string]any, data map[string]any) string { + eventID := readString(payload["id"]) + if eventID != "" { + return eventID + } + + return readString(data["id"]) +} + +func resolveWebhookEvent(ctx core.WebhookRequestContext, eventID string) (EventResponse, error) { + if eventID == "" || ctx.Integration == nil || ctx.HTTP == nil { + return EventResponse{}, nil + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return EventResponse{}, err + } + + return client.GetEvent(eventID) +} + +type EventDetailValues struct { + DeployID string + BuildID string +} + +func eventDetailValues(event EventResponse) EventDetailValues { + if event.Details == nil { + return EventDetailValues{} + } + + values := EventDetailValues{} + + switch details := event.Details.(type) { + case EventDeployResponseDetails: + values.DeployID = readString(details.DeployID) + case EventBuildResponseDetails: + values.BuildID = readString(details.BuildID) + } + + return values +} + +func mergeWebhookEventData( + data map[string]any, + eventID string, + event EventResponse, +) map[string]any { + mergedData := map[string]any{} + for key, value := range data { + mergedData[key] = value + } + + if readString(mergedData["serviceId"]) == "" { + serviceID := readString(event.ServiceID) + if serviceID != "" { + mergedData["serviceId"] = serviceID + } + } + + if event.Details == nil { + return 
mergedData + } + + detailValues := eventDetailValues(event) + resourceID := detailValues.DeployID + if resourceID == "" { + resourceID = detailValues.BuildID + } + if resourceID == "" { + return mergedData + } + + if readString(mergedData["deployId"]) == "" && detailValues.DeployID != "" { + mergedData["deployId"] = detailValues.DeployID + } + + if readString(mergedData["buildId"]) == "" && detailValues.BuildID != "" { + mergedData["buildId"] = detailValues.BuildID + } + + if shouldReplaceResourceID(readString(mergedData["id"]), eventID) && eventID != "" { + mergedData["eventId"] = eventID + } + + return mergedData +} + +func shouldReplaceResourceID(currentID string, eventID string) bool { + if currentID == "" { + return true + } + + if eventID == "" { + return false + } + + return currentID == eventID +} + +func ensureResourceIDField( + data map[string]any, + eventID string, + requiredField string, +) { + field := requiredField + if field == "" { + return + } + + if readString(data[field]) != "" { + return + } + + resourceID := readString(data["id"]) + if resourceID == "" { + return + } + + if eventID != "" && resourceID == eventID { + return + } + + data[field] = resourceID +} + +func removeAmbiguousIDField(data map[string]any, eventID string, requiredField string) { + if requiredField == "" { + return + } + + if readString(data["eventId"]) == "" && eventID != "" { + data["eventId"] = eventID + } + + delete(data, "id") +} + +func filterAllowedEventTypes(eventTypes []string, allowedEventTypes []string) []string { + filteredEventTypes := make([]string, 0, len(eventTypes)) + for _, eventType := range eventTypes { + if !slices.Contains(allowedEventTypes, eventType) { + continue + } + + if slices.Contains(filteredEventTypes, eventType) { + continue + } + + filteredEventTypes = append(filteredEventTypes, eventType) + } + + return filteredEventTypes +} + +func normalizeWebhookEventTypes(eventTypes []string) []string { + normalizedEventTypes := make([]string, 0, 
len(eventTypes))
+	for _, eventType := range eventTypes {
+		// Canonical form: lowercased, trimmed, empties dropped, duplicates skipped.
+		normalizedEventType := strings.ToLower(strings.TrimSpace(eventType))
+		if normalizedEventType == "" {
+			continue
+		}
+
+		if slices.Contains(normalizedEventTypes, normalizedEventType) {
+			continue
+		}
+
+		normalizedEventTypes = append(normalizedEventTypes, normalizedEventType)
+	}
+
+	// Sorted so normalized slices can be compared with slices.Equal elsewhere
+	// (see webhookConfigurationsEqual).
+	sort.Strings(normalizedEventTypes)
+	return normalizedEventTypes
+}
+
+// verifyWebhookSignature authenticates an incoming webhook delivery following
+// the Standard Webhooks scheme: an HMAC-SHA256 over
+// "webhook-id.webhook-timestamp.<body>" keyed with the shared secret must
+// match one of the base64 signatures carried in the "webhook-signature"
+// header. Deliveries whose timestamp deviates from now by more than
+// webhookTimestampMaxSkew are rejected to limit replay attacks.
+func verifyWebhookSignature(ctx core.WebhookRequestContext) error {
+	if ctx.Webhook == nil {
+		return fmt.Errorf("missing webhook context")
+	}
+
+	secret, err := ctx.Webhook.GetSecret()
+	if err != nil {
+		// The underlying error is not propagated to the webhook caller.
+		return fmt.Errorf("error reading webhook secret")
+	}
+
+	if len(secret) == 0 {
+		return fmt.Errorf("missing webhook secret")
+	}
+
+	webhookID := strings.TrimSpace(ctx.Headers.Get("webhook-id"))
+	webhookTimestamp := strings.TrimSpace(ctx.Headers.Get("webhook-timestamp"))
+	signatureHeader := strings.TrimSpace(ctx.Headers.Get("webhook-signature"))
+
+	if webhookID == "" || webhookTimestamp == "" || signatureHeader == "" {
+		return fmt.Errorf("missing signature headers")
+	}
+
+	timestamp, err := parseWebhookTimestamp(webhookTimestamp)
+	if err != nil {
+		return fmt.Errorf("invalid webhook timestamp")
+	}
+
+	if absDuration(time.Now().UTC().Sub(timestamp)) > webhookTimestampMaxSkew {
+		return fmt.Errorf("webhook timestamp expired")
+	}
+
+	signatures, err := parseWebhookSignatures(signatureHeader)
+	if err != nil {
+		return err
+	}
+
+	// Several key candidates: the raw secret plus, for "whsec_"-style secrets,
+	// its base64-decoded form (see signingKeys).
+	signingKeys := signingKeys(secret)
+	payloadPrefix := webhookID + "." + webhookTimestamp + "."
+	secretText := []byte(strings.TrimSpace(string(secret)))
+
+	for _, key := range signingKeys {
+		h := hmac.New(sha256.New, key)
+		h.Write([]byte(payloadPrefix))
+		h.Write(ctx.Body)
+		if matchesAnySignature(signatures, h.Sum(nil)) {
+			return nil
+		}
+
+		// Compatibility fallback for providers that document signature input as:
+		// webhook-id.webhook-timestamp.body.webhook-secret
+		h = hmac.New(sha256.New, key)
+		h.Write([]byte(payloadPrefix))
+		h.Write(ctx.Body)
+		h.Write([]byte("."))
+		h.Write(secretText)
+		if matchesAnySignature(signatures, h.Sum(nil)) {
+			return nil
+		}
+	}
+
+	return fmt.Errorf("invalid signature")
+}
+
+// parseWebhookSignatures extracts candidate HMAC digests from a
+// "webhook-signature" header value. Only "v1" entries are considered;
+// undecodable or duplicate values are skipped silently.
+func parseWebhookSignatures(headerValue string) ([][]byte, error) {
+	trimmed := strings.TrimSpace(headerValue)
+	if trimmed == "" {
+		return nil, fmt.Errorf("invalid signature header")
+	}
+
+	// Format follows Standard Webhooks header values and may include multiple signatures,
+	// e.g. "v1,<base64>" or "v1,<base64> v1,<base64>".
+	rawEntries := strings.Fields(trimmed)
+	if len(rawEntries) == 0 {
+		rawEntries = []string{trimmed}
+	}
+
+	signatures := make([][]byte, 0, len(rawEntries))
+	seen := map[string]struct{}{}
+	for _, entry := range rawEntries {
+		parts := strings.Split(strings.TrimSpace(entry), ",")
+		if len(parts) < 2 {
+			continue
+		}
+
+		version := strings.TrimSpace(parts[0])
+		if version != "v1" {
+			continue
+		}
+
+		for _, encoded := range parts[1:] {
+			signature := strings.TrimSpace(encoded)
+			if signature == "" {
+				continue
+			}
+
+			decoded, decodeErr := decodeBase64(signature)
+			if decodeErr != nil {
+				continue
+			}
+
+			// Deduplicate by canonical base64 of the decoded bytes.
+			key := base64.StdEncoding.EncodeToString(decoded)
+			if _, exists := seen[key]; exists {
+				continue
+			}
+
+			seen[key] = struct{}{}
+			signatures = append(signatures, decoded)
+		}
+	}
+
+	if len(signatures) == 0 {
+		return nil, fmt.Errorf("invalid signature")
+	}
+
+	return signatures, nil
+}
+
+// decodeBase64 tries the four common base64 variants (standard/URL alphabet,
+// padded/unpadded) in turn and returns the first successful decoding.
+func decodeBase64(value string) ([]byte, error) {
+	decoders := []*base64.Encoding{
+		base64.StdEncoding,
+		base64.RawStdEncoding,
+
base64.URLEncoding,
+		base64.RawURLEncoding,
+	}
+
+	for _, decoder := range decoders {
+		decoded, err := decoder.DecodeString(value)
+		if err == nil {
+			return decoded, nil
+		}
+	}
+
+	return nil, fmt.Errorf("failed to decode base64 value")
+}
+
+// signingKeys returns the HMAC key candidates derived from the stored webhook
+// secret: the trimmed secret itself and, when it carries a "whsec_"/"whsec-"
+// prefix (Standard Webhooks convention), the base64-decoded remainder too.
+func signingKeys(secret []byte) [][]byte {
+	trimmedSecret := strings.TrimSpace(string(secret))
+	if trimmedSecret == "" {
+		return [][]byte{secret}
+	}
+
+	keys := [][]byte{[]byte(trimmedSecret)}
+	encodedSecret := trimmedSecret
+	switch {
+	case strings.HasPrefix(trimmedSecret, "whsec_"):
+		encodedSecret = strings.TrimPrefix(trimmedSecret, "whsec_")
+	case strings.HasPrefix(trimmedSecret, "whsec-"):
+		encodedSecret = strings.TrimPrefix(trimmedSecret, "whsec-")
+	default:
+		return keys
+	}
+
+	decodedSecret, err := decodeBase64(encodedSecret)
+	if err != nil || len(decodedSecret) == 0 {
+		return keys
+	}
+
+	// Skip the decoded form when it is byte-identical to a key we already have.
+	key := base64.StdEncoding.EncodeToString(decodedSecret)
+	if slices.ContainsFunc(keys, func(existing []byte) bool {
+		return base64.StdEncoding.EncodeToString(existing) == key
+	}) {
+		return keys
+	}
+
+	return append(keys, decodedSecret)
+}
+
+// matchesAnySignature reports whether any candidate signature equals the
+// expected digest, using hmac.Equal for constant-time comparison.
+func matchesAnySignature(signatures [][]byte, expected []byte) bool {
+	return slices.ContainsFunc(signatures, func(signature []byte) bool {
+		return hmac.Equal(signature, expected)
+	})
+}
+
+// payloadType maps a Render event type such as "build_ended" to the emitted
+// payload type "render.build.ended", falling back to "render.event" when the
+// input is empty or contains no usable parts.
+func payloadType(eventType string) string {
+	trimmedEventType := strings.TrimSpace(eventType)
+	if trimmedEventType == "" {
+		return "render.event"
+	}
+
+	parts := strings.Split(trimmedEventType, "_")
+	dotCaseParts := make([]string, 0, len(parts))
+	for _, part := range parts {
+		trimmedPart := strings.TrimSpace(part)
+		if trimmedPart == "" {
+			continue
+		}
+
+		dotCaseParts = append(dotCaseParts, strings.ToLower(trimmedPart))
+	}
+
+	if len(dotCaseParts) == 0 {
+		return "render.event"
+	}
+
+	return "render." + strings.Join(dotCaseParts, ".")
+}
+
+// parseWebhookTimestamp accepts either Unix seconds or an RFC 3339 timestamp
+// and returns the parsed time in UTC.
+func parseWebhookTimestamp(value string) (time.Time, error) {
+	trimmedValue := strings.TrimSpace(value)
+	if trimmedValue == "" {
+		return time.Time{}, fmt.Errorf("missing timestamp")
+	}
+
+	seconds, err := strconv.ParseInt(trimmedValue, 10, 64)
+	if err == nil {
+		return time.Unix(seconds, 0).UTC(), nil
+	}
+
+	timestamp, err := time.Parse(time.RFC3339Nano, trimmedValue)
+	if err == nil {
+		return timestamp.UTC(), nil
+	}
+
+	return time.Time{}, fmt.Errorf("invalid timestamp")
+}
+
+// absDuration returns the absolute value of a duration.
+func absDuration(value time.Duration) time.Duration {
+	if value < 0 {
+		return -value
+	}
+
+	return value
+}
+
+// readString returns the trimmed string value, or "" when the value is nil or
+// not a string.
+func readString(value any) string {
+	if value == nil {
+		return ""
+	}
+
+	s, ok := value.(string)
+	if !ok {
+		return ""
+	}
+
+	return strings.TrimSpace(s)
+}
+
+// readMap returns the value as a map, or an empty (non-nil) map when it is nil
+// or not a map, so callers can index the result without nil checks.
+func readMap(value any) map[string]any {
+	if value == nil {
+		return map[string]any{}
+	}
+
+	item, ok := value.(map[string]any)
+	if !ok {
+		return map[string]any{}
+	}
+
+	return item
+}
diff --git a/pkg/integrations/render/common_test.go b/pkg/integrations/render/common_test.go
new file mode 100644
index 0000000000..67f5351f2f
--- /dev/null
+++ b/pkg/integrations/render/common_test.go
@@ -0,0 +1,43 @@
+package render
+
+import (
+	"crypto/hmac"
+	"crypto/sha256"
+	"encoding/base64"
+	"net/http"
+	"strconv"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func Test__payloadType(t *testing.T) {
+	assert.Equal(t, "render.build.ended", payloadType("build_ended"))
+	assert.Equal(t, "render.server.failed", payloadType("server_failed"))
+	assert.Equal(t, "render.autoscaling.ended", payloadType("autoscaling_ended"))
+	assert.Equal(t, "render.event", payloadType(""))
+}
+
+func buildSignedHeaders(secret string, body []byte) http.Header {
+	return buildSignedHeadersWithTimestamp(secret, body, strconv.FormatInt(time.Now().Unix(), 10))
+}
+
+func buildSignedHeadersWithTimestamp(secret string, body []byte, webhookTimestamp string) http.Header {
+	webhookID := 
"msg_2mN8M5S" + + h := hmac.New(sha256.New, []byte(secret)) + h.Write([]byte(webhookID)) + h.Write([]byte(".")) + h.Write([]byte(webhookTimestamp)) + h.Write([]byte(".")) + h.Write(body) + signature := base64.StdEncoding.EncodeToString(h.Sum(nil)) + + headers := http.Header{} + headers.Set("webhook-id", webhookID) + headers.Set("webhook-timestamp", webhookTimestamp) + headers.Set("webhook-signature", "v1,"+signature) + + return headers +} diff --git a/pkg/integrations/render/deploy.go b/pkg/integrations/render/deploy.go new file mode 100644 index 0000000000..c6b5a8e2a6 --- /dev/null +++ b/pkg/integrations/render/deploy.go @@ -0,0 +1,513 @@ +package render + +import ( + "encoding/json" + "fmt" + "net/http" + "strings" + "time" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +const ( + DeployPayloadType = "render.deploy.finished" + DeploySuccessOutputChannel = "success" + DeployFailedOutputChannel = "failed" + DeployPollInterval = 5 * time.Minute // fallback when deploy_ended webhook doesn't arrive + deployExecutionKey = "deploy_id" +) + +type Deploy struct{} + +type DeployExecutionMetadata struct { + Deploy *DeployMetadata `json:"deploy" mapstructure:"deploy"` +} + +type DeployMetadata struct { + ID string `json:"id"` + Status string `json:"status"` + ServiceID string `json:"serviceId"` + CreatedAt string `json:"createdAt"` + FinishedAt string `json:"finishedAt"` +} + +type DeployConfiguration struct { + Service string `json:"service" mapstructure:"service"` + ClearCache bool `json:"clearCache" mapstructure:"clearCache"` +} + +type deployWebhookPayload struct { + ID string `json:"id"` + Type string `json:"type"` + ServiceID string `json:"serviceId"` + Data map[string]any `json:"data"` +} + +type deployWebhookResult struct { + DeployID string + Status string + ServiceID string + CreatedAt string + FinishedAt string + EventID string +} + 
+func (c *Deploy) Name() string { + return "render.deploy" +} + +func (c *Deploy) Label() string { + return "Deploy" +} + +func (c *Deploy) Description() string { + return "Trigger a deploy for a Render service and wait for it to complete" +} + +func (c *Deploy) Documentation() string { + return `The Deploy component starts a new deploy for a Render service and waits for it to complete. + +## Use Cases + +- **Merge to deploy**: Trigger production deploys after a successful GitHub merge and CI pass +- **Scheduled redeploys**: Redeploy staging services on schedules or external content changes +- **Chained deploys**: Deploy service B when service A finishes successfully + +## How It Works + +1. Triggers a new deploy for the selected Render service via the Render API +2. Waits for the deploy to complete (via deploy_ended webhook and optional polling fallback) +3. Routes execution based on deploy outcome: + - **Success channel**: Deploy completed successfully + - **Failed channel**: Deploy failed or was cancelled + +## Configuration + +- **Service**: Render service to deploy +- **Clear Cache**: Clear build cache before deploying + +## Output Channels + +- **Success**: Emitted when the deploy completes successfully +- **Failed**: Emitted when the deploy fails or is cancelled + +## Notes + +- Uses the existing integration webhook for deploy_ended events (same as On Deploy trigger) +- Falls back to polling if the webhook does not arrive +- Requires a Render API key configured on the integration` +} + +func (c *Deploy) Icon() string { + return "rocket" +} + +func (c *Deploy) Color() string { + return "gray" +} + +func (c *Deploy) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{ + {Name: DeploySuccessOutputChannel, Label: "Success"}, + {Name: DeployFailedOutputChannel, Label: "Failed"}, + } +} + +func (c *Deploy) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "service", + Label: "Service", + Type: 
configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "service", + }, + }, + Description: "Render service to deploy", + }, + { + Name: "clearCache", + Label: "Clear Cache", + Type: configuration.FieldTypeBool, + Required: false, + Default: false, + Description: "Clear build cache before triggering the deploy", + }, + } +} + +func decodeDeployConfiguration(configuration any) (DeployConfiguration, error) { + spec := DeployConfiguration{} + if err := mapstructure.Decode(configuration, &spec); err != nil { + return DeployConfiguration{}, fmt.Errorf("failed to decode configuration: %w", err) + } + + spec.Service = strings.TrimSpace(spec.Service) + if spec.Service == "" { + return DeployConfiguration{}, fmt.Errorf("service is required") + } + + return spec, nil +} + +func (c *Deploy) Setup(ctx core.SetupContext) error { + if _, err := decodeDeployConfiguration(ctx.Configuration); err != nil { + return err + } + + // Request webhook for deploy_ended so this component can receive completion events + ctx.Integration.RequestWebhook(webhookConfigurationForResource( + ctx.Integration, + webhookResourceTypeDeploy, + []string{"deploy_ended"}, + )) + + return nil +} + +func (c *Deploy) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *Deploy) Execute(ctx core.ExecutionContext) error { + spec, err := decodeDeployConfiguration(ctx.Configuration) + if err != nil { + return err + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + deploy, err := client.TriggerDeploy(spec.Service, spec.ClearCache) + if err != nil { + return err + } + + deployID := readString(deploy.ID) + if deployID == "" { + return fmt.Errorf("deploy response missing id") + } + + err = ctx.Metadata.Set(DeployExecutionMetadata{ + Deploy: &DeployMetadata{ + ID: deployID, + Status: 
readString(deploy.Status), + ServiceID: spec.Service, + CreatedAt: readString(deploy.CreatedAt), + FinishedAt: readString(deploy.FinishedAt), + }, + }) + if err != nil { + return err + } + + if err := ctx.ExecutionState.SetKV(deployExecutionKey, deployID); err != nil { + return err + } + + // Wait for deploy_ended webhook; poll as fallback + return ctx.Requests.ScheduleActionCall("poll", map[string]any{}, DeployPollInterval) +} + +func (c *Deploy) Actions() []core.Action { + return []core.Action{ + { + Name: "poll", + UserAccessible: false, + }, + } +} + +func (c *Deploy) HandleAction(ctx core.ActionContext) error { + switch ctx.Name { + case "poll": + return c.poll(ctx) + } + return fmt.Errorf("unknown action: %s", ctx.Name) +} + +func (c *Deploy) poll(ctx core.ActionContext) error { + if ctx.ExecutionState.IsFinished() { + return nil + } + + spec, err := decodeDeployConfiguration(ctx.Configuration) + if err != nil { + return err + } + + metadata := DeployExecutionMetadata{} + if err := mapstructure.Decode(ctx.Metadata.Get(), &metadata); err != nil { + return fmt.Errorf("failed to decode metadata: %w", err) + } + + if metadata.Deploy == nil || metadata.Deploy.ID == "" { + return nil + } + + if metadata.Deploy.FinishedAt != "" { + return nil + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + deploy, err := client.GetDeploy(spec.Service, metadata.Deploy.ID) + if err != nil { + return err + } + + if deploy.FinishedAt == "" { + return ctx.Requests.ScheduleActionCall("poll", map[string]any{}, DeployPollInterval) + } + + metadata.Deploy.Status = deploy.Status + metadata.Deploy.FinishedAt = readString(deploy.FinishedAt) + if err := ctx.Metadata.Set(metadata); err != nil { + return err + } + + return c.emitDeployResult(ctx, deploy) +} + +func (c *Deploy) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + if err := verifyWebhookSignature(ctx); err != nil { + return http.StatusForbidden, err + } + + payload, err := 
parseDeployWebhookPayload(ctx.Body) + if err != nil { + return http.StatusBadRequest, fmt.Errorf("error parsing request body: %w", err) + } + + if readString(payload.Type) != "deploy_ended" { + return http.StatusOK, nil + } + + result, err := c.resolveDeployWebhookResult(ctx, payload) + if err != nil { + return http.StatusOK, nil + } + if result.DeployID == "" || result.Status == "" { + return http.StatusOK, nil + } + + executionCtx, err := findDeployExecutionByID(ctx, result.DeployID) + if err != nil { + return http.StatusOK, nil + } + if executionCtx == nil { + return http.StatusOK, nil + } + + metadata := DeployExecutionMetadata{} + if err := mapstructure.Decode(executionCtx.Metadata.Get(), &metadata); err != nil { + return http.StatusInternalServerError, fmt.Errorf("error decoding metadata: %w", err) + } + + if metadata.Deploy != nil && metadata.Deploy.FinishedAt != "" { + return http.StatusOK, nil + } + + applyDeployWebhookResultToMetadata(&metadata, result) + if err := executionCtx.Metadata.Set(metadata); err != nil { + return http.StatusInternalServerError, err + } + + if err := c.emitDeployResultFromWebhook(executionCtx, deployPayloadFromWebhookResult(result)); err != nil { + return http.StatusInternalServerError, err + } + + return http.StatusOK, nil +} + +func (c *Deploy) resolveDeployFromEvent( + ctx core.WebhookRequestContext, + eventID string, +) (deployWebhookResult, error) { + if eventID == "" { + return deployWebhookResult{}, nil + } + + event, err := resolveWebhookEvent(ctx, eventID) + if err != nil { + return deployWebhookResult{}, err + } + + detailValues := eventDetailValues(event) + if detailValues.DeployID == "" { + return deployWebhookResult{}, nil + } + + return deployWebhookResult{ + DeployID: detailValues.DeployID, + ServiceID: readString(event.ServiceID), + FinishedAt: readString(event.Timestamp), + EventID: eventID, + }, nil +} + +func (c *Deploy) emitDeployResult(ctx core.ActionContext, deploy DeployResponse) error { + payload := 
deployPayloadFromDeployResponse(deploy) + if deploy.Status == "live" { + return ctx.ExecutionState.Emit(DeploySuccessOutputChannel, DeployPayloadType, []any{payload}) + } + return ctx.ExecutionState.Emit(DeployFailedOutputChannel, DeployPayloadType, []any{payload}) +} + +func (c *Deploy) emitDeployResultFromWebhook(ctx *core.ExecutionContext, data map[string]any) error { + status := readString(data["status"]) + if status == "live" { + return ctx.ExecutionState.Emit(DeploySuccessOutputChannel, DeployPayloadType, []any{data}) + } + return ctx.ExecutionState.Emit(DeployFailedOutputChannel, DeployPayloadType, []any{data}) +} + +func parseDeployWebhookPayload(body []byte) (deployWebhookPayload, error) { + payload := deployWebhookPayload{} + if err := json.Unmarshal(body, &payload); err != nil { + return deployWebhookPayload{}, err + } + + if payload.Data == nil { + payload.Data = map[string]any{} + } + + return payload, nil +} + +func deployWebhookResultFromPayload(payload deployWebhookPayload) deployWebhookResult { + serviceID := readString(payload.ServiceID) + if serviceID == "" { + serviceID = readString(payload.Data["serviceId"]) + } + + eventID := readString(payload.ID) + if eventID == "" { + eventID = readString(payload.Data["id"]) + } + + return deployWebhookResult{ + DeployID: readString(payload.Data["deployId"]), + Status: readString(payload.Data["status"]), + ServiceID: serviceID, + CreatedAt: readString(payload.Data["createdAt"]), + FinishedAt: readString(payload.Data["finishedAt"]), + EventID: eventID, + } +} + +func mergeDeployWebhookResults(primary, fallback deployWebhookResult) deployWebhookResult { + if primary.DeployID == "" { + primary.DeployID = fallback.DeployID + } + if primary.Status == "" { + primary.Status = fallback.Status + } + if primary.ServiceID == "" { + primary.ServiceID = fallback.ServiceID + } + if primary.CreatedAt == "" { + primary.CreatedAt = fallback.CreatedAt + } + if primary.FinishedAt == "" { + primary.FinishedAt = 
fallback.FinishedAt + } + if primary.EventID == "" { + primary.EventID = fallback.EventID + } + + return primary +} + +func (c *Deploy) resolveDeployWebhookResult( + ctx core.WebhookRequestContext, + payload deployWebhookPayload, +) (deployWebhookResult, error) { + result := deployWebhookResultFromPayload(payload) + eventResult, err := c.resolveDeployFromEvent(ctx, result.EventID) + if err != nil { + return deployWebhookResult{}, err + } + + return mergeDeployWebhookResults(result, eventResult), nil +} + +func findDeployExecutionByID(ctx core.WebhookRequestContext, deployID string) (*core.ExecutionContext, error) { + if deployID == "" || ctx.FindExecutionByKV == nil { + return nil, nil + } + + return ctx.FindExecutionByKV(deployExecutionKey, deployID) +} + +func applyDeployWebhookResultToMetadata(metadata *DeployExecutionMetadata, result deployWebhookResult) { + if metadata.Deploy != nil { + if metadata.Deploy.ID == "" { + metadata.Deploy.ID = result.DeployID + } + metadata.Deploy.Status = result.Status + if result.FinishedAt != "" { + metadata.Deploy.FinishedAt = result.FinishedAt + } + if metadata.Deploy.ServiceID == "" { + metadata.Deploy.ServiceID = result.ServiceID + } + return + } + + metadata.Deploy = &DeployMetadata{ + ID: result.DeployID, + Status: result.Status, + ServiceID: result.ServiceID, + CreatedAt: result.CreatedAt, + FinishedAt: result.FinishedAt, + } +} + +func deployPayloadFromDeployResponse(deploy DeployResponse) map[string]any { + payload := map[string]any{ + "deployId": deploy.ID, + "status": deploy.Status, + "createdAt": deploy.CreatedAt, + } + if deploy.FinishedAt != "" { + payload["finishedAt"] = deploy.FinishedAt + } + + return payload +} + +func deployPayloadFromWebhookResult(result deployWebhookResult) map[string]any { + payload := map[string]any{ + "deployId": result.DeployID, + "status": result.Status, + "serviceId": result.ServiceID, + } + if result.EventID != "" { + payload["eventId"] = result.EventID + } + if result.FinishedAt != 
"" { + payload["finishedAt"] = result.FinishedAt + } + + return payload +} + +func (c *Deploy) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *Deploy) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/render/deploy_test.go b/pkg/integrations/render/deploy_test.go new file mode 100644 index 0000000000..c932389c95 --- /dev/null +++ b/pkg/integrations/render/deploy_test.go @@ -0,0 +1,399 @@ +package render + +import ( + "encoding/json" + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__Render_Deploy__Setup(t *testing.T) { + component := &Deploy{} + + t.Run("missing service -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{}, + }) + + require.ErrorContains(t, err, "service is required") + }) + + t.Run("valid configuration -> success", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{} + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{"service": "srv-cukouhrtq21c73e9scng"}, + Integration: integrationCtx, + }) + + require.NoError(t, err) + require.Len(t, integrationCtx.WebhookRequests, 1) + }) +} + +func Test__Render_Deploy__Execute(t *testing.T) { + component := &Deploy{} + + t.Run("valid input with clear cache -> triggers deploy and schedules poll", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusCreated, + Body: io.NopCloser(strings.NewReader( + `{"deploy":{"id":"dep-cukouhrtq21c73e9scng","status":"build_in_progress","createdAt":"2026-02-05T16:10:00.000000Z","finishedAt":null}}`, + )), + }, + }, + } + + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + metadataCtx := &contexts.MetadataContext{} + requestCtx := 
&contexts.RequestContext{} + err := component.Execute(core.ExecutionContext{ + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{"apiKey": "rnd_test"}}, + Metadata: metadataCtx, + ExecutionState: executionState, + Requests: requestCtx, + Configuration: map[string]any{ + "service": "srv-cukouhrtq21c73e9scng", + "clearCache": true, + }, + }) + + require.NoError(t, err) + // Component waits for deploy_ended; no emit yet + assert.Empty(t, executionState.Channel) + assert.Equal(t, "dep-cukouhrtq21c73e9scng", executionState.KVs["deploy_id"]) + assert.Equal(t, "poll", requestCtx.Action) + assert.Equal(t, DeployPollInterval, requestCtx.Duration) + + require.Len(t, httpCtx.Requests, 1) + request := httpCtx.Requests[0] + assert.Equal(t, http.MethodPost, request.Method) + assert.Contains(t, request.URL.String(), "/v1/services/srv-cukouhrtq21c73e9scng/deploys") + + body, readErr := io.ReadAll(request.Body) + require.NoError(t, readErr) + + payload := map[string]any{} + require.NoError(t, json.Unmarshal(body, &payload)) + assert.Equal(t, "clear", payload["clearCache"]) + }) + + t.Run("missing service -> error", func(t *testing.T) { + err := component.Execute(core.ExecutionContext{ + Integration: &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "rnd_test"}, + }, + ExecutionState: &contexts.ExecutionStateContext{KVs: map[string]string{}}, + Configuration: map[string]any{}, + }) + + require.ErrorContains(t, err, "service is required") + }) + + t.Run("render API error -> returns error", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusNotFound, + Body: io.NopCloser(strings.NewReader(`{"message":"service not found"}`)), + }, + }, + } + + err := component.Execute(core.ExecutionContext{ + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "rnd_test"}, + }, + ExecutionState: 
&contexts.ExecutionStateContext{KVs: map[string]string{}}, + Configuration: map[string]any{ + "service": "srv-missing", + }, + }) + + require.Error(t, err) + }) +} + +func Test__Render_Deploy__HandleWebhook(t *testing.T) { + component := &Deploy{} + + payload := map[string]any{ + "id": "evt-cph1rs3idesc73a2b2mg", + "type": "deploy_ended", + "timestamp": "2026-02-08T21:08:59.718Z", + "serviceId": "srv-cukouhrtq21c73e9scng", + "data": map[string]any{ + "id": "evt-cph1rs3idesc73a2b2mg", + "serviceId": "srv-cukouhrtq21c73e9scng", + "status": "live", + }, + } + + body, err := json.Marshal(payload) + require.NoError(t, err) + + secret := "whsec-test" + headers := buildSignedHeaders(secret, body) + + t.Run("uses event details to resolve deploy and emit result", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `{"id":"evt-cph1rs3idesc73a2b2mg","timestamp":"2026-02-08T21:08:59.718Z","serviceId":"srv-cukouhrtq21c73e9scng","type":"deploy_ended","details":{"deployId":"dep-cukouhrtq21c73e9scng","status":"live"}}`, + )), + }, + }, + } + + lookupOrder := []string{} + metadataCtx := &contexts.MetadataContext{ + Metadata: DeployExecutionMetadata{ + Deploy: &DeployMetadata{ + ID: "dep-cukouhrtq21c73e9scng", + Status: "build_in_progress", + ServiceID: "srv-cukouhrtq21c73e9scng", + }, + }, + } + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + + status, webhookErr := component.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: headers, + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{"apiKey": "rnd_test"}}, + Webhook: &contexts.WebhookContext{Secret: secret}, + FindExecutionByKV: func(key string, value string) (*core.ExecutionContext, error) { + lookupOrder = append(lookupOrder, key+":"+value) + if key == "deploy_id" && value == "dep-cukouhrtq21c73e9scng" { + return 
&core.ExecutionContext{ + Metadata: metadataCtx, + ExecutionState: executionState, + }, nil + } + + return nil, assert.AnError + }, + }) + + assert.Equal(t, http.StatusOK, status) + require.NoError(t, webhookErr) + assert.Equal(t, []string{ + "deploy_id:dep-cukouhrtq21c73e9scng", + }, lookupOrder) + + updatedMetadata, ok := metadataCtx.Metadata.(DeployExecutionMetadata) + require.True(t, ok) + require.NotNil(t, updatedMetadata.Deploy) + assert.Equal(t, "live", updatedMetadata.Deploy.Status) + assert.Equal(t, "2026-02-08T21:08:59.718Z", updatedMetadata.Deploy.FinishedAt) + + assert.Equal(t, DeploySuccessOutputChannel, executionState.Channel) + assert.Equal(t, DeployPayloadType, executionState.Type) + require.Len(t, executionState.Payloads, 1) + emittedPayload := readMap(executionState.Payloads[0]) + data := readMap(emittedPayload["data"]) + assert.Equal(t, "dep-cukouhrtq21c73e9scng", data["deployId"]) + assert.Equal(t, "live", data["status"]) + assert.Equal(t, "evt-cph1rs3idesc73a2b2mg", data["eventId"]) + + require.Len(t, httpCtx.Requests, 1) + assert.Equal(t, http.MethodGet, httpCtx.Requests[0].Method) + assert.Contains(t, httpCtx.Requests[0].URL.Path, "/v1/events/evt-cph1rs3idesc73a2b2mg") + }) + + t.Run("event without deploy details is ignored", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `{"id":"evt-cph1rs3idesc73a2b2mg","timestamp":"2026-02-08T21:08:59.718Z","serviceId":"srv-cukouhrtq21c73e9scng","type":"autoscaling_config_changed","details":null}`, + )), + }, + }, + } + + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + status, webhookErr := component.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: headers, + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{"apiKey": "rnd_test"}}, + Webhook: &contexts.WebhookContext{Secret: secret}, + FindExecutionByKV: 
func(key string, value string) (*core.ExecutionContext, error) { + if key == "deploy_id" { + return &core.ExecutionContext{ + ExecutionState: executionState, + }, nil + } + return nil, assert.AnError + }, + }) + + assert.Equal(t, http.StatusOK, status) + require.NoError(t, webhookErr) + assert.Empty(t, executionState.Channel) + }) + + t.Run("event id from data.id resolves deploy", func(t *testing.T) { + payload := map[string]any{ + "type": "deploy_ended", + "data": map[string]any{ + "id": "evt-cph1rs3idesc73a2b2mg", + "serviceId": "srv-cukouhrtq21c73e9scng", + "status": "live", + }, + } + body, marshalErr := json.Marshal(payload) + require.NoError(t, marshalErr) + + headers := buildSignedHeaders(secret, body) + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `{"id":"evt-cph1rs3idesc73a2b2mg","timestamp":"2026-02-08T21:08:59.718Z","serviceId":"srv-cukouhrtq21c73e9scng","type":"deploy_ended","details":{"deployId":"dep-cukouhrtq21c73e9scng","status":"live"}}`, + )), + }, + }, + } + + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + metadataCtx := &contexts.MetadataContext{ + Metadata: DeployExecutionMetadata{ + Deploy: &DeployMetadata{ + ID: "dep-cukouhrtq21c73e9scng", + Status: "build_in_progress", + ServiceID: "srv-cukouhrtq21c73e9scng", + }, + }, + } + + status, webhookErr := component.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: headers, + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{"apiKey": "rnd_test"}}, + Webhook: &contexts.WebhookContext{Secret: secret}, + FindExecutionByKV: func(key string, value string) (*core.ExecutionContext, error) { + if key == "deploy_id" && value == "dep-cukouhrtq21c73e9scng" { + return &core.ExecutionContext{ + Metadata: metadataCtx, + ExecutionState: executionState, + }, nil + } + + return nil, assert.AnError + }, + }) + + assert.Equal(t, 
http.StatusOK, status) + require.NoError(t, webhookErr) + assert.Equal(t, DeploySuccessOutputChannel, executionState.Channel) + }) + + t.Run("event details with generic id resolve deploy", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `{"id":"evt-cph1rs3idesc73a2b2mg","timestamp":"2026-02-08T21:08:59.718Z","serviceId":"srv-cukouhrtq21c73e9scng","type":"deploy_ended","details":{"id":"dep-cukouhrtq21c73e9scng","status":"live"}}`, + )), + }, + }, + } + + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + metadataCtx := &contexts.MetadataContext{ + Metadata: DeployExecutionMetadata{ + Deploy: &DeployMetadata{ + ID: "dep-cukouhrtq21c73e9scng", + Status: "build_in_progress", + ServiceID: "srv-cukouhrtq21c73e9scng", + }, + }, + } + + status, webhookErr := component.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: headers, + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{"apiKey": "rnd_test"}}, + Webhook: &contexts.WebhookContext{Secret: secret}, + FindExecutionByKV: func(key string, value string) (*core.ExecutionContext, error) { + if key == "deploy_id" && value == "dep-cukouhrtq21c73e9scng" { + return &core.ExecutionContext{ + Metadata: metadataCtx, + ExecutionState: executionState, + }, nil + } + + return nil, assert.AnError + }, + }) + + assert.Equal(t, http.StatusOK, status) + require.NoError(t, webhookErr) + assert.Equal(t, DeploySuccessOutputChannel, executionState.Channel) + }) + + t.Run("event details with nested deploy resolve deploy", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + 
`{"id":"evt-cph1rs3idesc73a2b2mg","timestamp":"2026-02-08T21:08:59.718Z","serviceId":"srv-cukouhrtq21c73e9scng","type":"deploy_ended","details":{"deploy":{"id":"dep-cukouhrtq21c73e9scng","status":"live"}}}`, + )), + }, + }, + } + + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + metadataCtx := &contexts.MetadataContext{ + Metadata: DeployExecutionMetadata{ + Deploy: &DeployMetadata{ + ID: "dep-cukouhrtq21c73e9scng", + Status: "build_in_progress", + ServiceID: "srv-cukouhrtq21c73e9scng", + }, + }, + } + + status, webhookErr := component.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: headers, + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{"apiKey": "rnd_test"}}, + Webhook: &contexts.WebhookContext{Secret: secret}, + FindExecutionByKV: func(key string, value string) (*core.ExecutionContext, error) { + if key == "deploy_id" && value == "dep-cukouhrtq21c73e9scng" { + return &core.ExecutionContext{ + Metadata: metadataCtx, + ExecutionState: executionState, + }, nil + } + + return nil, assert.AnError + }, + }) + + assert.Equal(t, http.StatusOK, status) + require.NoError(t, webhookErr) + assert.Equal(t, DeploySuccessOutputChannel, executionState.Channel) + }) +} diff --git a/pkg/integrations/render/example.go b/pkg/integrations/render/example.go new file mode 100644 index 0000000000..c6fd3e3798 --- /dev/null +++ b/pkg/integrations/render/example.go @@ -0,0 +1,50 @@ +package render + +import ( + _ "embed" + "sync" + + "github.com/superplanehq/superplane/pkg/utils" +) + +//go:embed example_data_on_deploy.json +var exampleDataOnDeployBytes []byte + +//go:embed example_data_on_build.json +var exampleDataOnBuildBytes []byte + +//go:embed example_output_deploy.json +var exampleOutputDeployBytes []byte + +var exampleDataOnDeployOnce sync.Once +var exampleDataOnDeploy map[string]any + +var exampleDataOnBuildOnce sync.Once +var exampleDataOnBuild map[string]any + +var 
exampleOutputDeployOnce sync.Once +var exampleOutputDeploy map[string]any + +func (t *OnDeploy) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON( + &exampleDataOnDeployOnce, + exampleDataOnDeployBytes, + &exampleDataOnDeploy, + ) +} + +func (t *OnBuild) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON( + &exampleDataOnBuildOnce, + exampleDataOnBuildBytes, + &exampleDataOnBuild, + ) +} + +func (c *Deploy) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON( + &exampleOutputDeployOnce, + exampleOutputDeployBytes, + &exampleOutputDeploy, + ) +} diff --git a/pkg/integrations/render/example_data_on_build.json b/pkg/integrations/render/example_data_on_build.json new file mode 100644 index 0000000000..4f83173ea9 --- /dev/null +++ b/pkg/integrations/render/example_data_on_build.json @@ -0,0 +1,12 @@ +{ + "data": { + "createdAt": "2026-02-05T16:00:00.000000Z", + "buildId": "bld-cukouhrtq21c73e9scng", + "eventId": "evj-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "failed" + }, + "timestamp": "2026-02-05T16:00:01.000000Z", + "type": "render.build.ended" +} diff --git a/pkg/integrations/render/example_data_on_deploy.json b/pkg/integrations/render/example_data_on_deploy.json new file mode 100644 index 0000000000..cb9e005460 --- /dev/null +++ b/pkg/integrations/render/example_data_on_deploy.json @@ -0,0 +1,12 @@ +{ + "data": { + "createdAt": "2026-02-05T16:00:00.000000Z", + "deployId": "dep-cukouhrtq21c73e9scng", + "eventId": "evj-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "succeeded" + }, + "timestamp": "2026-02-05T16:00:01.000000Z", + "type": "render.deploy.ended" +} diff --git a/pkg/integrations/render/example_output_deploy.json b/pkg/integrations/render/example_output_deploy.json new file mode 100644 index 0000000000..121fb46460 --- /dev/null +++ 
b/pkg/integrations/render/example_output_deploy.json @@ -0,0 +1,11 @@ +{ + "data": { + "deployId": "dep-cukouhrtq21c73e9scng", + "status": "succeeded", + "serviceId": "srv-cukouhrtq21c73e9scng", + "createdAt": "2026-02-05T16:10:00.000000Z", + "finishedAt": "2026-02-05T16:15:00.000000Z" + }, + "timestamp": "2026-02-05T16:15:00.000000Z", + "type": "render.deploy.finished" +} diff --git a/pkg/integrations/render/on_build.go b/pkg/integrations/render/on_build.go new file mode 100644 index 0000000000..a5d32cf3ea --- /dev/null +++ b/pkg/integrations/render/on_build.go @@ -0,0 +1,119 @@ +package render + +import ( + "fmt" + "net/http" + + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type OnBuild struct{} + +var buildEventTypeOptions = []configuration.FieldOption{ + {Label: "Build Ended", Value: "build_ended"}, + {Label: "Build Started", Value: "build_started"}, +} + +var buildAllowedEventTypes = normalizeWebhookEventTypes([]string{ + "build_ended", + "build_started", +}) + +var buildDefaultEventTypes = []string{"build_ended"} + +func (t *OnBuild) Name() string { + return "render.onBuild" +} + +func (t *OnBuild) Label() string { + return "On Build" +} + +func (t *OnBuild) Description() string { + return "Listen to Render build events for a service" +} + +func (t *OnBuild) Documentation() string { + return `The On Build trigger emits build-related Render events for one selected service. + +## Use Cases + +- **Build failure alerts**: Notify your team when builds fail +- **Build success hooks**: Trigger follow-up automation after successful builds + +## Configuration + +- **Service**: Required Render service. +- **Event Types**: Build event states to listen for. Defaults to ` + "`build_ended`" + `. + +## Webhook Verification + +Render webhooks are validated using the secret generated when SuperPlane creates the webhook via the Render API. 
Verification checks: +- ` + "`webhook-id`" + ` +- ` + "`webhook-timestamp`" + ` +- ` + "`webhook-signature`" + ` (` + "`v1,`" + `) + +## Event Data + +The default output emits payload data fields like ` + "`buildId`" + `, ` + "`eventId`" + `, ` + "`serviceId`" + `, ` + "`serviceName`" + `, and ` + "`status`" + ` (when present).` +} + +func (t *OnBuild) Icon() string { + return "server" +} + +func (t *OnBuild) Color() string { + return "gray" +} + +func (t *OnBuild) Configuration() []configuration.Field { + return onResourceEventConfigurationFields(buildEventTypeOptions, buildDefaultEventTypes) +} + +func (t *OnBuild) Setup(ctx core.TriggerContext) error { + config, err := decodeOnResourceEventConfiguration(ctx.Configuration) + if err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if err := ensureServiceInMetadata(ctx, config); err != nil { + return err + } + + requestedEventTypes := filterAllowedEventTypes(config.EventTypes, buildAllowedEventTypes) + if len(requestedEventTypes) == 0 { + requestedEventTypes = buildDefaultEventTypes + } + + return ctx.Integration.RequestWebhook( + webhookConfigurationForResource(ctx.Integration, webhookResourceTypeBuild, requestedEventTypes), + ) +} + +func (t *OnBuild) Actions() []core.Action { + return []core.Action{} +} + +func (t *OnBuild) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + return nil, nil +} + +func (t *OnBuild) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + config, err := decodeOnResourceEventConfiguration(ctx.Configuration) + if err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode configuration: %w", err) + } + + return handleOnResourceEventWebhook( + ctx, + config, + buildAllowedEventTypes, + buildDefaultEventTypes, + "buildId", + ) +} + +func (t *OnBuild) Cleanup(ctx core.TriggerContext) error { + return nil +} diff --git a/pkg/integrations/render/on_build_test.go b/pkg/integrations/render/on_build_test.go new 
file mode 100644 index 0000000000..38e63da854 --- /dev/null +++ b/pkg/integrations/render/on_build_test.go @@ -0,0 +1,198 @@ +package render + +import ( + "encoding/json" + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__Render_OnBuild__Setup(t *testing.T) { + trigger := &OnBuild{} + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "rnd_test"}, + Metadata: Metadata{ + Workspace: &WorkspaceMetadata{ + ID: "usr-123", + Plan: "professional", + }, + }, + } + metadataCtx := &contexts.MetadataContext{} + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `[{"cursor":"x","service":{"id":"srv-cukouhrtq21c73e9scng","name":"backend-api"}}]`, + )), + }, + }, + } + + err := trigger.Setup(core.TriggerContext{ + HTTP: httpCtx, + Metadata: metadataCtx, + Integration: integrationCtx, + Configuration: map[string]any{ + "service": "srv-cukouhrtq21c73e9scng", + "eventTypes": []string{"build_ended"}, + }, + }) + + require.NoError(t, err) + require.Len(t, integrationCtx.WebhookRequests, 1) + webhookConfiguration, ok := integrationCtx.WebhookRequests[0].(WebhookConfiguration) + require.True(t, ok) + assert.Equal(t, WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + EventTypes: []string{"build_ended"}, + }, webhookConfiguration) +} + +func Test__Render_OnBuild__Setup__OrganizationWorkspace(t *testing.T) { + trigger := &OnBuild{} + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "rnd_test"}, + Metadata: Metadata{ + Workspace: &WorkspaceMetadata{ + ID: "tea-123", + Plan: "organization", + }, + }, + } + metadataCtx := &contexts.MetadataContext{} + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ 
+ { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `[{"cursor":"x","service":{"id":"srv-cukouhrtq21c73e9scng","name":"backend-api"}}]`, + )), + }, + }, + } + + err := trigger.Setup(core.TriggerContext{ + HTTP: httpCtx, + Metadata: metadataCtx, + Integration: integrationCtx, + Configuration: map[string]any{ + "service": "srv-cukouhrtq21c73e9scng", + }, + }) + + require.NoError(t, err) + require.Len(t, integrationCtx.WebhookRequests, 1) + webhookConfiguration, ok := integrationCtx.WebhookRequests[0].(WebhookConfiguration) + require.True(t, ok) + assert.Equal(t, WebhookConfiguration{ + Strategy: webhookStrategyResourceType, + ResourceType: webhookResourceTypeBuild, + EventTypes: []string{"build_ended"}, + }, webhookConfiguration) +} + +func Test__Render_OnBuild__HandleWebhook(t *testing.T) { + trigger := &OnBuild{} + + payload := map[string]any{ + "type": "build_ended", + "timestamp": "2026-02-05T16:00:01.000000Z", + "data": map[string]any{ + "id": "evj-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "failed", + }, + } + + body, err := json.Marshal(payload) + require.NoError(t, err) + + secret := "whsec-test" + headers := buildSignedHeaders(secret, body) + eventCtx := &contexts.EventContext{} + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `{"id":"evj-cukouhrtq21c73e9scng","timestamp":"2026-02-05T16:00:01.000000Z","serviceId":"srv-cukouhrtq21c73e9scng","type":"build_ended","details":{"buildId":"bld-cukouhrtq21c73e9scng","status":"failed"}}`, + )), + }, + }, + } + + status, webhookErr := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: headers, + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{"apiKey": "rnd_test"}}, + Configuration: map[string]any{"service": "srv-cukouhrtq21c73e9scng"}, + Webhook: 
&contexts.WebhookContext{Secret: secret}, + Events: eventCtx, + }) + + assert.Equal(t, http.StatusOK, status) + require.NoError(t, webhookErr) + require.Equal(t, 1, eventCtx.Count()) + assert.Equal(t, "render.build.ended", eventCtx.Payloads[0].Type) + assert.Equal(t, map[string]any{ + "eventId": "evj-cukouhrtq21c73e9scng", + "buildId": "bld-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "failed", + }, eventCtx.Payloads[0].Data) + require.Len(t, httpCtx.Requests, 1) + assert.Equal(t, http.MethodGet, httpCtx.Requests[0].Method) + assert.Contains(t, httpCtx.Requests[0].URL.Path, "/v1/events/evj-cukouhrtq21c73e9scng") +} + +func Test__Render_OnBuild__HandleWebhook__WithoutEventResolution(t *testing.T) { + trigger := &OnBuild{} + + payload := map[string]any{ + "type": "build_ended", + "timestamp": "2026-02-05T16:00:01.000000Z", + "data": map[string]any{ + "id": "evj-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "failed", + }, + } + + body, err := json.Marshal(payload) + require.NoError(t, err) + + secret := "whsec-test" + headers := buildSignedHeaders(secret, body) + eventCtx := &contexts.EventContext{} + + status, webhookErr := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: headers, + Configuration: map[string]any{"service": "srv-cukouhrtq21c73e9scng"}, + Webhook: &contexts.WebhookContext{Secret: secret}, + Events: eventCtx, + }) + + assert.Equal(t, http.StatusOK, status) + require.NoError(t, webhookErr) + require.Equal(t, 1, eventCtx.Count()) + assert.Equal(t, "render.build.ended", eventCtx.Payloads[0].Type) + assert.Equal(t, map[string]any{ + "eventId": "evj-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "failed", + }, eventCtx.Payloads[0].Data) +} diff --git a/pkg/integrations/render/on_deploy.go b/pkg/integrations/render/on_deploy.go new file mode 
100644 index 0000000000..88824e521b --- /dev/null +++ b/pkg/integrations/render/on_deploy.go @@ -0,0 +1,128 @@ +package render + +import ( + "fmt" + "net/http" + + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type OnDeploy struct{} + +var deployEventTypeOptions = []configuration.FieldOption{ + {Label: "Deploy Ended", Value: "deploy_ended"}, + {Label: "Deploy Started", Value: "deploy_started"}, + {Label: "Image Pull Failed", Value: "image_pull_failed"}, + {Label: "Pipeline Minutes Exhausted", Value: "pipeline_minutes_exhausted"}, + {Label: "Pre-Deploy Ended", Value: "pre_deploy_ended"}, + {Label: "Pre-Deploy Started", Value: "pre_deploy_started"}, +} + +var deployAllowedEventTypes = normalizeWebhookEventTypes([]string{ + "deploy_ended", + "deploy_started", + "image_pull_failed", + "pipeline_minutes_exhausted", + "pre_deploy_ended", + "pre_deploy_started", +}) + +var deployDefaultEventTypes = []string{"deploy_ended"} + +func (t *OnDeploy) Name() string { + return "render.onDeploy" +} + +func (t *OnDeploy) Label() string { + return "On Deploy" +} + +func (t *OnDeploy) Description() string { + return "Listen to Render deploy events for a service" +} + +func (t *OnDeploy) Documentation() string { + return `The On Deploy trigger emits deploy-related Render events for one selected service. + +## Use Cases + +- **Deploy notifications**: Notify Slack or PagerDuty when deploys succeed/fail +- **Post-deploy automation**: Trigger smoke tests after successful deploy completion events +- **Release orchestration**: Trigger downstream workflows when deploy stages change + +## Configuration + +- **Service**: Required Render service. +- **Event Types**: Deploy event states to listen for. Defaults to ` + "`deploy_ended`" + `. + +## Webhook Verification + +Render webhooks are validated using the secret generated when SuperPlane creates the webhook via the Render API. 
Verification checks: +- ` + "`webhook-id`" + ` +- ` + "`webhook-timestamp`" + ` +- ` + "`webhook-signature`" + ` (` + "`v1,`" + `) + +## Event Data + +The default output emits payload data fields like ` + "`deployId`" + `, ` + "`eventId`" + `, ` + "`serviceId`" + `, ` + "`serviceName`" + `, and ` + "`status`" + ` (when present).` +} + +func (t *OnDeploy) Icon() string { + return "server" +} + +func (t *OnDeploy) Color() string { + return "gray" +} + +func (t *OnDeploy) Configuration() []configuration.Field { + return onResourceEventConfigurationFields(deployEventTypeOptions, deployDefaultEventTypes) +} + +func (t *OnDeploy) Setup(ctx core.TriggerContext) error { + config, err := decodeOnResourceEventConfiguration(ctx.Configuration) + if err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if err := ensureServiceInMetadata(ctx, config); err != nil { + return err + } + + requestedEventTypes := filterAllowedEventTypes(config.EventTypes, deployAllowedEventTypes) + if len(requestedEventTypes) == 0 { + requestedEventTypes = deployDefaultEventTypes + } + + return ctx.Integration.RequestWebhook( + webhookConfigurationForResource(ctx.Integration, webhookResourceTypeDeploy, requestedEventTypes), + ) +} + +func (t *OnDeploy) Actions() []core.Action { + return []core.Action{} +} + +func (t *OnDeploy) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + return nil, nil +} + +func (t *OnDeploy) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + config, err := decodeOnResourceEventConfiguration(ctx.Configuration) + if err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode configuration: %w", err) + } + + return handleOnResourceEventWebhook( + ctx, + config, + deployAllowedEventTypes, + deployDefaultEventTypes, + "deployId", + ) +} + +func (t *OnDeploy) Cleanup(ctx core.TriggerContext) error { + return nil +} diff --git a/pkg/integrations/render/on_deploy_test.go 
b/pkg/integrations/render/on_deploy_test.go new file mode 100644 index 0000000000..c654b7213f --- /dev/null +++ b/pkg/integrations/render/on_deploy_test.go @@ -0,0 +1,402 @@ +package render + +import ( + "encoding/base64" + "encoding/json" + "io" + "net/http" + "strconv" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__Render_OnDeploy__Setup(t *testing.T) { + trigger := &OnDeploy{} + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "rnd_test"}, + Metadata: Metadata{ + Workspace: &WorkspaceMetadata{ + ID: "usr-123", + Plan: "professional", + }, + }, + } + metadataCtx := &contexts.MetadataContext{} + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `[{"cursor":"x","service":{"id":"srv-cukouhrtq21c73e9scng","name":"backend-api"}}]`, + )), + }, + }, + } + + err := trigger.Setup(core.TriggerContext{ + HTTP: httpCtx, + Metadata: metadataCtx, + Integration: integrationCtx, + Configuration: map[string]any{ + "service": "srv-cukouhrtq21c73e9scng", + "eventTypes": []string{"deploy_ended"}, + }, + }) + + require.NoError(t, err) + require.Len(t, integrationCtx.WebhookRequests, 1) + webhookConfiguration, ok := integrationCtx.WebhookRequests[0].(WebhookConfiguration) + require.True(t, ok) + assert.Equal(t, WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + EventTypes: []string{"deploy_ended"}, + }, webhookConfiguration) + + nodeMetadata, ok := metadataCtx.Metadata.(OnResourceEventMetadata) + require.True(t, ok) + require.NotNil(t, nodeMetadata.Service) + assert.Equal(t, "srv-cukouhrtq21c73e9scng", nodeMetadata.Service.ID) + assert.Equal(t, "backend-api", nodeMetadata.Service.Name) +} + +func Test__Render_OnDeploy__Setup__OrganizationWorkspace(t 
*testing.T) { + trigger := &OnDeploy{} + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "rnd_test"}, + Metadata: Metadata{ + Workspace: &WorkspaceMetadata{ + ID: "tea-123", + Plan: "organization", + }, + }, + } + metadataCtx := &contexts.MetadataContext{} + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `[{"cursor":"x","service":{"id":"srv-cukouhrtq21c73e9scng","name":"backend-api"}}]`, + )), + }, + }, + } + + err := trigger.Setup(core.TriggerContext{ + HTTP: httpCtx, + Metadata: metadataCtx, + Integration: integrationCtx, + Configuration: map[string]any{ + "service": "srv-cukouhrtq21c73e9scng", + }, + }) + + require.NoError(t, err) + require.Len(t, integrationCtx.WebhookRequests, 1) + webhookConfiguration, ok := integrationCtx.WebhookRequests[0].(WebhookConfiguration) + require.True(t, ok) + assert.Equal(t, WebhookConfiguration{ + Strategy: webhookStrategyResourceType, + ResourceType: webhookResourceTypeDeploy, + EventTypes: []string{"deploy_ended"}, + }, webhookConfiguration) +} + +func Test__Render_OnDeploy__HandleWebhook(t *testing.T) { + trigger := &OnDeploy{} + + payload := map[string]any{ + "type": "deploy_ended", + "timestamp": "2026-02-05T16:00:01.000000Z", + "data": map[string]any{ + "id": "evj-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "succeeded", + }, + } + + body, err := json.Marshal(payload) + require.NoError(t, err) + + secret := "whsec-test" + headers := buildSignedHeaders(secret, body) + + t.Run("missing signature headers -> 403", func(t *testing.T) { + eventCtx := &contexts.EventContext{} + status, webhookErr := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: http.Header{}, + Configuration: map[string]any{"service": "srv-cukouhrtq21c73e9scng"}, + Webhook: &contexts.WebhookContext{Secret: secret}, + Events: eventCtx, + }) 
+ + assert.Equal(t, http.StatusForbidden, status) + assert.ErrorContains(t, webhookErr, "missing signature headers") + assert.Zero(t, eventCtx.Count()) + }) + + t.Run("expired timestamp -> 403", func(t *testing.T) { + eventCtx := &contexts.EventContext{} + expiredHeaders := buildSignedHeadersWithTimestamp( + secret, + body, + strconv.FormatInt(time.Now().Add(-10*time.Minute).Unix(), 10), + ) + + status, webhookErr := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: expiredHeaders, + Configuration: map[string]any{"service": "srv-cukouhrtq21c73e9scng"}, + Webhook: &contexts.WebhookContext{Secret: secret}, + Events: eventCtx, + }) + + assert.Equal(t, http.StatusForbidden, status) + assert.ErrorContains(t, webhookErr, "timestamp expired") + assert.Zero(t, eventCtx.Count()) + }) + + t.Run("invalid signature -> 403", func(t *testing.T) { + eventCtx := &contexts.EventContext{} + invalidHeaders := buildSignedHeaders(secret, body) + invalidHeaders.Set("webhook-signature", "v1,invalid") + + status, webhookErr := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: invalidHeaders, + Configuration: map[string]any{"service": "srv-cukouhrtq21c73e9scng"}, + Webhook: &contexts.WebhookContext{Secret: secret}, + Events: eventCtx, + }) + + assert.Equal(t, http.StatusForbidden, status) + assert.ErrorContains(t, webhookErr, "invalid signature") + assert.Zero(t, eventCtx.Count()) + }) + + t.Run("unsupported event type -> ignored", func(t *testing.T) { + eventCtx := &contexts.EventContext{} + + buildPayload := map[string]any{ + "type": "build_ended", + "data": payload["data"], + } + buildBody, marshalErr := json.Marshal(buildPayload) + require.NoError(t, marshalErr) + buildHeaders := buildSignedHeaders(secret, buildBody) + + status, webhookErr := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: buildBody, + Headers: buildHeaders, + Configuration: map[string]any{"service": "srv-cukouhrtq21c73e9scng"}, + Webhook: 
&contexts.WebhookContext{Secret: secret}, + Events: eventCtx, + }) + + assert.Equal(t, http.StatusOK, status) + require.NoError(t, webhookErr) + assert.Zero(t, eventCtx.Count()) + }) + + t.Run("service filter mismatch -> ignored", func(t *testing.T) { + eventCtx := &contexts.EventContext{} + status, webhookErr := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: headers, + Configuration: map[string]any{"service": "srv-other"}, + Webhook: &contexts.WebhookContext{Secret: secret}, + Events: eventCtx, + }) + + assert.Equal(t, http.StatusOK, status) + require.NoError(t, webhookErr) + assert.Zero(t, eventCtx.Count()) + }) + + t.Run("event type filter does not match -> ignored", func(t *testing.T) { + eventCtx := &contexts.EventContext{} + status, webhookErr := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: headers, + Configuration: map[string]any{ + "service": "srv-cukouhrtq21c73e9scng", + "eventTypes": []string{"deploy_started"}, + }, + Webhook: &contexts.WebhookContext{Secret: secret}, + Events: eventCtx, + }) + + assert.Equal(t, http.StatusOK, status) + require.NoError(t, webhookErr) + assert.Zero(t, eventCtx.Count()) + }) + + t.Run("default event filter -> event emitted", func(t *testing.T) { + eventCtx := &contexts.EventContext{} + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `{"id":"evj-cukouhrtq21c73e9scng","timestamp":"2026-02-05T16:00:01.000000Z","serviceId":"srv-cukouhrtq21c73e9scng","type":"deploy_ended","details":{"deployId":"dep-cukouhrtq21c73e9scng","status":"live"}}`, + )), + }, + }, + } + status, webhookErr := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: headers, + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{"apiKey": "rnd_test"}}, + Configuration: map[string]any{"service": "srv-cukouhrtq21c73e9scng"}, + Webhook: 
&contexts.WebhookContext{Secret: secret}, + Events: eventCtx, + }) + + assert.Equal(t, http.StatusOK, status) + require.NoError(t, webhookErr) + require.Equal(t, 1, eventCtx.Count()) + assert.Equal(t, "render.deploy.ended", eventCtx.Payloads[0].Type) + assert.Equal(t, map[string]any{ + "eventId": "evj-cukouhrtq21c73e9scng", + "deployId": "dep-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "succeeded", + }, eventCtx.Payloads[0].Data) + require.Len(t, httpCtx.Requests, 1) + assert.Equal(t, http.MethodGet, httpCtx.Requests[0].Method) + assert.Contains(t, httpCtx.Requests[0].URL.Path, "/v1/events/evj-cukouhrtq21c73e9scng") + }) + + t.Run("multiple signatures header -> accepts matching v1 signature", func(t *testing.T) { + eventCtx := &contexts.EventContext{} + validHeaders := buildSignedHeaders(secret, body) + validSignature := strings.TrimPrefix(validHeaders.Get("webhook-signature"), "v1,") + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `{"id":"evj-cukouhrtq21c73e9scng","timestamp":"2026-02-05T16:00:01.000000Z","serviceId":"srv-cukouhrtq21c73e9scng","type":"deploy_ended","details":{"deployId":"dep-cukouhrtq21c73e9scng","status":"live"}}`, + )), + }, + }, + } + + headersWithMultipleSignatures := http.Header{} + headersWithMultipleSignatures.Set("webhook-id", validHeaders.Get("webhook-id")) + headersWithMultipleSignatures.Set("webhook-timestamp", validHeaders.Get("webhook-timestamp")) + headersWithMultipleSignatures.Set("webhook-signature", "v1,invalid v1,"+validSignature) + + status, webhookErr := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: headersWithMultipleSignatures, + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{"apiKey": "rnd_test"}}, + Configuration: map[string]any{"service": "srv-cukouhrtq21c73e9scng"}, + Webhook: 
&contexts.WebhookContext{Secret: secret}, + Events: eventCtx, + }) + + assert.Equal(t, http.StatusOK, status) + require.NoError(t, webhookErr) + require.Equal(t, 1, eventCtx.Count()) + assert.Equal(t, "render.deploy.ended", eventCtx.Payloads[0].Type) + assert.Equal(t, map[string]any{ + "eventId": "evj-cukouhrtq21c73e9scng", + "deployId": "dep-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "succeeded", + }, eventCtx.Payloads[0].Data) + }) + + t.Run("whsec secret format -> accepts decoded signing key", func(t *testing.T) { + eventCtx := &contexts.EventContext{} + rawSecret := "test-secret" + webhookSecret := "whsec_" + base64.RawStdEncoding.EncodeToString([]byte(rawSecret)) + headers := buildSignedHeaders(rawSecret, body) + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `{"id":"evj-cukouhrtq21c73e9scng","timestamp":"2026-02-05T16:00:01.000000Z","serviceId":"srv-cukouhrtq21c73e9scng","type":"deploy_ended","details":{"deployId":"dep-cukouhrtq21c73e9scng","status":"live"}}`, + )), + }, + }, + } + + status, webhookErr := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: headers, + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{"apiKey": "rnd_test"}}, + Configuration: map[string]any{"service": "srv-cukouhrtq21c73e9scng"}, + Webhook: &contexts.WebhookContext{Secret: webhookSecret}, + Events: eventCtx, + }) + + assert.Equal(t, http.StatusOK, status) + require.NoError(t, webhookErr) + require.Equal(t, 1, eventCtx.Count()) + assert.Equal(t, "render.deploy.ended", eventCtx.Payloads[0].Type) + assert.Equal(t, map[string]any{ + "eventId": "evj-cukouhrtq21c73e9scng", + "deployId": "dep-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "succeeded", + }, eventCtx.Payloads[0].Data) + }) +} + +func 
Test__Render_OnDeploy__HandleWebhook__WithoutEventResolution(t *testing.T) { + trigger := &OnDeploy{} + + payload := map[string]any{ + "type": "deploy_ended", + "timestamp": "2026-02-05T16:00:01.000000Z", + "data": map[string]any{ + "id": "evj-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "succeeded", + }, + } + + body, err := json.Marshal(payload) + require.NoError(t, err) + + secret := "whsec-test" + headers := buildSignedHeaders(secret, body) + eventCtx := &contexts.EventContext{} + + status, webhookErr := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Headers: headers, + Configuration: map[string]any{"service": "srv-cukouhrtq21c73e9scng"}, + Webhook: &contexts.WebhookContext{Secret: secret}, + Events: eventCtx, + }) + + assert.Equal(t, http.StatusOK, status) + require.NoError(t, webhookErr) + require.Equal(t, 1, eventCtx.Count()) + assert.Equal(t, "render.deploy.ended", eventCtx.Payloads[0].Type) + assert.Equal(t, map[string]any{ + "eventId": "evj-cukouhrtq21c73e9scng", + "serviceId": "srv-cukouhrtq21c73e9scng", + "serviceName": "backend-api", + "status": "succeeded", + }, eventCtx.Payloads[0].Data) +} diff --git a/pkg/integrations/render/render.go b/pkg/integrations/render/render.go new file mode 100644 index 0000000000..29ea11e667 --- /dev/null +++ b/pkg/integrations/render/render.go @@ -0,0 +1,262 @@ +package render + +import ( + "fmt" + "slices" + "strings" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/registry" +) + +func init() { + registry.RegisterIntegrationWithWebhookHandler("render", &Render{}, &RenderWebhookHandler{}) +} + +type Render struct{} + +type Configuration struct { + APIKey string `json:"apiKey" mapstructure:"apiKey"` + Workspace string `json:"workspace" mapstructure:"workspace"` + WorkspacePlan string 
`json:"workspacePlan" mapstructure:"workspacePlan"` +} + +type Metadata struct { + Workspace *WorkspaceMetadata `json:"workspace,omitempty" mapstructure:"workspace"` +} + +type WorkspaceMetadata struct { + ID string `json:"id" mapstructure:"id"` + Plan string `json:"plan" mapstructure:"plan"` +} + +func (r *Render) Name() string { + return "render" +} + +func (r *Render) Label() string { + return "Render" +} + +func (r *Render) Icon() string { + return "server" +} + +func (r *Render) Description() string { + return "Deploy and manage Render services, and react to Render deploy/build events" +} + +func (r *Render) Instructions() string { + return ` +1. **API Key:** Create it in [Render Account Settings -> API Keys](https://dashboard.render.com/u/settings#api-keys). +2. **Workspace (optional):** Use your Render workspace ID (` + "`usr-...`" + ` or ` + "`tea-...`" + `) or workspace name. Leave empty to use the first workspace available to the API key. +3. **Workspace Plan:** Select **Professional** or **Organization / Enterprise** (used to choose webhook strategy). +4. **Auth:** SuperPlane sends requests to [Render API v1](https://api.render.com/v1/) using ` + "`Authorization: Bearer `" + `. +5. **Webhooks:** SuperPlane configures Render webhooks automatically via the [Render Webhooks API](https://render.com/docs/webhooks). No manual setup is required. +6. **Troubleshooting:** Check [Render Dashboard -> Integrations -> Webhooks](https://dashboard.render.com/) and the [Render webhook docs](https://render.com/docs/webhooks). 
+ +Note: **Plan requirement:** Render webhooks require a Professional plan or higher.` +} + +func (r *Render) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "apiKey", + Label: "API Key", + Type: configuration.FieldTypeString, + Required: true, + Sensitive: true, + Description: "Render API key", + }, + { + Name: "workspace", + Label: "Workspace", + Type: configuration.FieldTypeString, + Required: false, + Description: "Optional Render workspace ID/name. Use this if your API key has access to multiple workspaces.", + }, + { + Name: "workspacePlan", + Label: "Workspace Plan", + Type: configuration.FieldTypeSelect, + Required: true, + Default: workspacePlanProfessional, + Description: "Render workspace plan used for webhook strategy. " + + "Use Organization / Enterprise when available.", + TypeOptions: &configuration.TypeOptions{ + Select: &configuration.SelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "Professional", Value: workspacePlanProfessional}, + {Label: "Organization / Enterprise", Value: workspacePlanOrganization}, + }, + }, + }, + }, + } +} + +func (r *Render) Components() []core.Component { + return []core.Component{ + &Deploy{}, + } +} + +func (r *Render) Triggers() []core.Trigger { + return []core.Trigger{ + &OnDeploy{}, + &OnBuild{}, + } +} + +func (r *Render) Cleanup(ctx core.IntegrationCleanupContext) error { + return nil +} + +func (r *Render) Sync(ctx core.SyncContext) error { + config := Configuration{} + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if config.APIKey == "" { + return fmt.Errorf("apiKey is required") + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + if err := client.Verify(); err != nil { + return fmt.Errorf("failed to verify Render credentials: %w", err) + } + + workspace, err := resolveWorkspace(client, config.Workspace) + if err != 
nil { + return fmt.Errorf("failed to resolve workspace: %w", err) + } + + ctx.Integration.SetMetadata(buildMetadata(workspace.ID, config.WorkspacePlan)) + ctx.Integration.Ready() + return nil +} + +func (r *Render) HandleRequest(ctx core.HTTPRequestContext) { + // no-op +} + +func (r *Render) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + if resourceType != "service" { + return []core.IntegrationResource{}, nil + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, err + } + + workspaceID, err := workspaceIDForIntegration(client, ctx.Integration) + if err != nil { + return nil, err + } + + services, err := client.ListServices(workspaceID) + if err != nil { + return nil, err + } + + resources := make([]core.IntegrationResource, 0, len(services)) + for _, service := range services { + if service.ID == "" || service.Name == "" { + continue + } + + resources = append(resources, core.IntegrationResource{Type: resourceType, Name: service.Name, ID: service.ID}) + } + + return resources, nil +} + +func (r *Render) Actions() []core.Action { + return []core.Action{} +} + +func (r *Render) HandleAction(ctx core.IntegrationActionContext) error { + return nil +} + +func workspaceIDForIntegration(client *Client, integration core.IntegrationContext) (string, error) { + metadata := Metadata{} + if err := mapstructure.Decode(integration.GetMetadata(), &metadata); err == nil && metadata.Workspace != nil && metadata.Workspace.ID != "" { + return metadata.Workspace.ID, nil + } + + workspace := "" + workspaceValue, workspaceErr := integration.GetConfig("workspace") + if workspaceErr == nil { + workspace = string(workspaceValue) + } + + selectedWorkspace, err := resolveWorkspace(client, workspace) + if err != nil { + return "", err + } + + workspacePlan := workspacePlanProfessional + workspacePlanValue, workspacePlanErr := integration.GetConfig("workspacePlan") + if workspacePlanErr == nil { 
+ workspacePlan = string(workspacePlanValue) + } + + integration.SetMetadata(buildMetadata(selectedWorkspace.ID, workspacePlan)) + return selectedWorkspace.ID, nil +} + +func resolveWorkspace(client *Client, workspace string) (Workspace, error) { + workspaces, err := client.ListWorkspaces() + if err != nil { + return Workspace{}, err + } + + if len(workspaces) == 0 { + return Workspace{}, fmt.Errorf("no workspaces found for this API key") + } + + if workspace == "" { + return workspaces[0], nil + } + + selectedWorkspace := slices.IndexFunc(workspaces, func(item Workspace) bool { + return item.ID == workspace + }) + if selectedWorkspace < 0 { + selectedWorkspace = slices.IndexFunc(workspaces, func(item Workspace) bool { + return strings.EqualFold(item.Name, workspace) + }) + } + + if selectedWorkspace < 0 { + return Workspace{}, fmt.Errorf("workspace %s is not accessible with this API key", workspace) + } + + return workspaces[selectedWorkspace], nil +} + +func (m Metadata) workspacePlan() string { + if m.Workspace == nil { + return workspacePlanProfessional + } + + return m.Workspace.Plan +} + +func buildMetadata(workspaceID, workspacePlan string) Metadata { + return Metadata{ + Workspace: &WorkspaceMetadata{ + ID: strings.TrimSpace(workspaceID), + Plan: strings.TrimSpace(workspacePlan), + }, + } +} diff --git a/pkg/integrations/render/render_test.go b/pkg/integrations/render/render_test.go new file mode 100644 index 0000000000..516b00c23f --- /dev/null +++ b/pkg/integrations/render/render_test.go @@ -0,0 +1,513 @@ +package render + +import ( + "encoding/json" + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +type integrationWebhookContext struct { + id string + url string + configuration any + metadata any + secret []byte +} + +func (w *integrationWebhookContext) GetID() string 
{ return w.id } +func (w *integrationWebhookContext) GetURL() string { return w.url } +func (w *integrationWebhookContext) GetSecret() ([]byte, error) { return w.secret, nil } +func (w *integrationWebhookContext) GetMetadata() any { return w.metadata } +func (w *integrationWebhookContext) GetConfiguration() any { return w.configuration } +func (w *integrationWebhookContext) SetSecret(secret []byte) error { + w.secret = secret + return nil +} + +func Test__Render__Sync(t *testing.T) { + integration := &Render{} + + t.Run("valid API key -> ready", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`[{"cursor":"x","service":{"id":"srv-1","name":"backend"}}]`)), + }, + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`[{"cursor":"x","owner":{"id":"usr-123","name":"Pedro"}}]`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "apiKey": "rnd_test", + "workspacePlan": "professional", + }, + } + + err := integration.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + HTTP: httpCtx, + Integration: integrationCtx, + }) + + require.NoError(t, err) + assert.Equal(t, "ready", integrationCtx.State) + + metadata, ok := integrationCtx.Metadata.(Metadata) + require.True(t, ok) + require.NotNil(t, metadata.Workspace) + assert.Equal(t, "usr-123", metadata.Workspace.ID) + assert.Equal(t, "professional", metadata.Workspace.Plan) + + require.Len(t, httpCtx.Requests, 2) + assert.Contains(t, httpCtx.Requests[0].URL.String(), "/v1/services") + assert.Contains(t, httpCtx.Requests[1].URL.String(), "/v1/owners") + }) + + t.Run("workspace not available -> error", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`[{"cursor":"x","service":{"id":"srv-1","name":"backend"}}]`)), + }, + { + 
StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`[{"cursor":"x","owner":{"id":"usr-123","name":"Pedro"}}]`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "apiKey": "rnd_test", + "workspace": "tea-999", + "workspacePlan": "professional", + }, + } + + err := integration.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + HTTP: httpCtx, + Integration: integrationCtx, + }) + + require.ErrorContains(t, err, "is not accessible") + }) + + t.Run("organization plan -> metadata uses organization strategy", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`[{"cursor":"x","service":{"id":"srv-1","name":"backend"}}]`)), + }, + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`[{"cursor":"x","owner":{"id":"usr-123","name":"Pedro"}}]`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "apiKey": "rnd_test", + "workspacePlan": "organization", + }, + } + + err := integration.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + HTTP: httpCtx, + Integration: integrationCtx, + }) + + require.NoError(t, err) + + metadata, ok := integrationCtx.Metadata.(Metadata) + require.True(t, ok) + require.NotNil(t, metadata.Workspace) + assert.Equal(t, "usr-123", metadata.Workspace.ID) + assert.Equal(t, "organization", metadata.Workspace.Plan) + }) + + t.Run("workspace can be selected by name", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`[{"cursor":"x","service":{"id":"srv-1","name":"backend"}}]`)), + }, + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `[{"cursor":"x","owner":{"id":"usr-123","name":"Personal"}},{"cursor":"y","owner":{"id":"tea-456","name":"Acme 
Team"}}]`, + )), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "apiKey": "rnd_test", + "workspace": "Acme Team", + "workspacePlan": "organization", + }, + } + + err := integration.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + HTTP: httpCtx, + Integration: integrationCtx, + }) + + require.NoError(t, err) + + metadata, ok := integrationCtx.Metadata.(Metadata) + require.True(t, ok) + require.NotNil(t, metadata.Workspace) + assert.Equal(t, "tea-456", metadata.Workspace.ID) + assert.Equal(t, "organization", metadata.Workspace.Plan) + }) +} + +func Test__Render__ListResources(t *testing.T) { + integration := &Render{} + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `[{"cursor":"a","service":{"id":"srv-1","name":"backend"}},{"cursor":"b","service":{"id":"srv-2","name":"worker"}}]`, + )), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "rnd_test"}, + Metadata: Metadata{ + Workspace: &WorkspaceMetadata{ + ID: "usr-123", + }, + }, + } + + resources, err := integration.ListResources("service", core.ListResourcesContext{ + HTTP: httpCtx, + Integration: integrationCtx, + }) + + require.NoError(t, err) + require.Len(t, resources, 2) + assert.Equal(t, "backend", resources[0].Name) + assert.Equal(t, "srv-1", resources[0].ID) + assert.Equal(t, "worker", resources[1].Name) + assert.Equal(t, "srv-2", resources[1].ID) + + require.Len(t, httpCtx.Requests, 1) + assert.Contains(t, httpCtx.Requests[0].URL.String(), "ownerId=usr-123") +} + +func Test__Render__SetupWebhook(t *testing.T) { + handler := &RenderWebhookHandler{} + + t.Run("create webhook and store secret", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`[]`)), + }, + { + StatusCode: 
http.StatusCreated, + Body: io.NopCloser(strings.NewReader( + `{"id":"whk-1","ownerId":"usr-123","name":"SuperPlane","url":"https://hooks.superplane.dev/render","enabled":true,"eventFilter":[],"secret":"whsec-abc"}`, + )), + }, + }, + } + + webhookCtx := &integrationWebhookContext{ + id: "wh_record_1", + url: "https://hooks.superplane.dev/render", + configuration: struct{}{}, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "rnd_test"}, + Metadata: Metadata{ + Workspace: &WorkspaceMetadata{ + ID: "usr-123", + }, + }, + } + + metadata, err := handler.Setup(core.WebhookHandlerContext{ + HTTP: httpCtx, + Webhook: webhookCtx, + Integration: integrationCtx, + }) + + require.NoError(t, err) + assert.Equal(t, "whsec-abc", string(webhookCtx.secret)) + + storedMetadata, ok := metadata.(WebhookMetadata) + require.True(t, ok) + assert.Equal(t, "whk-1", storedMetadata.WebhookID) + assert.Equal(t, "usr-123", storedMetadata.WorkspaceID) + + require.Len(t, httpCtx.Requests, 2) + assert.Equal(t, http.MethodGet, httpCtx.Requests[0].Method) + assert.Contains(t, httpCtx.Requests[0].URL.String(), "/v1/webhooks") + assert.Contains(t, httpCtx.Requests[0].URL.String(), "ownerId=usr-123") + + assert.Equal(t, http.MethodPost, httpCtx.Requests[1].Method) + assert.Contains(t, httpCtx.Requests[1].URL.String(), "/v1/webhooks") + + body, readErr := io.ReadAll(httpCtx.Requests[1].Body) + require.NoError(t, readErr) + + payload := map[string]any{} + require.NoError(t, json.Unmarshal(body, &payload)) + assert.Equal(t, "usr-123", payload["ownerId"]) + assert.Equal(t, "https://hooks.superplane.dev/render", payload["url"]) + assert.Equal(t, true, payload["enabled"]) + assert.ElementsMatch(t, webhookEventFilter(WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + }), payload["eventFilter"]) + }) + + t.Run("reuse existing webhook with same URL and update event filter", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: 
[]*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `[{"cursor":"x","webhook":{"id":"whk-existing","ownerId":"usr-123","name":"SuperPlane","url":"https://hooks.superplane.dev/render","enabled":true,"eventFilter":[],"secret":"whsec-existing"}}]`, + )), + }, + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `{"id":"whk-existing","ownerId":"usr-123","name":"SuperPlane","url":"https://hooks.superplane.dev/render","enabled":true,"eventFilter":[],"secret":"whsec-existing"}`, + )), + }, + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `{"id":"whk-existing","ownerId":"usr-123","name":"SuperPlane","url":"https://hooks.superplane.dev/render","enabled":true,"eventFilter":["build_ended","build_started","deploy_ended","deploy_started","image_pull_failed","pipeline_minutes_exhausted","pre_deploy_ended","pre_deploy_started"],"secret":"whsec-existing"}`, + )), + }, + }, + } + + webhookCtx := &integrationWebhookContext{ + id: "wh_record_1", + url: "https://hooks.superplane.dev/render", + configuration: struct{}{}, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "rnd_test"}, + Metadata: Metadata{ + Workspace: &WorkspaceMetadata{ + ID: "usr-123", + }, + }, + } + + metadata, err := handler.Setup(core.WebhookHandlerContext{ + HTTP: httpCtx, + Webhook: webhookCtx, + Integration: integrationCtx, + }) + + require.NoError(t, err) + assert.Equal(t, "whsec-existing", string(webhookCtx.secret)) + + storedMetadata, ok := metadata.(WebhookMetadata) + require.True(t, ok) + assert.Equal(t, "whk-existing", storedMetadata.WebhookID) + assert.Equal(t, "usr-123", storedMetadata.WorkspaceID) + + require.Len(t, httpCtx.Requests, 3) + assert.Equal(t, http.MethodGet, httpCtx.Requests[0].Method) + assert.Contains(t, httpCtx.Requests[0].URL.String(), "/v1/webhooks") + assert.Contains(t, httpCtx.Requests[0].URL.String(), "ownerId=usr-123") + assert.Equal(t, 
http.MethodGet, httpCtx.Requests[1].Method) + assert.Contains(t, httpCtx.Requests[1].URL.String(), "/v1/webhooks/whk-existing") + assert.Equal(t, http.MethodPatch, httpCtx.Requests[2].Method) + assert.Contains(t, httpCtx.Requests[2].URL.String(), "/v1/webhooks/whk-existing") + + updateBody, readErr := io.ReadAll(httpCtx.Requests[2].Body) + require.NoError(t, readErr) + + updatePayload := map[string]any{} + require.NoError(t, json.Unmarshal(updateBody, &updatePayload)) + assert.Equal(t, "SuperPlane", updatePayload["name"]) + assert.Equal(t, "https://hooks.superplane.dev/render", updatePayload["url"]) + assert.Equal(t, true, updatePayload["enabled"]) + assert.ElementsMatch(t, webhookEventFilter(WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + }), updatePayload["eventFilter"]) + + }) + + t.Run("organization strategy creates resource-specific webhook when URL already exists with different filter", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader( + `[{"cursor":"x","webhook":{"id":"whk-existing","ownerId":"usr-123","name":"SuperPlane Deploy","url":"https://hooks.superplane.dev/render","enabled":true,"eventFilter":["deploy_ended"],"secret":"whsec-existing"}}]`, + )), + }, + { + StatusCode: http.StatusCreated, + Body: io.NopCloser(strings.NewReader( + `{"id":"whk-build","ownerId":"usr-123","name":"SuperPlane Build","url":"https://hooks.superplane.dev/render","enabled":true,"eventFilter":["build_ended","build_started"],"secret":"whsec-build"}`, + )), + }, + }, + } + + webhookCtx := &integrationWebhookContext{ + id: "wh_record_2", + url: "https://hooks.superplane.dev/render", + configuration: WebhookConfiguration{ + Strategy: webhookStrategyResourceType, + ResourceType: webhookResourceTypeBuild, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "rnd_test"}, + Metadata: Metadata{ + Workspace: 
&WorkspaceMetadata{ + ID: "usr-123", + Plan: "organization", + }, + }, + } + + metadata, err := handler.Setup(core.WebhookHandlerContext{ + HTTP: httpCtx, + Webhook: webhookCtx, + Integration: integrationCtx, + }) + + require.NoError(t, err) + assert.Equal(t, "whsec-build", string(webhookCtx.secret)) + + storedMetadata, ok := metadata.(WebhookMetadata) + require.True(t, ok) + assert.Equal(t, "whk-build", storedMetadata.WebhookID) + + require.Len(t, httpCtx.Requests, 2) + assert.Equal(t, http.MethodGet, httpCtx.Requests[0].Method) + assert.Equal(t, http.MethodPost, httpCtx.Requests[1].Method) + + body, readErr := io.ReadAll(httpCtx.Requests[1].Body) + require.NoError(t, readErr) + + payload := map[string]any{} + require.NoError(t, json.Unmarshal(body, &payload)) + assert.Equal(t, "SuperPlane Build", payload["name"]) + assert.ElementsMatch(t, webhookEventFilter(WebhookConfiguration{ + Strategy: webhookStrategyResourceType, + ResourceType: webhookResourceTypeBuild, + }), payload["eventFilter"]) + }) +} + +func Test__Render__CleanupWebhook(t *testing.T) { + handler := &RenderWebhookHandler{} + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusNoContent, + Body: io.NopCloser(strings.NewReader("")), + }, + }, + } + + err := handler.Cleanup(core.WebhookHandlerContext{ + HTTP: httpCtx, + Webhook: &integrationWebhookContext{ + metadata: WebhookMetadata{WebhookID: "whk-1", WorkspaceID: "usr-123"}, + }, + Integration: &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "rnd_test"}, + }, + }) + + require.NoError(t, err) + require.Len(t, httpCtx.Requests, 1) + assert.Equal(t, http.MethodDelete, httpCtx.Requests[0].Method) + assert.Contains(t, httpCtx.Requests[0].URL.String(), "/v1/webhooks/whk-1") +} + +func Test__Render__CompareWebhookConfig(t *testing.T) { + handler := &RenderWebhookHandler{} + equal, err := handler.CompareConfig(struct{}{}, map[string]any{"eventTypes": []string{"deploy_ended"}}) + 
require.NoError(t, err) + assert.True(t, equal) + + equal, err = handler.CompareConfig( + WebhookConfiguration{ + Strategy: webhookStrategyResourceType, + ResourceType: webhookResourceTypeDeploy, + }, + WebhookConfiguration{ + Strategy: webhookStrategyResourceType, + ResourceType: webhookResourceTypeBuild, + }, + ) + require.NoError(t, err) + assert.False(t, equal) + + equal, err = handler.CompareConfig( + WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + EventTypes: []string{"deploy_ended"}, + }, + WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + EventTypes: []string{"build_ended"}, + }, + ) + require.NoError(t, err) + assert.True(t, equal) +} + +func Test__Render__MergeWebhookConfig(t *testing.T) { + handler := &RenderWebhookHandler{} + merged, changed, err := handler.Merge( + WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + EventTypes: []string{"deploy_ended"}, + }, + WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + EventTypes: []string{"build_ended"}, + }, + ) + require.NoError(t, err) + require.True(t, changed) + assert.Equal(t, WebhookConfiguration{ + Strategy: webhookStrategyIntegration, + EventTypes: []string{"build_ended", "deploy_ended"}, + }, merged) +} diff --git a/pkg/integrations/render/webhook_handler.go b/pkg/integrations/render/webhook_handler.go new file mode 100644 index 0000000000..2ce6336b51 --- /dev/null +++ b/pkg/integrations/render/webhook_handler.go @@ -0,0 +1,394 @@ +package render + +import ( + "fmt" + "slices" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/core" +) + +type RenderWebhookHandler struct{} + +type WebhookMetadata struct { + WebhookID string `json:"webhookId" mapstructure:"webhookId"` + WorkspaceID string `json:"workspaceId" mapstructure:"workspaceId"` +} + +type webhookSetupRequest struct { + URL string + Configuration WebhookConfiguration + Name string + EventFilter []string +} + +func (h *RenderWebhookHandler) CompareConfig(a, b 
any) (bool, error) { + configA, err := decodeWebhookConfiguration(a) + if err != nil { + return false, fmt.Errorf("failed to decode webhook configuration A: %w", err) + } + + configB, err := decodeWebhookConfiguration(b) + if err != nil { + return false, fmt.Errorf("failed to decode webhook configuration B: %w", err) + } + + if configA.Strategy != configB.Strategy { + return false, nil + } + + if configA.Strategy == webhookStrategyResourceType { + return configA.ResourceType == configB.ResourceType, nil + } + + return true, nil +} + +func (h *RenderWebhookHandler) Merge(current, requested any) (any, bool, error) { + currentConfiguration, err := decodeWebhookConfiguration(current) + if err != nil { + return nil, false, fmt.Errorf("failed to decode current webhook configuration: %w", err) + } + + requestedConfiguration, err := decodeWebhookConfiguration(requested) + if err != nil { + return nil, false, fmt.Errorf("failed to decode requested webhook configuration: %w", err) + } + + if currentConfiguration.Strategy != requestedConfiguration.Strategy { + return currentConfiguration, false, nil + } + + if currentConfiguration.Strategy == webhookStrategyResourceType && + currentConfiguration.ResourceType != requestedConfiguration.ResourceType { + return currentConfiguration, false, nil + } + + mergedConfiguration := currentConfiguration + mergedConfiguration.EventTypes = normalizeWebhookEventTypes( + append(currentConfiguration.EventTypes, requestedConfiguration.EventTypes...), + ) + + if len(mergedConfiguration.EventTypes) == 0 { + mergedConfiguration.EventTypes = defaultEventTypesForWebhook(currentConfiguration) + } + + return mergedConfiguration, !webhookConfigurationsEqual(currentConfiguration, mergedConfiguration), nil +} + +func (h *RenderWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, err + } + + workspaceID, err := workspaceIDForIntegration(client, 
ctx.Integration) + if err != nil { + return nil, err + } + + request, err := buildWebhookSetupRequest(ctx) + if err != nil { + return nil, err + } + + selectedWebhook, err := h.findExistingWebhook( + client, + workspaceID, + request.URL, + request.Configuration, + request.Name, + request.EventFilter, + ) + if err != nil { + return nil, err + } + + if selectedWebhook == nil { + metadata, createErr := h.createWebhook(ctx, client, workspaceID, request) + if createErr != nil { + return nil, createErr + } + return metadata, nil + } + + metadata, reuseErr := h.reuseWebhook(ctx, client, workspaceID, *selectedWebhook, request) + if reuseErr != nil { + return nil, reuseErr + } + + return metadata, nil +} + +func buildWebhookSetupRequest(ctx core.WebhookHandlerContext) (webhookSetupRequest, error) { + webhookURL := ctx.Webhook.GetURL() + if webhookURL == "" { + return webhookSetupRequest{}, fmt.Errorf("webhook URL is required") + } + + webhookConfiguration, err := decodeWebhookConfiguration(ctx.Webhook.GetConfiguration()) + if err != nil { + return webhookSetupRequest{}, fmt.Errorf("failed to decode webhook configuration: %w", err) + } + + return webhookSetupRequest{ + URL: webhookURL, + Configuration: webhookConfiguration, + Name: webhookName(webhookConfiguration), + EventFilter: webhookEventFilter(webhookConfiguration), + }, nil +} + +func (h *RenderWebhookHandler) reuseWebhook( + ctx core.WebhookHandlerContext, + client *Client, + workspaceID string, + selectedWebhook Webhook, + request webhookSetupRequest, +) (WebhookMetadata, error) { + secret, err := h.updateWebhookIfNeeded( + client, + selectedWebhook, + request.Name, + request.URL, + request.EventFilter, + ) + if err != nil { + return WebhookMetadata{}, err + } + + return finalizeWebhookSetup(ctx, selectedWebhook.ID, workspaceID, secret) +} + +func (h *RenderWebhookHandler) createWebhook( + ctx core.WebhookHandlerContext, + client *Client, + workspaceID string, + request webhookSetupRequest, +) (WebhookMetadata, 
error) { + createdWebhook, err := client.CreateWebhook(CreateWebhookRequest{ + WorkspaceID: workspaceID, + Name: request.Name, + URL: request.URL, + Enabled: true, + EventFilter: request.EventFilter, + }) + if err != nil { + return WebhookMetadata{}, fmt.Errorf("failed to create Render webhook: %w", err) + } + + return finalizeWebhookSetup(ctx, createdWebhook.ID, workspaceID, createdWebhook.Secret) +} + +func finalizeWebhookSetup( + ctx core.WebhookHandlerContext, + webhookID string, + workspaceID string, + secret string, +) (WebhookMetadata, error) { + if err := setWebhookSecret(ctx, secret); err != nil { + return WebhookMetadata{}, err + } + + return WebhookMetadata{WebhookID: webhookID, WorkspaceID: workspaceID}, nil +} + +func (h *RenderWebhookHandler) findExistingWebhook( + client *Client, + workspaceID string, + webhookURL string, + webhookConfiguration WebhookConfiguration, + selectedWebhookName string, + eventFilter []string, +) (*Webhook, error) { + webhooks, err := client.ListWebhooks(workspaceID) + if err != nil { + return nil, fmt.Errorf("failed to list Render webhooks: %w", err) + } + + candidateWebhooks := filterWebhooksByURL(webhooks, webhookURL) + + if webhookConfiguration.Strategy == webhookStrategyIntegration { + return pickExistingRenderWebhook(candidateWebhooks, selectedWebhookName), nil + } + + webhook := pickExistingRenderWebhookByName(candidateWebhooks, selectedWebhookName) + if webhook != nil { + return webhook, nil + } + + return pickExistingRenderWebhookByEventFilter(candidateWebhooks, eventFilter), nil +} + +func (h *RenderWebhookHandler) updateWebhookIfNeeded( + client *Client, + selectedWebhook Webhook, + selectedWebhookName string, + webhookURL string, + eventFilter []string, +) (string, error) { + retrievedWebhook, err := client.GetWebhook(selectedWebhook.ID) + if err != nil { + return "", fmt.Errorf("failed to retrieve existing Render webhook: %w", err) + } + if retrievedWebhook == nil { + return "", fmt.Errorf("failed to retrieve 
existing Render webhook: empty response") + } + + existingEventFilter := existingWebhookEventFilter(*retrievedWebhook, selectedWebhook) + existingName := existingWebhookName(*retrievedWebhook, selectedWebhook) + mergedEventFilter := mergeWebhookEventFilters(existingEventFilter, eventFilter) + if shouldUpdateWebhook(retrievedWebhook.Enabled, existingName, selectedWebhookName, existingEventFilter, mergedEventFilter) { + _, err = client.UpdateWebhook(selectedWebhook.ID, UpdateWebhookRequest{ + Name: selectedWebhookName, + URL: webhookURL, + Enabled: true, + EventFilter: mergedEventFilter, + }) + if err != nil { + return "", fmt.Errorf("failed to update existing Render webhook: %w", err) + } + } + + return webhookSecret(*retrievedWebhook, selectedWebhook), nil +} + +func setWebhookSecret(ctx core.WebhookHandlerContext, secret string) error { + if secret == "" { + return fmt.Errorf("render webhook secret is empty") + } + if err := ctx.Webhook.SetSecret([]byte(secret)); err != nil { + return fmt.Errorf("failed to store webhook secret: %w", err) + } + return nil +} + +func decodeWebhookMetadata(value any) (WebhookMetadata, error) { + metadata := WebhookMetadata{} + if err := mapstructure.Decode(value, &metadata); err != nil { + return WebhookMetadata{}, err + } + + return metadata, nil +} + +func (h *RenderWebhookHandler) Cleanup(ctx core.WebhookHandlerContext) error { + metadata, err := decodeWebhookMetadata(ctx.Webhook.GetMetadata()) + if err != nil { + return fmt.Errorf("failed to decode webhook metadata: %w", err) + } + + if metadata.WebhookID == "" { + return nil + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + err = client.DeleteWebhook(metadata.WebhookID) + if err == nil { + return nil + } + + apiErr, ok := err.(*APIError) + if ok && apiErr.StatusCode == 404 { + return nil + } + + return err +} + +func pickExistingRenderWebhook(webhooks []Webhook, webhookName string) *Webhook { + if len(webhooks) == 0 { + return nil 
+ } + + nameMatch := pickExistingRenderWebhookByName(webhooks, webhookName) + if nameMatch != nil { + return nameMatch + } + + return &webhooks[0] +} + +func pickExistingRenderWebhookByName(webhooks []Webhook, webhookName string) *Webhook { + if webhookName == "" { + return nil + } + + for i := range webhooks { + if webhooks[i].Name == webhookName { + return &webhooks[i] + } + } + + return nil +} + +func pickExistingRenderWebhookByEventFilter(webhooks []Webhook, eventFilter []string) *Webhook { + for i := range webhooks { + if slices.Equal(normalizeWebhookEventTypes(webhooks[i].EventFilter), eventFilter) { + return &webhooks[i] + } + } + + return nil +} + +func filterWebhooksByURL(webhooks []Webhook, webhookURL string) []Webhook { + filteredWebhooks := make([]Webhook, 0, len(webhooks)) + for _, webhook := range webhooks { + if webhook.URL == webhookURL { + filteredWebhooks = append(filteredWebhooks, webhook) + } + } + + return filteredWebhooks +} + +func existingWebhookEventFilter(retrievedWebhook, selectedWebhook Webhook) []string { + existingEventFilter := normalizeWebhookEventTypes(retrievedWebhook.EventFilter) + if len(existingEventFilter) != 0 { + return existingEventFilter + } + + return normalizeWebhookEventTypes(selectedWebhook.EventFilter) +} + +func existingWebhookName(retrievedWebhook, selectedWebhook Webhook) string { + existingName := retrievedWebhook.Name + if existingName != "" { + return existingName + } + + return selectedWebhook.Name +} + +func mergeWebhookEventFilters(existingEventFilter, requestEventFilter []string) []string { + mergedEventFilter := normalizeWebhookEventTypes(append(existingEventFilter, requestEventFilter...)) + if len(mergedEventFilter) != 0 { + return mergedEventFilter + } + + return requestEventFilter +} + +func shouldUpdateWebhook( + enabled bool, + existingName string, + selectedWebhookName string, + existingEventFilter []string, + mergedEventFilter []string, +) bool { + return existingName != selectedWebhookName || 
!slices.Equal(existingEventFilter, mergedEventFilter) || !enabled +} + +func webhookSecret(retrievedWebhook, selectedWebhook Webhook) string { + if retrievedWebhook.Secret != "" { + return retrievedWebhook.Secret + } + + return selectedWebhook.Secret +} diff --git a/pkg/integrations/rootly/webhook_handler.go b/pkg/integrations/rootly/webhook_handler.go index cd7ed33327..68833d9717 100644 --- a/pkg/integrations/rootly/webhook_handler.go +++ b/pkg/integrations/rootly/webhook_handler.go @@ -42,6 +42,10 @@ func (h *RootlyWebhookHandler) CompareConfig(a, b any) (bool, error) { return true, nil } +func (h *RootlyWebhookHandler) Merge(current, requested any) (any, bool, error) { + return current, false, nil +} + func (h *RootlyWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { client, err := NewClient(ctx.HTTP, ctx.Integration) if err != nil { diff --git a/pkg/integrations/semaphore/webhook_handler.go b/pkg/integrations/semaphore/webhook_handler.go index 5e47e7e01b..591db48c65 100644 --- a/pkg/integrations/semaphore/webhook_handler.go +++ b/pkg/integrations/semaphore/webhook_handler.go @@ -43,6 +43,10 @@ func (h *SemaphoreWebhookHandler) CompareConfig(a, b any) (bool, error) { return configA.Project == configB.Project, nil } +func (h *SemaphoreWebhookHandler) Merge(current, requested any) (any, bool, error) { + return current, false, nil +} + func (h *SemaphoreWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { client, err := NewClient(ctx.HTTP, ctx.Integration) if err != nil { diff --git a/pkg/integrations/sendgrid/webhook_handler.go b/pkg/integrations/sendgrid/webhook_handler.go index d621334d46..e255bf050c 100644 --- a/pkg/integrations/sendgrid/webhook_handler.go +++ b/pkg/integrations/sendgrid/webhook_handler.go @@ -14,6 +14,10 @@ func (s *SendGridWebhookHandler) CompareConfig(a, b any) (bool, error) { return true, nil } +func (s *SendGridWebhookHandler) Merge(current, requested any) (any, bool, error) { + return current, false, nil 
+} + func (s *SendGridWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { client, err := NewClient(ctx.HTTP, ctx.Integration) if err != nil { diff --git a/pkg/public/server.go b/pkg/public/server.go index 942801aba2..6d3b72c9c0 100644 --- a/pkg/public/server.go +++ b/pkg/public/server.go @@ -789,14 +789,26 @@ func (s *Server) executeTriggerNode(ctx context.Context, body []byte, headers ht } tx := database.Conn() + var integrationCtx core.IntegrationContext + if node.AppInstallationID != nil { + integration, integrationErr := models.FindUnscopedIntegrationInTransaction(tx, *node.AppInstallationID) + if integrationErr != nil { + return http.StatusInternalServerError, integrationErr + } + + integrationCtx = contexts.NewIntegrationContext(tx, &node, integration, s.encryptor, s.registry) + } + return trigger.HandleWebhook(core.WebhookRequestContext{ Body: body, Headers: headers, WorkflowID: node.WorkflowID.String(), NodeID: node.NodeID, Configuration: node.Configuration.Data(), + HTTP: s.registry.HTTPContext(), Webhook: contexts.NewNodeWebhookContext(ctx, tx, s.encryptor, &node, s.BaseURL+s.BasePath), Events: contexts.NewEventContext(tx, &node), + Integration: integrationCtx, }) } @@ -808,14 +820,26 @@ func (s *Server) executeComponentNode(ctx context.Context, body []byte, headers } tx := database.Conn() + var integrationCtx core.IntegrationContext + if node.AppInstallationID != nil { + integration, integrationErr := models.FindUnscopedIntegrationInTransaction(tx, *node.AppInstallationID) + if integrationErr != nil { + return http.StatusInternalServerError, integrationErr + } + + integrationCtx = contexts.NewIntegrationContext(tx, &node, integration, s.encryptor, s.registry) + } + return component.HandleWebhook(core.WebhookRequestContext{ Body: body, Headers: headers, WorkflowID: node.WorkflowID.String(), NodeID: node.NodeID, Configuration: node.Configuration.Data(), + HTTP: s.registry.HTTPContext(), Webhook: contexts.NewNodeWebhookContext(ctx, tx, 
s.encryptor, &node, s.BaseURL+s.BasePath), Events: contexts.NewEventContext(tx, &node), + Integration: integrationCtx, FindExecutionByKV: func(key string, value string) (*core.ExecutionContext, error) { execution, err := models.FirstNodeExecutionByKVInTransaction(tx, node.WorkflowID, node.NodeID, key, value) if err != nil { diff --git a/pkg/registry/webhook_handler.go b/pkg/registry/webhook_handler.go index b66158f246..be5433a72d 100644 --- a/pkg/registry/webhook_handler.go +++ b/pkg/registry/webhook_handler.go @@ -19,6 +19,17 @@ func (h *PanicableWebhookHandler) CompareConfig(a, b any) (bool, error) { return h.underlying.CompareConfig(a, b) } +func (h *PanicableWebhookHandler) Merge(current, requested any) (merged any, changed bool, err error) { + defer func() { + if r := recover(); r != nil { + merged = current + changed = false + err = fmt.Errorf("webhook handler panicked in Merge(): %v", r) + } + }() + return h.underlying.Merge(current, requested) +} + func (h *PanicableWebhookHandler) Setup(ctx core.WebhookHandlerContext) (metadata any, err error) { defer func() { if r := recover(); r != nil { diff --git a/pkg/registry/webhook_handler_test.go b/pkg/registry/webhook_handler_test.go index ed3cf3945a..dfdac1b5fe 100644 --- a/pkg/registry/webhook_handler_test.go +++ b/pkg/registry/webhook_handler_test.go @@ -16,6 +16,10 @@ func (p *panickingWebhookHandler) CompareConfig(a, b any) (bool, error) { panic("compare config panic") } +func (p *panickingWebhookHandler) Merge(current, requested any) (any, bool, error) { + panic("merge panic") +} + func (p *panickingWebhookHandler) Setup(ctx core.WebhookHandlerContext) (metadata any, err error) { panic("setup panic") } @@ -47,3 +51,14 @@ func Test_PanicableWebhookHandler_Cleanup_CatchesPanic(t *testing.T) { require.Error(t, err) assert.Contains(t, err.Error(), "cleanup panic") } + +func Test_PanicableWebhookHandler_Merge_CatchesPanic(t *testing.T) { + handler := &panickingWebhookHandler{} + panicable := 
NewPanicableWebhookHandler(handler) + + merged, changed, err := panicable.Merge(map[string]any{"a": "b"}, nil) + require.Error(t, err) + assert.False(t, changed) + assert.Equal(t, map[string]any{"a": "b"}, merged) + assert.Contains(t, err.Error(), "merge panic") +} diff --git a/pkg/server/server.go b/pkg/server/server.go index d295d856db..9864e9022e 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -44,6 +44,7 @@ import ( _ "github.com/superplanehq/superplane/pkg/integrations/jira" _ "github.com/superplanehq/superplane/pkg/integrations/openai" _ "github.com/superplanehq/superplane/pkg/integrations/pagerduty" + _ "github.com/superplanehq/superplane/pkg/integrations/render" _ "github.com/superplanehq/superplane/pkg/integrations/rootly" _ "github.com/superplanehq/superplane/pkg/integrations/semaphore" _ "github.com/superplanehq/superplane/pkg/integrations/sendgrid" diff --git a/pkg/workers/contexts/integration_context.go b/pkg/workers/contexts/integration_context.go index 25401f566b..1579bbeb50 100644 --- a/pkg/workers/contexts/integration_context.go +++ b/pkg/workers/contexts/integration_context.go @@ -62,6 +62,10 @@ func (c *IntegrationContext) RequestWebhook(configuration any) error { } if ok { + if err := c.mergeWebhookConfiguration(handler, &hook, configuration); err != nil { + return err + } + c.node.WebhookID = &hook.ID return nil } @@ -129,6 +133,32 @@ func (c *IntegrationContext) createWebhook(configuration any) error { return nil } +func (c *IntegrationContext) mergeWebhookConfiguration( + handler core.WebhookHandler, + webhook *models.Webhook, + configuration any, +) error { + mergedConfiguration, changed, err := handler.Merge(webhook.Configuration.Data(), configuration) + if err != nil { + return err + } + + if !changed { + return nil + } + + webhook.Configuration = datatypes.NewJSONType(mergedConfiguration) + webhook.State = models.WebhookStatePending + webhook.RetryCount = 0 + + return c.tx.Model(webhook).Updates(map[string]any{ + 
"configuration": webhook.Configuration, + "state": webhook.State, + "retry_count": webhook.RetryCount, + "updated_at": time.Now(), + }).Error +} + func (c *IntegrationContext) ScheduleResync(interval time.Duration) error { if interval < time.Second { return fmt.Errorf("interval must be bigger than 1s") diff --git a/pkg/workers/contexts/integration_context_test.go b/pkg/workers/contexts/integration_context_test.go index 92889bf347..213dcbb73a 100644 --- a/pkg/workers/contexts/integration_context_test.go +++ b/pkg/workers/contexts/integration_context_test.go @@ -2,6 +2,7 @@ package contexts import ( "context" + "encoding/json" "reflect" "slices" "testing" @@ -149,3 +150,106 @@ func Test__IntegrationContext_RequestWebhook_ReplacesWebhookOnConfigChange(t *te require.False(t, newWebhook.DeletedAt.Valid) assert.Equal(t, newConfig, newWebhook.Configuration.Data()) } + +func Test__IntegrationContext_RequestWebhook_MergesExistingWebhookConfig(t *testing.T) { + r := support.Setup(t) + defer r.Close() + + r.Registry.Integrations["dummy"] = support.NewDummyIntegration(support.DummyIntegrationOptions{}) + r.Registry.WebhookHandlers["dummy"] = support.NewDummyWebhookHandler(support.DummyWebhookHandlerOptions{ + CompareConfigFunc: func(a, b any) (bool, error) { + return true, nil + }, + MergeFunc: func(current, requested any) (any, bool, error) { + currentMap := current.(map[string]any) + requestedMap := requested.(map[string]any) + + currentEvents := []string{} + switch values := currentMap["eventTypes"].(type) { + case []string: + currentEvents = append(currentEvents, values...) + case []any: + for _, item := range values { + if eventType, ok := item.(string); ok { + currentEvents = append(currentEvents, eventType) + } + } + } + + requestedEvents := []string{} + switch values := requestedMap["eventTypes"].(type) { + case []string: + requestedEvents = append(requestedEvents, values...) 
+ case []any: + for _, item := range values { + if eventType, ok := item.(string); ok { + requestedEvents = append(requestedEvents, eventType) + } + } + } + + events := append(currentEvents, requestedEvents...) + slices.Sort(events) + events = slices.Compact(events) + + return map[string]any{"eventTypes": events}, len(events) != len(currentEvents), nil + }, + }) + + integration, err := models.CreateIntegration( + uuid.New(), + r.Organization.ID, + "dummy", + support.RandomName("installation"), + map[string]any{}, + ) + require.NoError(t, err) + + webhookID := uuid.New() + _, encryptedKey, err := crypto.NewRandomKey(context.Background(), r.Encryptor, webhookID.String()) + require.NoError(t, err) + + now := time.Now() + webhook := models.Webhook{ + ID: webhookID, + State: models.WebhookStateReady, + Secret: encryptedKey, + Configuration: datatypes.NewJSONType[any](map[string]any{"eventTypes": []string{"deploy_ended"}}), + Metadata: datatypes.NewJSONType[any](map[string]any{}), + AppInstallationID: &integration.ID, + CreatedAt: &now, + } + require.NoError(t, database.Conn().Create(&webhook).Error) + + inputNode := models.CanvasNode{ + NodeID: "node-1", + Name: "Node 1", + Type: models.NodeTypeTrigger, + Ref: datatypes.NewJSONType(models.NodeRef{Trigger: &models.TriggerRef{Name: "start"}}), + Configuration: datatypes.NewJSONType(map[string]any{}), + Metadata: datatypes.NewJSONType(map[string]any{}), + Position: datatypes.NewJSONType(models.Position{}), + } + + canvas, nodes := support.CreateCanvas(t, r.Organization.ID, r.User, []models.CanvasNode{inputNode}, nil) + require.NotNil(t, canvas) + require.Len(t, nodes, 1) + + node := nodes[0] + node.AppInstallationID = &integration.ID + require.NoError(t, database.Conn().Save(&node).Error) + + ctx := NewIntegrationContext(database.Conn(), &node, integration, r.Encryptor, r.Registry) + require.NoError(t, ctx.RequestWebhook(map[string]any{"eventTypes": []string{"build_ended"}})) + + require.NotNil(t, node.WebhookID) + 
require.Equal(t, webhookID, *node.WebhookID) + + updatedWebhook, err := models.FindWebhookInTransaction(database.Conn(), webhookID) + require.NoError(t, err) + assert.Equal(t, models.WebhookStatePending, updatedWebhook.State) + + configurationJSON, marshalErr := json.Marshal(updatedWebhook.Configuration.Data()) + require.NoError(t, marshalErr) + assert.JSONEq(t, `{"eventTypes":["build_ended","deploy_ended"]}`, string(configurationJSON)) +} diff --git a/test/support/application.go b/test/support/application.go index 4b80f7f63a..7471fc836c 100644 --- a/test/support/application.go +++ b/test/support/application.go @@ -20,15 +20,15 @@ type DummyIntegrationOptions struct { Actions []core.Action HandleAction func(ctx core.IntegrationActionContext) error OnSync func(ctx core.SyncContext) error + OnCleanup func(ctx core.IntegrationCleanupContext) error } -func NewDummyIntegration( - options DummyIntegrationOptions, -) *DummyIntegration { +func NewDummyIntegration(options DummyIntegrationOptions) *DummyIntegration { return &DummyIntegration{ actions: options.Actions, handleAction: options.HandleAction, onSync: options.OnSync, + onCleanup: options.OnCleanup, } } @@ -100,12 +100,14 @@ type DummyWebhookHandlerOptions struct { SetupFunc func(ctx core.WebhookHandlerContext) (any, error) CleanupFunc func(ctx core.WebhookHandlerContext) error CompareConfigFunc func(a, b any) (bool, error) + MergeFunc func(current, requested any) (any, bool, error) } type DummyWebhookHandler struct { setupFunc func(ctx core.WebhookHandlerContext) (any, error) cleanupFunc func(ctx core.WebhookHandlerContext) error compareConfigFunc func(a, b any) (bool, error) + mergeFunc func(current, requested any) (any, bool, error) } func NewDummyWebhookHandler(options DummyWebhookHandlerOptions) *DummyWebhookHandler { @@ -113,6 +115,7 @@ func NewDummyWebhookHandler(options DummyWebhookHandlerOptions) *DummyWebhookHan setupFunc: options.SetupFunc, cleanupFunc: options.CleanupFunc, compareConfigFunc: 
options.CompareConfigFunc, + mergeFunc: options.MergeFunc, } } @@ -136,3 +139,10 @@ func (t *DummyWebhookHandler) Cleanup(ctx core.WebhookHandlerContext) error { } return t.cleanupFunc(ctx) } + +func (t *DummyWebhookHandler) Merge(current, requested any) (any, bool, error) { + if t.mergeFunc == nil { + return current, false, nil + } + return t.mergeFunc(current, requested) +} diff --git a/web_src/src/assets/icons/integrations/render.svg b/web_src/src/assets/icons/integrations/render.svg new file mode 100644 index 0000000000..52bbd5bc96 --- /dev/null +++ b/web_src/src/assets/icons/integrations/render.svg @@ -0,0 +1,13 @@ + + + + + + + + diff --git a/web_src/src/pages/workflowv2/index.tsx b/web_src/src/pages/workflowv2/index.tsx index 75ecd662c6..50c23f107d 100644 --- a/web_src/src/pages/workflowv2/index.tsx +++ b/web_src/src/pages/workflowv2/index.tsx @@ -3075,7 +3075,7 @@ function prepareTriggerNode( const lastEvent = nodeEventsMap[node.id!]?.[0]; const triggerProps = renderer.getTriggerProps({ node: buildNodeInfo(node), - definition: buildComponentDefinition(triggerMetadata!), + definition: buildComponentDefinition(triggerMetadata), lastEvent: buildEventInfo(lastEvent), }); diff --git a/web_src/src/pages/workflowv2/mappers/aws/lambda/run_function.ts b/web_src/src/pages/workflowv2/mappers/aws/lambda/run_function.ts index c702a8e6a7..04605d06f5 100644 --- a/web_src/src/pages/workflowv2/mappers/aws/lambda/run_function.ts +++ b/web_src/src/pages/workflowv2/mappers/aws/lambda/run_function.ts @@ -127,6 +127,7 @@ function runFunctionEventSections(nodes: NodeInfo[], execution: ExecutionInfo, c id: execution.rootEvent?.id!, createdAt: execution.rootEvent?.createdAt!, data: execution.rootEvent?.data || {}, + type: execution.rootEvent?.type!, }, }); diff --git a/web_src/src/pages/workflowv2/mappers/index.ts b/web_src/src/pages/workflowv2/mappers/index.ts index 208b01d1ee..9668e345ee 100644 --- a/web_src/src/pages/workflowv2/mappers/index.ts +++ 
b/web_src/src/pages/workflowv2/mappers/index.ts @@ -65,6 +65,11 @@ import { triggerRenderers as sendgridTriggerRenderers, eventStateRegistry as sendgridEventStateRegistry, } from "./sendgrid"; +import { + componentMappers as renderComponentMappers, + triggerRenderers as renderTriggerRenderers, + eventStateRegistry as renderEventStateRegistry, +} from "./render"; import { componentMappers as rootlyComponentMappers, triggerRenderers as rootlyTriggerRenderers, @@ -138,6 +143,7 @@ const appMappers: Record> = { slack: slackComponentMappers, smtp: smtpComponentMappers, sendgrid: sendgridComponentMappers, + render: renderComponentMappers, rootly: rootlyComponentMappers, aws: awsComponentMappers, discord: discordComponentMappers, @@ -157,6 +163,7 @@ const appTriggerRenderers: Record> = { slack: slackTriggerRenderers, smtp: smtpTriggerRenderers, sendgrid: sendgridTriggerRenderers, + render: renderTriggerRenderers, rootly: rootlyTriggerRenderers, aws: awsTriggerRenderers, discord: discordTriggerRenderers, @@ -176,6 +183,7 @@ const appEventStateRegistries: Record slack: slackEventStateRegistry, smtp: smtpEventStateRegistry, sendgrid: sendgridEventStateRegistry, + render: renderEventStateRegistry, discord: discordEventStateRegistry, rootly: rootlyEventStateRegistry, openai: openaiEventStateRegistry, diff --git a/web_src/src/pages/workflowv2/mappers/render/common.ts b/web_src/src/pages/workflowv2/mappers/render/common.ts new file mode 100644 index 0000000000..5a9ecc79ca --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/render/common.ts @@ -0,0 +1,167 @@ +import { TriggerEventContext, TriggerRenderer, TriggerRendererContext } from "../types"; +import { TriggerProps } from "@/ui/trigger"; +import { getBackgroundColorClass, getColorClass } from "@/utils/colors"; +import { formatTimeAgo } from "@/utils/date"; +import renderIcon from "@/assets/icons/integrations/render.svg"; + +interface RenderEventData { + eventId?: string; + serviceId?: string; + serviceName?: string; + 
status?: string; + deployId?: string; + buildId?: string; +} + +interface OnEventConfiguration { + eventTypes?: string[]; + service?: string; +} + +/** Labels for event types as received in payloads (dot-case, e.g. render.deploy.ended). */ +const eventLabelsByType: Record = { + "render.deploy.ended": "Deploy Ended", + "render.deploy.started": "Deploy Started", + "render.build.ended": "Build Ended", + "render.build.started": "Build Started", + "render.image.pull.failed": "Image Pull Failed", + "render.pipeline.minutes.exhausted": "Pipeline Minutes Exhausted", + "render.pre.deploy.ended": "Pre-Deploy Ended", + "render.pre.deploy.started": "Pre-Deploy Started", +}; + +/** Labels for event types as stored in configuration (snake_case, e.g. deploy_ended). */ +const eventLabelsByConfig: Record = { + deploy_ended: "Deploy Ended", + deploy_started: "Deploy Started", + build_ended: "Build Ended", + build_started: "Build Started", + image_pull_failed: "Image Pull Failed", + pipeline_minutes_exhausted: "Pipeline Minutes Exhausted", + pre_deploy_ended: "Pre-Deploy Ended", + pre_deploy_started: "Pre-Deploy Started", +}; + +function formatEventLabel(event?: string): string { + if (!event) { + return "Render Event"; + } + + return eventLabelsByType[event] || eventLabelsByConfig[event] || event; +} + +export const renderTriggerRenderer: TriggerRenderer = { + getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { + const event = context.event?.data as RenderEventData | undefined; + const title = buildTitle(event, context.event?.type as string); + + return { + title, + subtitle: buildSubtitle(context.event?.createdAt), + }; + }, + + getRootEventValues: (context: TriggerEventContext): Record => { + const event = context.event?.data as RenderEventData | undefined; + const values: Record = { + "Received At": formatTimestamp(context.event?.createdAt), + Event: stringOrDash(context.event?.type), + "Event ID": stringOrDash(event?.eventId), + "Service 
ID": stringOrDash(event?.serviceId), + "Service Name": stringOrDash(event?.serviceName), + }; + + if (event?.deployId) { + values["Deploy ID"] = event.deployId; + } + if (event?.buildId) { + values["Build ID"] = event.buildId; + } + + if (event?.status) { + values["Status"] = event.status; + } + + return values; + }, + + getTriggerProps: (context: TriggerRendererContext) => { + const { node, definition, lastEvent } = context; + const configuration = node.configuration as OnEventConfiguration | undefined; + const metadata = buildMetadata(configuration); + + const props: TriggerProps = { + title: node.name || definition.label || "Unnamed trigger", + iconSrc: renderIcon, + iconColor: getColorClass(definition.color), + collapsedBackground: getBackgroundColorClass(definition.color), + metadata, + }; + + if (lastEvent) { + const event = lastEvent.data as RenderEventData; + props.lastEventData = { + title: buildTitle(event, lastEvent.type as string), + subtitle: buildSubtitle(lastEvent.createdAt), + receivedAt: new Date(lastEvent.createdAt), + state: "triggered", + eventId: lastEvent.id, + }; + } + + return props; + }, +}; + +function buildMetadata(configuration: OnEventConfiguration | undefined): TriggerProps["metadata"] { + const metadata: TriggerProps["metadata"] = []; + + if (configuration?.eventTypes && configuration.eventTypes.length > 0) { + const eventTypes = configuration.eventTypes.map((event: string) => formatEventLabel(event)); + metadata.push({ + icon: "funnel", + label: eventTypes.length > 3 ? 
`Events: ${eventTypes.length} selected` : `Events: ${eventTypes.join(", ")}`, + }); + } + + const service = configuration?.service; + if (service) { + metadata.push({ + icon: "server", + label: `Service: ${service}`, + }); + } + + return metadata; +} + +function buildTitle(event: RenderEventData | undefined, type?: string): string { + const serviceLabel = event?.serviceName || event?.serviceId || "Service"; + const eventLabel = formatEventLabel(type); + return `${serviceLabel} · ${eventLabel}`; +} + +function buildSubtitle(createdAt?: string): string { + return createdAt ? formatTimeAgo(new Date(createdAt)) : ""; +} + +/** Shared: value or "-" for display. */ +export function stringOrDash(value?: unknown): string { + if (value === undefined || value === null || value === "") { + return "-"; + } + return String(value); +} + +/** Shared: format timestamp for display, or "-" if missing/invalid. */ +export function formatTimestamp(value?: string, fallback?: string): string { + const timestamp = value || fallback; + if (!timestamp) { + return "-"; + } + const date = new Date(timestamp); + if (Number.isNaN(date.getTime())) { + return "-"; + } + return date.toLocaleString(); +} diff --git a/web_src/src/pages/workflowv2/mappers/render/deploy.ts b/web_src/src/pages/workflowv2/mappers/render/deploy.ts new file mode 100644 index 0000000000..1837a7a92c --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/render/deploy.ts @@ -0,0 +1,142 @@ +import { + ComponentBaseContext, + ComponentBaseMapper, + EventStateRegistry, + ExecutionDetailsContext, + ExecutionInfo, + NodeInfo, + OutputPayload, + StateFunction, + SubtitleContext, +} from "../types"; +import { ComponentBaseProps, DEFAULT_EVENT_STATE_MAP, EventSection, EventStateMap } from "@/ui/componentBase"; +import { getBackgroundColorClass, getColorClass } from "@/utils/colors"; +import { getState, getTriggerRenderer } from ".."; +import { MetadataItem } from "@/ui/metadataList"; +import { formatTimeAgo } from "@/utils/date"; 
+import renderIcon from "@/assets/icons/integrations/render.svg"; +import { formatTimestamp, stringOrDash } from "./common"; +import { defaultStateFunction } from "../stateRegistry"; + +interface DeployConfiguration { + service?: string; + clearCache?: boolean; +} + +interface DeployOutput { + deployId?: string; + status?: string; + createdAt?: string; + finishedAt?: string; +} + +export const DEPLOY_STATE_MAP: EventStateMap = { + ...DEFAULT_EVENT_STATE_MAP, + failed: { + icon: "circle-x", + textColor: "text-gray-800", + backgroundColor: "bg-red-100", + badgeColor: "bg-red-500", + }, + cancelled: { + icon: "circle-slash-2", + textColor: "text-gray-800", + backgroundColor: "bg-gray-100", + badgeColor: "bg-gray-500", + }, +}; + +export const deployStateFunction: StateFunction = (execution) => { + if (!execution) return "neutral"; + + const outputs = execution.outputs as { failed?: OutputPayload[] } | undefined; + if (outputs?.failed?.length) { + const failedOutput = outputs.failed[0]?.data as DeployOutput | undefined; + const failedStatus = failedOutput?.status?.toLowerCase(); + if (failedStatus === "cancelled" || failedStatus === "canceled") { + return "cancelled"; + } + return "failed"; + } + + const state = defaultStateFunction(execution); + return state === "success" ? "triggered" : state; +}; + +export const DEPLOY_STATE_REGISTRY: EventStateRegistry = { + stateMap: DEPLOY_STATE_MAP, + getState: deployStateFunction, +}; + +export const deployMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + const lastExecution = context.lastExecutions.length > 0 ? 
context.lastExecutions[0] : null; + const componentName = context.componentDefinition.name || context.node.componentName || "unknown"; + + return { + title: + context.node.name || + context.componentDefinition.label || + context.componentDefinition.name || + "Unnamed component", + iconSrc: renderIcon, + iconColor: getColorClass(context.componentDefinition.color), + collapsedBackground: getBackgroundColorClass(context.componentDefinition.color), + collapsed: context.node.isCollapsed, + eventSections: lastExecution ? deployEventSections(context.nodes, lastExecution, componentName) : undefined, + includeEmptyState: !lastExecution, + metadata: deployMetadataList(context.node), + eventStateMap: DEPLOY_STATE_MAP, + }; + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { success?: OutputPayload[]; failed?: OutputPayload[] } | undefined; + const result = + (outputs?.success?.[0]?.data as DeployOutput | undefined) ?? + (outputs?.failed?.[0]?.data as DeployOutput | undefined); + + return { + "Triggered At": formatTimestamp(result?.createdAt, context.execution.createdAt), + "Deploy ID": stringOrDash(result?.deployId), + Status: stringOrDash(result?.status), + "Finished At": formatTimestamp(result?.finishedAt), + }; + }, + + subtitle(context: SubtitleContext): string { + if (!context.execution.createdAt) return ""; + return formatTimeAgo(new Date(context.execution.createdAt)); + }, +}; + +function deployMetadataList(node: NodeInfo): MetadataItem[] { + const metadata: MetadataItem[] = []; + const configuration = node.configuration as DeployConfiguration | undefined; + + if (configuration?.service) { + metadata.push({ icon: "server", label: `Service: ${configuration.service}` }); + } + + if (configuration?.clearCache) { + metadata.push({ icon: "trash-2", label: "Clear cache" }); + } + + return metadata; +} + +function deployEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): 
EventSection[] { + const rootTriggerNode = nodes.find((node) => node.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName || ""); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle: formatTimeAgo(new Date(execution.createdAt!)), + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/render/index.ts b/web_src/src/pages/workflowv2/mappers/render/index.ts new file mode 100644 index 0000000000..8a46ae5bad --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/render/index.ts @@ -0,0 +1,17 @@ +import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../types"; +import { deployMapper, DEPLOY_STATE_REGISTRY } from "./deploy"; +import { onBuildTriggerRenderer } from "./on_build"; +import { onDeployTriggerRenderer } from "./on_deploy"; + +export const componentMappers: Record = { + deploy: deployMapper, +}; + +export const triggerRenderers: Record = { + onDeploy: onDeployTriggerRenderer, + onBuild: onBuildTriggerRenderer, +}; + +export const eventStateRegistry: Record = { + deploy: DEPLOY_STATE_REGISTRY, +}; diff --git a/web_src/src/pages/workflowv2/mappers/render/on_build.ts b/web_src/src/pages/workflowv2/mappers/render/on_build.ts new file mode 100644 index 0000000000..6ec94271c6 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/render/on_build.ts @@ -0,0 +1,4 @@ +import { TriggerRenderer } from "../types"; +import { renderTriggerRenderer } from "./common"; + +export const onBuildTriggerRenderer: TriggerRenderer = renderTriggerRenderer; diff --git a/web_src/src/pages/workflowv2/mappers/render/on_deploy.ts b/web_src/src/pages/workflowv2/mappers/render/on_deploy.ts new file mode 100644 index 0000000000..eafc954f37 --- /dev/null +++ 
b/web_src/src/pages/workflowv2/mappers/render/on_deploy.ts @@ -0,0 +1,4 @@ +import { TriggerRenderer } from "../types"; +import { renderTriggerRenderer } from "./common"; + +export const onDeployTriggerRenderer: TriggerRenderer = renderTriggerRenderer; diff --git a/web_src/src/pages/workflowv2/mappers/types.ts b/web_src/src/pages/workflowv2/mappers/types.ts index f173d8d900..a637347ede 100644 --- a/web_src/src/pages/workflowv2/mappers/types.ts +++ b/web_src/src/pages/workflowv2/mappers/types.ts @@ -54,6 +54,7 @@ export type EventInfo = customName?: string; data: any; nodeId: string; + type: string; } | undefined; diff --git a/web_src/src/pages/workflowv2/utils.ts b/web_src/src/pages/workflowv2/utils.ts index 3941a9019d..5904e06ed6 100644 --- a/web_src/src/pages/workflowv2/utils.ts +++ b/web_src/src/pages/workflowv2/utils.ts @@ -1148,13 +1148,13 @@ export function buildExecutionInfo(execution: CanvasesCanvasNodeExecution): Exec }; } -export function buildComponentDefinition(component: ComponentsComponent): ComponentDefinition { +export function buildComponentDefinition(component?: Partial): ComponentDefinition { return { - name: component.name!, - label: component.label!, - description: component.description!, - icon: component.icon!, - color: component.color!, + name: component?.name || "unknown", + label: component?.label || "Unknown", + description: component?.description || "", + icon: component?.icon || "bolt", + color: component?.color || "gray", }; } @@ -1166,6 +1166,7 @@ export function buildEventInfo(event: CanvasesCanvasEvent): EventInfo | undefine createdAt: event.createdAt!, data: event.data?.data || {}, nodeId: event.nodeId!, + type: (event.data?.type as string) || "", }; } diff --git a/web_src/src/ui/BuildingBlocksSidebar/index.tsx b/web_src/src/ui/BuildingBlocksSidebar/index.tsx index f060fefb42..546254986a 100644 --- a/web_src/src/ui/BuildingBlocksSidebar/index.tsx +++ b/web_src/src/ui/BuildingBlocksSidebar/index.tsx @@ -31,6 +31,7 @@ import 
awsCodeArtifactIcon from "@/assets/icons/integrations/aws.codeartifact.sv import rootlyIcon from "@/assets/icons/integrations/rootly.svg"; import SemaphoreLogo from "@/assets/semaphore-logo-sign-black.svg"; import sendgridIcon from "@/assets/icons/integrations/sendgrid.svg"; +import renderIcon from "@/assets/icons/integrations/render.svg"; export interface BuildingBlock { name: string; @@ -405,6 +406,7 @@ function CategorySection({ semaphore: SemaphoreLogo, slack: slackIcon, sendgrid: sendgridIcon, + render: renderIcon, aws: { codeArtifact: awsIcon, lambda: awsLambdaIcon, @@ -475,6 +477,7 @@ function CategorySection({ semaphore: SemaphoreLogo, slack: slackIcon, sendgrid: sendgridIcon, + render: renderIcon, aws: { codeArtifact: awsCodeArtifactIcon, ecr: awsEcrIcon, diff --git a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index c46a490c90..25846ebe99 100644 --- a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -17,6 +17,7 @@ import slackIcon from "@/assets/icons/integrations/slack.svg"; import smtpIcon from "@/assets/icons/integrations/smtp.svg"; import SemaphoreLogo from "@/assets/semaphore-logo-sign-black.svg"; import sendgridIcon from "@/assets/icons/integrations/sendgrid.svg"; +import renderIcon from "@/assets/icons/integrations/render.svg"; /** Integration type name (e.g. "github") → logo src. Used for Settings tab and header. */ export const INTEGRATION_APP_LOGO_MAP: Record = { @@ -37,6 +38,7 @@ export const INTEGRATION_APP_LOGO_MAP: Record = { slack: slackIcon, smtp: smtpIcon, sendgrid: sendgridIcon, + render: renderIcon, }; /** Block name first part (e.g. "github") or compound (e.g. aws.lambda) → logo src for header. 
*/ @@ -56,6 +58,7 @@ export const APP_LOGO_MAP: Record> = { semaphore: SemaphoreLogo, slack: slackIcon, sendgrid: sendgridIcon, + render: renderIcon, aws: { lambda: awsLambdaIcon, }, From f0ba11b7de766ceb6aef1947618eb39b9f2f26fe Mon Sep 17 00:00:00 2001 From: dimbaja Date: Mon, 9 Feb 2026 22:18:45 +0100 Subject: [PATCH 031/160] feat: Implement initial GitLab integration (OAuth + PAT) (#2696) The GitLab integration is implemented using OAuth and Personal Access Tokens (PATs). The client credentials grant is not supported by GitLab, and Service Accounts and Group Access Tokens are Premium-tier features both on GitLab.com and self-hosted GitLab instances. OAuth and PATs were chosen as the initial supported authentication mechanisms. Closes #1912 #1914 #1936 registering-integration-1 registering-integration-2 registering-integration-3 oauth-setup-1 oauth-setup-2 create-issue-component Create-issue-config on-issue-component on-issue-config --------- Signed-off-by: dimbaja Signed-off-by: Lucas Pinheiro Co-authored-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- docs/components/GitLab.mdx | 244 +++++++++ pkg/integrations/gitlab/auth.go | 138 +++++ pkg/integrations/gitlab/auth_test.go | 132 +++++ pkg/integrations/gitlab/client.go | 250 +++++++++ pkg/integrations/gitlab/client_test.go | 378 +++++++++++++ pkg/integrations/gitlab/common.go | 116 ++++ pkg/integrations/gitlab/create_issue.go | 259 +++++++++ pkg/integrations/gitlab/create_issue_test.go | 137 +++++ pkg/integrations/gitlab/example.go | 18 + .../gitlab/example_data_on_issue.json | 67 +++ .../gitlab/example_output_create_issue.json | 78 +++ pkg/integrations/gitlab/gitlab.go | 508 ++++++++++++++++++ pkg/integrations/gitlab/gitlab_test.go | 389 ++++++++++++++ pkg/integrations/gitlab/hooks.go | 140 +++++ pkg/integrations/gitlab/list_resources.go | 94 ++++ .../gitlab/list_resources_test.go | 136 +++++ pkg/integrations/gitlab/on_issue.go | 238 ++++++++ pkg/integrations/gitlab/on_issue_test.go | 274 
++++++++++ pkg/integrations/gitlab/webhook_handler.go | 102 ++++ .../gitlab/webhook_handler_test.go | 120 +++++ pkg/server/server.go | 1 + .../src/assets/icons/integrations/gitlab.svg | 5 + .../pages/workflowv2/mappers/github/utils.ts | 20 +- .../pages/workflowv2/mappers/gitlab/base.ts | 57 ++ .../workflowv2/mappers/gitlab/create_issue.ts | 43 ++ .../pages/workflowv2/mappers/gitlab/index.ts | 16 + .../workflowv2/mappers/gitlab/issue_utils.ts | 76 +++ .../workflowv2/mappers/gitlab/on_issue.ts | 110 ++++ .../pages/workflowv2/mappers/gitlab/types.ts | 43 ++ .../pages/workflowv2/mappers/gitlab/utils.ts | 4 + web_src/src/pages/workflowv2/mappers/index.ts | 8 + web_src/src/pages/workflowv2/mappers/utils.ts | 21 + .../src/ui/BuildingBlocksSidebar/index.tsx | 3 + web_src/src/ui/IntegrationInstructions.tsx | 19 +- .../ui/componentSidebar/integrationIcons.tsx | 3 + web_src/src/utils/integrationDisplayName.ts | 1 + 36 files changed, 4230 insertions(+), 18 deletions(-) create mode 100644 docs/components/GitLab.mdx create mode 100644 pkg/integrations/gitlab/auth.go create mode 100644 pkg/integrations/gitlab/auth_test.go create mode 100644 pkg/integrations/gitlab/client.go create mode 100644 pkg/integrations/gitlab/client_test.go create mode 100644 pkg/integrations/gitlab/common.go create mode 100644 pkg/integrations/gitlab/create_issue.go create mode 100644 pkg/integrations/gitlab/create_issue_test.go create mode 100644 pkg/integrations/gitlab/example.go create mode 100644 pkg/integrations/gitlab/example_data_on_issue.json create mode 100644 pkg/integrations/gitlab/example_output_create_issue.json create mode 100644 pkg/integrations/gitlab/gitlab.go create mode 100644 pkg/integrations/gitlab/gitlab_test.go create mode 100644 pkg/integrations/gitlab/hooks.go create mode 100644 pkg/integrations/gitlab/list_resources.go create mode 100644 pkg/integrations/gitlab/list_resources_test.go create mode 100644 pkg/integrations/gitlab/on_issue.go create mode 100644 
pkg/integrations/gitlab/on_issue_test.go create mode 100644 pkg/integrations/gitlab/webhook_handler.go create mode 100644 pkg/integrations/gitlab/webhook_handler_test.go create mode 100644 web_src/src/assets/icons/integrations/gitlab.svg create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/base.ts create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/create_issue.ts create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/index.ts create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/issue_utils.ts create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/on_issue.ts create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/types.ts create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/utils.ts diff --git a/docs/components/GitLab.mdx b/docs/components/GitLab.mdx new file mode 100644 index 0000000000..3e56b7ae24 --- /dev/null +++ b/docs/components/GitLab.mdx @@ -0,0 +1,244 @@ +--- +title: "GitLab" +--- + +Manage and react to changes in your GitLab repositories + +## Triggers + + + + + +import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + +## Actions + + + + + +## Instructions + +When connecting using App OAuth: +- Leave **Client ID** and **Secret** empty to start the setup wizard. + +When connecting using Personal Access Token: +- Go to Preferences → Personal Access Token → Add New token +- Use **Scopes**: api, read_user, read_api, write_repository, read_repository +- Copy the token and paste it into the **Access Token** configuration field, then click **Save**. + + + +## On Issue + +The On Issue trigger starts a workflow execution when issue events occur in a GitLab project. 
+ +### Use Cases + +- **Notify Slack** when an issue is created or assigned for triage +- **Create a Jira issue** when a GitLab issue is created for traceability +- **Update external dashboards** or close linked tickets when an issue is closed + +### Configuration + +- **Project** (required): GitLab project to monitor +- **Actions** (required): Select which issue actions to listen for (opened, closed, reopened, etc.). Default: opened. +- **Labels** (optional): Only trigger for issues with specific labels + +### Outputs + +- **Default channel**: Emits issue payload including issue IID, title, state, labels, assignees, author, and action type + +### Webhook Setup + +This trigger automatically sets up a GitLab webhook when configured. The webhook is managed by SuperPlane and will be cleaned up when the trigger is removed. + +### Example Data + +```json +{ + "data": { + "assignees": [ + { + "avatar_url": "https://www.gravatar.com/avatar/abc123", + "id": 1, + "name": "John Doe", + "username": "johndoe" + } + ], + "event_type": "issue", + "labels": [ + { + "color": "#dc3545", + "created_at": "2026-01-01T00:00:00Z", + "description": "Bug reports", + "group_id": null, + "id": 206, + "project_id": 15, + "template": false, + "title": "bug", + "type": "ProjectLabel", + "updated_at": "2026-01-01T00:00:00Z" + } + ], + "object_attributes": { + "action": "open", + "created_at": "2026-02-05T14:00:00Z", + "description": "This is an example issue description for testing the webhook", + "id": 301, + "iid": 1, + "state": "opened", + "title": "Example Issue", + "updated_at": "2026-02-05T14:00:00Z", + "url": "https://gitlab.com/group/my-project/-/issues/1" + }, + "object_kind": "issue", + "project": { + "avatar_url": null, + "default_branch": "main", + "description": "Example project", + "git_http_url": "https://gitlab.com/group/my-project.git", + "git_ssh_url": "git@gitlab.com:group/my-project.git", + "id": 15, + "name": "my-project", + "namespace": "group", + "path_with_namespace": 
"group/my-project", + "visibility_level": 20, + "web_url": "https://gitlab.com/group/my-project" + }, + "repository": { + "description": "Example project", + "homepage": "https://gitlab.com/group/my-project", + "name": "my-project", + "url": "git@gitlab.com:group/my-project.git" + }, + "user": { + "avatar_url": "https://www.gravatar.com/avatar/abc123", + "email": "johndoe@example.com", + "id": 1, + "name": "John Doe", + "username": "johndoe" + } + }, + "timestamp": "2026-02-05T14:00:00.000000000Z", + "type": "gitlab.issue" +} +``` + + + +## Create Issue + +The Create Issue component creates a new issue in a specified GitLab project. + +### Use Cases + +- **Automated Bug Reporting**: Create issues when a monitoring system detects an error +- **Task Management**: Automatically create tasks for new employee onboarding +- **Feedback Loop**: Turn customer feedback into actionable issues + +### Configuration + +- **Project** (required): The GitLab project where the issue will be created +- **Title** (required): The title of the new issue +- **Description** (optional): The description/body of the issue +- **Assignees** (optional): Users to assign the issue to +- **Labels** (optional): Labels to apply to the issue (e.g., bug, enhancement) +- **Milestone** (optional): Milestone to associate with the issue +- **Due Date** (optional): Date when the issue is due + +### Output + +The component outputs the created issue object, including: +- **id**: The internal ID of the issue +- **iid**: The project-relative ID of the issue +- **web_url**: The URL to view the issue in GitLab +- **state**: The current state of the issue (opened/closed) + +### Example Output + +```json +{ + "data": { + "_links": { + "award_emoji": "http://gitlab.example.com/api/v4/projects/1/issues/1/award_emoji", + "notes": "http://gitlab.example.com/api/v4/projects/1/issues/1/notes", + "project": "http://gitlab.example.com/api/v4/projects/1", + "self": "http://gitlab.example.com/api/v4/projects/1/issues/1" + 
}, + "assignee": { + "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon", + "id": 1, + "name": "Administrator", + "state": "active", + "username": "root", + "web_url": "http://gitlab.example.com/root" + }, + "assignees": [ + { + "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon", + "id": 1, + "name": "Administrator", + "state": "active", + "username": "root", + "web_url": "http://gitlab.example.com/root" + } + ], + "author": { + "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon", + "id": 1, + "name": "Administrator", + "state": "active", + "username": "root", + "web_url": "http://gitlab.example.com/root" + }, + "blocking_issues_count": 0, + "closed_at": null, + "closed_by": null, + "confidential": false, + "created_at": "2023-01-01T10:00:00.000Z", + "description": "This is an example issue created via SuperPlane", + "discussion_locked": null, + "downvotes": 0, + "due_date": null, + "has_tasks": false, + "id": 1, + "iid": 1, + "issue_type": "issue", + "labels": [ + "bug", + "urgent" + ], + "merge_requests_count": 0, + "milestone": null, + "project_id": 3, + "references": { + "full": "gitlab-org/gitlab-test#1", + "relative": "#1", + "short": "#1" + }, + "state": "opened", + "task_completion_status": { + "completed_count": 0, + "count": 0 + }, + "time_stats": { + "human_time_estimate": null, + "human_total_time_spent": null, + "time_estimate": 0, + "total_time_spent": 0 + }, + "title": "Example Issue", + "type": "ISSUE", + "updated_at": "2023-01-01T10:00:00.000Z", + "upvotes": 0, + "user_notes_count": 0, + "web_url": "http://gitlab.example.com/gitlab-org/gitlab-test/issues/1", + "weight": null + }, + "timestamp": "2023-01-01T10:00:00.000Z", + "type": "gitlab.issue" +} +``` + diff --git a/pkg/integrations/gitlab/auth.go b/pkg/integrations/gitlab/auth.go new file mode 100644 index 0000000000..4f92174e76 --- /dev/null 
+++ b/pkg/integrations/gitlab/auth.go @@ -0,0 +1,138 @@ +package gitlab + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "time" + + "github.com/superplanehq/superplane/pkg/core" +) + +type Auth struct { + client core.HTTPContext +} + +func NewAuth(client core.HTTPContext) *Auth { + return &Auth{client: client} +} + +func (a *Auth) RefreshToken(baseURL, clientID, clientSecret, refreshToken string) (*TokenResponse, error) { + data := url.Values{} + data.Set("grant_type", "refresh_token") + data.Set("client_id", clientID) + data.Set("client_secret", clientSecret) + data.Set("refresh_token", refreshToken) + + req, err := http.NewRequest(http.MethodPost, baseURL+"/oauth/token", strings.NewReader(data.Encode())) + if err != nil { + return nil, err + } + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + + resp, err := a.client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("token refresh failed: status %d", resp.StatusCode) + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + var tokenResp TokenResponse + if err := json.Unmarshal(body, &tokenResp); err != nil { + return nil, err + } + + return &tokenResp, nil +} + +func (a *Auth) exchangeCode(baseURL, clientID, clientSecret, code, redirectURI string) (*TokenResponse, error) { + data := url.Values{} + data.Set("grant_type", "authorization_code") + data.Set("client_id", clientID) + data.Set("client_secret", clientSecret) + data.Set("code", code) + data.Set("redirect_uri", redirectURI) + + req, err := http.NewRequest(http.MethodPost, baseURL+"/oauth/token", strings.NewReader(data.Encode())) + if err != nil { + return nil, err + } + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + req.Header.Set("Accept", "application/json") + + resp, err := a.client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + 
+ if resp.StatusCode != http.StatusOK { + body, _ := io.ReadAll(resp.Body) + return nil, fmt.Errorf("token exchange failed: status %d, body: %s", resp.StatusCode, string(body)) + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + var tokenResp TokenResponse + if err := json.Unmarshal(body, &tokenResp); err != nil { + return nil, err + } + + return &tokenResp, nil +} + +func (a *Auth) HandleCallback(req *http.Request, config *Configuration, expectedState, redirectURI string) (*TokenResponse, error) { + code := req.URL.Query().Get("code") + state := req.URL.Query().Get("state") + errorParam := req.URL.Query().Get("error") + + if errorParam != "" { + errorDesc := req.URL.Query().Get("error_description") + return nil, fmt.Errorf("OAuth error: %s - %s", errorParam, errorDesc) + } + + if code == "" || state == "" { + return nil, fmt.Errorf("missing code or state") + } + + if state != expectedState { + return nil, fmt.Errorf("invalid state") + } + + baseURL := config.BaseURL + + return a.exchangeCode(baseURL, config.ClientID, config.ClientSecret, code, redirectURI) +} + +type TokenResponse struct { + AccessToken string `json:"access_token"` + RefreshToken string `json:"refresh_token"` + TokenType string `json:"token_type"` + ExpiresIn int `json:"expires_in"` + Scope string `json:"scope"` +} + +func (t *TokenResponse) GetExpiration() time.Duration { + if t.ExpiresIn > 0 { + seconds := t.ExpiresIn / 2 + if seconds < 1 { + seconds = 1 + } + return time.Duration(seconds) * time.Second + } + return time.Hour +} diff --git a/pkg/integrations/gitlab/auth_test.go b/pkg/integrations/gitlab/auth_test.go new file mode 100644 index 0000000000..20e368dc80 --- /dev/null +++ b/pkg/integrations/gitlab/auth_test.go @@ -0,0 +1,132 @@ +package gitlab + +import ( + "io" + "net/http" + "net/url" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/superplanehq/superplane/test/support/contexts" +) + +func 
Test__Auth__exchangeCode(t *testing.T) { + t.Run("success", func(t *testing.T) { + mock := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{ + "access_token": "access-123", + "refresh_token": "refresh-123", + "expires_in": 7200 + }`), + }, + } + + service := NewAuth(mock) + resp, err := service.exchangeCode("https://gitlab.com", "id", "secret", "code-123", "redirect") + + require.NoError(t, err) + assert.Equal(t, "access-123", resp.AccessToken) + assert.Equal(t, "refresh-123", resp.RefreshToken) + assert.Equal(t, 7200, resp.ExpiresIn) + + require.Len(t, mock.Requests, 1) + req := mock.Requests[0] + assert.Equal(t, "POST", req.Method) + assert.Equal(t, "https://gitlab.com/oauth/token", req.URL.String()) + + body, _ := io.ReadAll(req.Body) + values, _ := url.ParseQuery(string(body)) + assert.Equal(t, "authorization_code", values.Get("grant_type")) + assert.Equal(t, "code-123", values.Get("code")) + }) + + t.Run("error response", func(t *testing.T) { + mock := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusBadRequest, `{"error": "invalid_grant"}`), + }, + } + + service := NewAuth(mock) + _, err := service.exchangeCode("https://gitlab.com", "id", "secret", "code", "redirect") + + require.Error(t, err) + assert.Contains(t, err.Error(), "status 400") + }) +} + +func Test__Auth__RefreshToken(t *testing.T) { + t.Run("success", func(t *testing.T) { + mock := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{ + "access_token": "access-new", + "refresh_token": "refresh-new", + "expires_in": 7200 + }`), + }, + } + + service := NewAuth(mock) + resp, err := service.RefreshToken("https://gitlab.com", "id", "secret", "refresh-old") + + require.NoError(t, err) + assert.Equal(t, "access-new", resp.AccessToken) + + require.Len(t, mock.Requests, 1) + req := mock.Requests[0] + assert.Equal(t, "POST", req.Method) + + body, _ := io.ReadAll(req.Body) + values, _ := 
url.ParseQuery(string(body)) + assert.Equal(t, "refresh_token", values.Get("grant_type")) + assert.Equal(t, "refresh-old", values.Get("refresh_token")) + }) +} + +func Test__Auth__HandleCallback(t *testing.T) { + + mock := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{"access_token": "ok"}`), + }, + } + service := NewAuth(mock) + + t.Run("valid callback", func(t *testing.T) { + state := "xyz" + req, _ := http.NewRequest("GET", "/?code=123&state="+url.QueryEscape(state), nil) + id := "id" + secret := "secret" + config := &Configuration{ + BaseURL: "https://gitlab.com", + ClientID: id, + ClientSecret: secret, + } + + resp, err := service.HandleCallback(req, config, state, "uri") + require.NoError(t, err) + assert.Equal(t, "ok", resp.AccessToken) + }) + + t.Run("invalid state", func(t *testing.T) { + req, _ := http.NewRequest("GET", "/?code=123&state=bad", nil) + config := &Configuration{} + + _, err := service.HandleCallback(req, config, "valid-state", "uri") + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid state") + }) + + t.Run("error param", func(t *testing.T) { + req, _ := http.NewRequest("GET", "/?error=access_denied&error_description=bad", nil) + config := &Configuration{} + + _, err := service.HandleCallback(req, config, "xyz", "uri") + require.Error(t, err) + assert.Contains(t, err.Error(), "OAuth error") + }) +} diff --git a/pkg/integrations/gitlab/client.go b/pkg/integrations/gitlab/client.go new file mode 100644 index 0000000000..dc79dcd1b0 --- /dev/null +++ b/pkg/integrations/gitlab/client.go @@ -0,0 +1,250 @@ +package gitlab + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + + "github.com/superplanehq/superplane/pkg/core" +) + +const apiVersion = "v4" + +type Client struct { + baseURL string + token string + authType string + groupID string + httpClient core.HTTPContext +} + +func NewClient(httpClient core.HTTPContext, ctx core.IntegrationContext) 
(*Client, error) { + config, err := ctx.GetConfig("authType") + if err != nil { + return nil, fmt.Errorf("failed to get authType: %v", err) + } + authType := string(config) + + baseURLBytes, _ := ctx.GetConfig("baseUrl") + baseURL := normalizeBaseURL(string(baseURLBytes)) + + groupIDBytes, err := ctx.GetConfig("groupId") + if err != nil || len(groupIDBytes) == 0 { + return nil, fmt.Errorf("groupId is required") + } + groupID := string(groupIDBytes) + + token, err := getAuthToken(ctx, authType) + if err != nil { + return nil, err + } + + return &Client{ + baseURL: baseURL, + token: token, + authType: authType, + groupID: groupID, + httpClient: httpClient, + }, nil +} + +func (c *Client) do(req *http.Request) (*http.Response, error) { + setAuthHeaders(req, c.authType, c.token) + return c.httpClient.Do(req) +} + +// T is the type of the resource item (e.g. Project, Milestone, User). +func fetchResourcesPage[T any](c *Client, apiURL string) ([]T, string, error) { + req, err := http.NewRequest(http.MethodGet, apiURL, nil) + if err != nil { + return nil, "", err + } + + resp, err := c.do(req) + if err != nil { + return nil, "", err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + if resp.StatusCode == http.StatusNotFound { + return nil, "", fmt.Errorf("resource not found: status 404") + } + return nil, "", fmt.Errorf("failed to list resources: status %d", resp.StatusCode) + } + + var resources []T + if err := json.NewDecoder(resp.Body).Decode(&resources); err != nil { + return nil, "", fmt.Errorf("failed to decode resources: %v", err) + } + + return resources, resp.Header.Get("X-Next-Page"), nil +} + +// urlBuilder is a function that returns the URL for a given page. 
+func fetchAllResources[T any](c *Client, urlBuilder func(page int) string) ([]T, error) { + var allResources []T + page := 1 + + for { + resources, nextPage, err := fetchResourcesPage[T](c, urlBuilder(page)) + if err != nil { + return nil, err + } + + allResources = append(allResources, resources...) + + if nextPage == "" { + break + } + page++ + } + + return allResources, nil +} + +type Project struct { + ID int `json:"id"` + PathWithNamespace string `json:"path_with_namespace"` + WebURL string `json:"web_url"` +} + +func (c *Client) listProjects() ([]Project, error) { + if c.groupID == "" { + return nil, fmt.Errorf("groupID is missing") + } + + return fetchAllResources[Project](c, func(page int) string { + return fmt.Sprintf("%s/api/%s/groups/%s/projects?include_subgroups=true&per_page=100&page=%d", c.baseURL, apiVersion, url.PathEscape(c.groupID), page) + }) +} + +type IssueRequest struct { + Title string `json:"title"` + Description string `json:"description,omitempty"` + Labels string `json:"labels,omitempty"` + AssigneeIDs []int `json:"assignee_ids,omitempty"` + MilestoneID *int `json:"milestone_id,omitempty"` + DueDate string `json:"due_date,omitempty"` +} + +type Issue struct { + ID int `json:"id"` + IID int `json:"iid"` + ProjectID int `json:"project_id"` + Title string `json:"title"` + Description string `json:"description"` + State string `json:"state"` + CreatedAt string `json:"created_at"` + UpdatedAt string `json:"updated_at"` + ClosedAt *string `json:"closed_at"` + ClosedBy *User `json:"closed_by"` + Labels []string `json:"labels"` + Milestone *Milestone `json:"milestone"` + DueDate *string `json:"due_date"` + WebURL string `json:"web_url"` + Author User `json:"author"` + Assignees []User `json:"assignees"` +} + +type User struct { + ID int `json:"id"` + Name string `json:"name"` + Username string `json:"username"` + State string `json:"state"` + AvatarURL string `json:"avatar_url"` + WebURL string `json:"web_url"` +} + +func (c *Client) 
CreateIssue(ctx context.Context, projectID string, req *IssueRequest) (*Issue, error) { + apiURL := fmt.Sprintf("%s/api/%s/projects/%s/issues", c.baseURL, apiVersion, url.PathEscape(projectID)) + + body, err := json.Marshal(req) + if err != nil { + return nil, fmt.Errorf("failed to marshal request: %v", err) + } + + httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, apiURL, bytes.NewBuffer(body)) + if err != nil { + return nil, err + } + httpReq.Header.Set("Content-Type", "application/json") + + resp, err := c.do(httpReq) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusCreated { + return nil, fmt.Errorf("failed to create issue: status %d", resp.StatusCode) + } + + var issue Issue + if err := json.NewDecoder(resp.Body).Decode(&issue); err != nil { + return nil, fmt.Errorf("failed to decode issue: %v", err) + } + + return &issue, nil +} + +type Milestone struct { + ID int `json:"id"` + IID int `json:"iid"` + Title string `json:"title"` + State string `json:"state"` +} + +func (c *Client) ListMilestones(projectID string) ([]Milestone, error) { + return fetchAllResources[Milestone](c, func(page int) string { + return fmt.Sprintf("%s/api/%s/projects/%s/milestones?per_page=100&page=%d&state=active", c.baseURL, apiVersion, url.PathEscape(projectID), page) + }) +} + +func (c *Client) getCurrentUser() (*User, error) { + apiURL := fmt.Sprintf("%s/api/%s/user", c.baseURL, apiVersion) + req, err := http.NewRequest(http.MethodGet, apiURL, nil) + if err != nil { + return nil, err + } + + resp, err := c.do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("failed to get current user: status %d", resp.StatusCode) + } + + var user User + if err := json.NewDecoder(resp.Body).Decode(&user); err != nil { + return nil, fmt.Errorf("failed to decode user: %v", err) + } + + return &user, nil +} + +func (c *Client) 
ListGroupMembers(groupID string) ([]User, error) { + return fetchAllResources[User](c, func(page int) string { + return fmt.Sprintf("%s/api/%s/groups/%s/members?per_page=100&page=%d", c.baseURL, apiVersion, url.PathEscape(groupID), page) + }) +} + +func (c *Client) FetchIntegrationData() (*User, []Project, error) { + user, err := c.getCurrentUser() + if err != nil { + return nil, nil, fmt.Errorf("failed to get current user: %v", err) + } + + projects, err := c.listProjects() + if err != nil { + return nil, nil, fmt.Errorf("failed to list projects: %v", err) + } + + return user, projects, nil +} diff --git a/pkg/integrations/gitlab/client_test.go b/pkg/integrations/gitlab/client_test.go new file mode 100644 index 0000000000..2bef640f3b --- /dev/null +++ b/pkg/integrations/gitlab/client_test.go @@ -0,0 +1,378 @@ +package gitlab + +import ( + "context" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__Client__NewClient(t *testing.T) { + mockClient := &contexts.HTTPContext{} + + t.Run("valid configuration - personal access token", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypePersonalAccessToken, + "baseUrl": "https://gitlab.example.com", + "accessToken": "pat-123", + "groupId": "group-123", + }, + } + + client, err := NewClient(mockClient, ctx) + require.NoError(t, err) + assert.NotNil(t, client) + assert.Equal(t, "https://gitlab.example.com", client.baseURL) + assert.Equal(t, "pat-123", client.token) + assert.Equal(t, AuthTypePersonalAccessToken, client.authType) + assert.Equal(t, "group-123", client.groupID) + assert.Equal(t, mockClient, client.httpClient) + }) + + t.Run("valid configuration - oauth", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": 
AuthTypeAppOAuth, + "groupId": "group-456", + }, + Secrets: map[string]core.IntegrationSecret{ + OAuthAccessToken: {Name: OAuthAccessToken, Value: []byte("oauth-token-123")}, + }, + } + + client, err := NewClient(mockClient, ctx) + require.NoError(t, err) + assert.NotNil(t, client) + assert.Equal(t, "https://gitlab.com", client.baseURL) + assert.Equal(t, "oauth-token-123", client.token) + assert.Equal(t, AuthTypeAppOAuth, client.authType) + assert.Equal(t, "group-456", client.groupID) + }) + + t.Run("missing authType", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Configuration: map[string]any{}, + } + _, err := NewClient(mockClient, ctx) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to get authType") + }) + + t.Run("missing groupId", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypePersonalAccessToken, + }, + } + _, err := NewClient(mockClient, ctx) + require.Error(t, err) + assert.Contains(t, err.Error(), "groupId is required") + }) + + t.Run("missing personal access token", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypePersonalAccessToken, + "groupId": "123", + }, + } + _, err := NewClient(mockClient, ctx) + require.Error(t, err) + }) +} + +func Test__Client__FetchIntegrationData(t *testing.T) { + t.Run("success", func(t *testing.T) { + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{"id": 1, "username": "user1"}`), + GitlabMockResponse(http.StatusOK, `[{"id": 1, "path_with_namespace": "group/project1", "web_url": "https://gitlab.com/group/project1"}]`), + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "token", + authType: AuthTypePersonalAccessToken, + groupID: "123", + httpClient: mockClient, + } + + user, projects, err := client.FetchIntegrationData() + require.NoError(t, err) + + require.Len(t, 
mockClient.Requests, 2) + assert.Equal(t, "https://gitlab.com/api/v4/user", mockClient.Requests[0].URL.String()) + assert.Equal(t, "token", mockClient.Requests[0].Header.Get("PRIVATE-TOKEN")) + + assert.Equal(t, "https://gitlab.com/api/v4/groups/123/projects?include_subgroups=true&per_page=100&page=1", mockClient.Requests[1].URL.String()) + assert.Equal(t, "token", mockClient.Requests[1].Header.Get("PRIVATE-TOKEN")) + + require.NotNil(t, user) + assert.Equal(t, 1, user.ID) + assert.Equal(t, "user1", user.Username) + + require.Len(t, projects, 1) + assert.Equal(t, 1, projects[0].ID) + assert.Equal(t, "group/project1", projects[0].PathWithNamespace) + }) + + t.Run("pagination", func(t *testing.T) { + resp1 := GitlabMockResponse(http.StatusOK, `[{"id": 1}]`) + resp1.Header.Set("X-Next-Page", "2") + + resp2 := GitlabMockResponse(http.StatusOK, `[{"id": 2}]`) + + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{"id": 1, "username": "user1"}`), + resp1, + resp2, + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "token", + authType: AuthTypePersonalAccessToken, + groupID: "123", + httpClient: mockClient, + } + + _, projects, err := client.FetchIntegrationData() + require.NoError(t, err) + + require.Len(t, mockClient.Requests, 3) + assert.Equal(t, "https://gitlab.com/api/v4/user", mockClient.Requests[0].URL.String()) + assert.Equal(t, "https://gitlab.com/api/v4/groups/123/projects?include_subgroups=true&per_page=100&page=1", mockClient.Requests[1].URL.String()) + assert.Equal(t, "https://gitlab.com/api/v4/groups/123/projects?include_subgroups=true&per_page=100&page=2", mockClient.Requests[2].URL.String()) + + require.Len(t, projects, 2) + assert.Equal(t, 1, projects[0].ID) + assert.Equal(t, 2, projects[1].ID) + }) + + t.Run("forbidden", func(t *testing.T) { + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{"id": 1}`), + 
GitlabMockResponse(http.StatusForbidden, `{"error": "fraud"}`), + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "token", + authType: AuthTypePersonalAccessToken, + groupID: "123", + httpClient: mockClient, + } + + _, _, err := client.FetchIntegrationData() + require.Error(t, err) + assert.Contains(t, err.Error(), "status 403") + }) + + t.Run("oauth headers", func(t *testing.T) { + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{"id": 1}`), + GitlabMockResponse(http.StatusOK, `[]`), + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "oauth-token", + authType: AuthTypeAppOAuth, + groupID: "123", + httpClient: mockClient, + } + + _, _, err := client.FetchIntegrationData() + require.NoError(t, err) + + require.Len(t, mockClient.Requests, 2) + assert.Equal(t, "Bearer oauth-token", mockClient.Requests[0].Header.Get("Authorization")) + }) +} + +func Test__Client__CreateIssue(t *testing.T) { + t.Run("success", func(t *testing.T) { + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusCreated, `{ + "id": 101, + "iid": 1, + "title": "Test Issue", + "web_url": "https://gitlab.com/group/project/issues/1", + "due_date": "2023-10-27", + "milestone": {"id": 12, "title": "v1.0"}, + "closed_at": "2023-10-28T10:00:00Z", + "closed_by": {"id": 5, "username": "closer"} + }`), + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "token", + authType: AuthTypePersonalAccessToken, + groupID: "123", + httpClient: mockClient, + } + + req := &IssueRequest{Title: "Test Issue"} + result, err := client.CreateIssue(context.Background(), "1", req) + + require.NoError(t, err) + assert.NotNil(t, result) + assert.Equal(t, 101, result.ID) + assert.Equal(t, "Test Issue", result.Title) + + require.NotNil(t, result.DueDate) + assert.Equal(t, "2023-10-27", *result.DueDate) + + require.NotNil(t, result.Milestone) + assert.Equal(t, 
12, result.Milestone.ID) + assert.Equal(t, "v1.0", result.Milestone.Title) + + require.NotNil(t, result.ClosedAt) + assert.Equal(t, "2023-10-28T10:00:00Z", *result.ClosedAt) + + require.NotNil(t, result.ClosedBy) + assert.Equal(t, 5, result.ClosedBy.ID) + assert.Equal(t, "closer", result.ClosedBy.Username) + + require.Len(t, mockClient.Requests, 1) + assert.Equal(t, http.MethodPost, mockClient.Requests[0].Method) + assert.Equal(t, "https://gitlab.com/api/v4/projects/1/issues", mockClient.Requests[0].URL.String()) + }) +} + +func Test__Client__ListGroupMembers(t *testing.T) { + t.Run("pagination", func(t *testing.T) { + resp1 := GitlabMockResponse(http.StatusOK, `[{"id": 1, "username": "user1"}]`) + resp1.Header.Set("X-Next-Page", "2") + + resp2 := GitlabMockResponse(http.StatusOK, `[{"id": 2, "username": "user2"}]`) + + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + resp1, + resp2, + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "token", + authType: AuthTypePersonalAccessToken, + groupID: "123", + httpClient: mockClient, + } + + members, err := client.ListGroupMembers("123") + require.NoError(t, err) + + require.Len(t, mockClient.Requests, 2) + assert.Equal(t, "https://gitlab.com/api/v4/groups/123/members?per_page=100&page=1", mockClient.Requests[0].URL.String()) + assert.Equal(t, "https://gitlab.com/api/v4/groups/123/members?per_page=100&page=2", mockClient.Requests[1].URL.String()) + + require.Len(t, members, 2) + assert.Equal(t, "user1", members[0].Username) + assert.Equal(t, "user2", members[1].Username) + }) +} + +func Test__Client__ListMilestones(t *testing.T) { + t.Run("success", func(t *testing.T) { + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `[ + {"id": 1, "iid": 1, "title": "v1.0", "state": "active"}, + {"id": 2, "iid": 2, "title": "v2.0", "state": "active"} + ]`), + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "token", 
+ authType: AuthTypePersonalAccessToken, + groupID: "123", + httpClient: mockClient, + } + + milestones, err := client.ListMilestones("456") + require.NoError(t, err) + + require.Len(t, mockClient.Requests, 1) + assert.Equal(t, "https://gitlab.com/api/v4/projects/456/milestones?per_page=100&page=1&state=active", mockClient.Requests[0].URL.String()) + assert.Equal(t, "token", mockClient.Requests[0].Header.Get("PRIVATE-TOKEN")) + + require.Len(t, milestones, 2) + assert.Equal(t, 1, milestones[0].ID) + assert.Equal(t, "v1.0", milestones[0].Title) + assert.Equal(t, 2, milestones[1].ID) + assert.Equal(t, "v2.0", milestones[1].Title) + }) + + t.Run("pagination", func(t *testing.T) { + resp1 := GitlabMockResponse(http.StatusOK, `[{"id": 1, "iid": 1, "title": "v1.0", "state": "active"}]`) + resp1.Header.Set("X-Next-Page", "2") + + resp2 := GitlabMockResponse(http.StatusOK, `[{"id": 2, "iid": 2, "title": "v2.0", "state": "active"}]`) + + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + resp1, + resp2, + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "token", + authType: AuthTypePersonalAccessToken, + groupID: "123", + httpClient: mockClient, + } + + milestones, err := client.ListMilestones("456") + require.NoError(t, err) + + require.Len(t, mockClient.Requests, 2) + assert.Equal(t, "https://gitlab.com/api/v4/projects/456/milestones?per_page=100&page=1&state=active", mockClient.Requests[0].URL.String()) + assert.Equal(t, "https://gitlab.com/api/v4/projects/456/milestones?per_page=100&page=2&state=active", mockClient.Requests[1].URL.String()) + + require.Len(t, milestones, 2) + assert.Equal(t, "v1.0", milestones[0].Title) + assert.Equal(t, "v2.0", milestones[1].Title) + }) + + t.Run("error", func(t *testing.T) { + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusNotFound, `{"error": "not found"}`), + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "token", 
+ authType: AuthTypePersonalAccessToken, + groupID: "123", + httpClient: mockClient, + } + + _, err := client.ListMilestones("456") + require.Error(t, err) + assert.Contains(t, err.Error(), "status 404") + }) +} diff --git a/pkg/integrations/gitlab/common.go b/pkg/integrations/gitlab/common.go new file mode 100644 index 0000000000..de215e180e --- /dev/null +++ b/pkg/integrations/gitlab/common.go @@ -0,0 +1,116 @@ +package gitlab + +import ( + "crypto/subtle" + "fmt" + "net/http" + "slices" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/core" +) + +type WebhookConfiguration struct { + EventType string `json:"eventType" mapstructure:"eventType"` + ProjectID string `json:"projectId" mapstructure:"projectId"` +} + +type WebhookMetadata struct { + ID int `json:"id" mapstructure:"id"` +} + +type NodeMetadata struct { + Project *ProjectMetadata `json:"project"` +} + +// getAuthToken retrieves the appropriate authentication token based on auth type +func getAuthToken(ctx core.IntegrationContext, authType string) (string, error) { + switch authType { + case AuthTypePersonalAccessToken: + tokenBytes, err := ctx.GetConfig("accessToken") + if err != nil { + return "", err + } + token := string(tokenBytes) + if token == "" { + return "", fmt.Errorf("personal access token not found") + } + return token, nil + + case AuthTypeAppOAuth: + token, err := findSecret(ctx, OAuthAccessToken) + if err != nil { + return "", err + } + if token == "" { + return "", fmt.Errorf("OAuth access token not found") + } + return token, nil + + default: + return "", fmt.Errorf("unknown auth type: %s", authType) + } +} + +func setAuthHeaders(req *http.Request, authType, token string) { + if authType == AuthTypePersonalAccessToken { + req.Header.Set("PRIVATE-TOKEN", token) + } else { + req.Header.Set("Authorization", "Bearer "+token) + } +} + +// verifyWebhookToken verifies the X-Gitlab-Token header matches the expected secret. 
 + // + // Validate that the app has access to this project + // 
"github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +//go:embed example_output_create_issue.json +var exampleOutputCreateIssue []byte + +type CreateIssue struct{} + +type CreateIssueConfiguration struct { + Project string `mapstructure:"project"` + Title string `mapstructure:"title"` + Body string `mapstructure:"body"` + Assignees []string `mapstructure:"assignees"` + Labels []string `mapstructure:"labels"` + Milestone string `mapstructure:"milestone"` + DueDate string `mapstructure:"dueDate"` +} + +func (c *CreateIssue) Name() string { + return "gitlab.createIssue" +} + +func (c *CreateIssue) Label() string { + return "Create Issue" +} + +func (c *CreateIssue) Description() string { + return "Create a new issue in a GitLab project" +} + +func (c *CreateIssue) Documentation() string { + return `The Create Issue component creates a new issue in a specified GitLab project. + +## Use Cases + +- **Automated Bug Reporting**: Create issues when a monitoring system detects an error +- **Task Management**: Automatically create tasks for new employee onboarding +- **Feedback Loop**: Turn customer feedback into actionable issues + +## Configuration + +- **Project** (required): The GitLab project where the issue will be created +- **Title** (required): The title of the new issue +- **Description** (optional): The description/body of the issue +- **Assignees** (optional): Users to assign the issue to +- **Labels** (optional): Labels to apply to the issue (e.g., bug, enhancement) +- **Milestone** (optional): Milestone to associate with the issue +- **Due Date** (optional): Date when the issue is due + +## Output + +The component outputs the created issue object, including: +- **id**: The internal ID of the issue +- **iid**: The project-relative ID of the issue +- **web_url**: The URL to view the issue in GitLab +- **state**: The current state of the issue (opened/closed)` +} + +func (c *CreateIssue) Icon() string { + 
return "gitlab" +} + +func (c *CreateIssue) Color() string { + return "orange" +} + +func (c *CreateIssue) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *CreateIssue) ExampleOutput() map[string]any { + var example map[string]any + if err := json.Unmarshal(exampleOutputCreateIssue, &example); err != nil { + return map[string]any{} + } + return example +} + +func (c *CreateIssue) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "project", + Label: "Project", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeProject, + }, + }, + }, + { + Name: "title", + Label: "Title", + Type: configuration.FieldTypeString, + Required: true, + }, + { + Name: "body", + Label: "Description", + Type: configuration.FieldTypeText, + Required: false, + }, + { + Name: "assignees", + Label: "Assignees", + Type: configuration.FieldTypeIntegrationResource, + Required: false, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeMember, + Multi: true, + }, + }, + }, + { + Name: "labels", + Label: "Labels", + Type: configuration.FieldTypeList, + Required: false, + TypeOptions: &configuration.TypeOptions{ + List: &configuration.ListTypeOptions{ + ItemLabel: "Label", + ItemDefinition: &configuration.ListItemDefinition{ + Type: configuration.FieldTypeString, + }, + }, + }, + }, + { + Name: "milestone", + Label: "Milestone", + Type: configuration.FieldTypeIntegrationResource, + Required: false, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeMilestone, + Parameters: []configuration.ParameterRef{ + { + Name: "project", + ValueFrom: &configuration.ParameterValueFrom{Field: "project"}, + }, + }, + }, + }, + }, + { + Name: "dueDate", + Label: 
"Due Date", + Type: configuration.FieldTypeDate, + Required: false, + }, + } +} + +func (c *CreateIssue) Setup(ctx core.SetupContext) error { + var config CreateIssueConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if config.Project == "" { + return fmt.Errorf("project is required") + } + + if config.Title == "" { + return fmt.Errorf("title is required") + } + + return ensureProjectInMetadata( + ctx.Metadata, + ctx.Integration, + config.Project, + ) +} + +func (c *CreateIssue) Execute(ctx core.ExecutionContext) error { + var config CreateIssueConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("failed to initialize GitLab client: %w", err) + } + + var assigneeIDs []int + for _, idStr := range config.Assignees { + var id int + if _, err := fmt.Sscanf(idStr, "%d", &id); err == nil { + assigneeIDs = append(assigneeIDs, id) + } + } + + var milestoneID *int + if config.Milestone != "" { + var id int + if _, err := fmt.Sscanf(config.Milestone, "%d", &id); err == nil { + milestoneID = &id + } + } + + req := &IssueRequest{ + Title: config.Title, + Description: config.Body, + Labels: strings.Join(config.Labels, ","), + AssigneeIDs: assigneeIDs, + MilestoneID: milestoneID, + DueDate: config.DueDate, + } + + issue, err := client.CreateIssue(context.Background(), config.Project, req) + if err != nil { + return fmt.Errorf("failed to create issue: %w", err) + } + + return ctx.ExecutionState.Emit( + core.DefaultOutputChannel.Name, + "gitlab.issue", + []any{issue}, + ) +} + +func (c *CreateIssue) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *CreateIssue) HandleWebhook(ctx core.WebhookRequestContext) (int, 
error) { + return 200, nil +} + +func (c *CreateIssue) Actions() []core.Action { + return []core.Action{} +} + +func (c *CreateIssue) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *CreateIssue) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *CreateIssue) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/gitlab/create_issue_test.go b/pkg/integrations/gitlab/create_issue_test.go new file mode 100644 index 0000000000..aba6af8c2f --- /dev/null +++ b/pkg/integrations/gitlab/create_issue_test.go @@ -0,0 +1,137 @@ +package gitlab + +import ( + "encoding/json" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__CreateIssue__Setup(t *testing.T) { + c := &CreateIssue{} + + t.Run("missing project", func(t *testing.T) { + ctx := core.SetupContext{ + Configuration: map[string]any{ + "title": "Issue Title", + }, + Metadata: &contexts.MetadataContext{}, + } + err := c.Setup(ctx) + require.Error(t, err) + assert.Contains(t, err.Error(), "project is required") + }) + + t.Run("missing title", func(t *testing.T) { + ctx := core.SetupContext{ + Configuration: map[string]any{ + "project": "123", + }, + Metadata: &contexts.MetadataContext{}, + } + err := c.Setup(ctx) + require.Error(t, err) + assert.Contains(t, err.Error(), "title is required") + }) + + t.Run("valid configuration", func(t *testing.T) { + ctx := core.SetupContext{ + Configuration: map[string]any{ + "project": "123", + "title": "Issue Title", + }, + Integration: &contexts.IntegrationContext{ + Metadata: Metadata{ + Projects: []ProjectMetadata{ + {ID: 123, Name: "repo", URL: "http://repo"}, + }, + }, + }, + Metadata: &contexts.MetadataContext{}, + } + err := c.Setup(ctx) + require.NoError(t, err) + }) +} + +func Test__CreateIssue__Execute(t *testing.T) { + c := 
&CreateIssue{} + + t.Run("success", func(t *testing.T) { + executionState := &contexts.ExecutionStateContext{} + ctx := core.ExecutionContext{ + Configuration: map[string]any{ + "project": "123", + "title": "Issue Title", + "body": "Issue Body", + "assignees": []string{"99"}, + }, + Integration: &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypePersonalAccessToken, + "groupId": "123", + "accessToken": "pat", + "baseUrl": "https://gitlab.com", + }, + }, + HTTP: &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusCreated, `{ + "id": 1, + "title": "Issue Title", + "description": "Issue Body", + "web_url": "https://gitlab.com/issue/1" + }`), + }, + }, + ExecutionState: executionState, + } + + err := c.Execute(ctx) + require.NoError(t, err) + + require.Len(t, executionState.Payloads, 1) + payload := executionState.Payloads[0].(map[string]any) + assert.Equal(t, core.DefaultOutputChannel.Name, executionState.Channel) + assert.Equal(t, "gitlab.issue", executionState.Type) + + var issue Issue + issuePayload := payload["data"] + payloadBytes, _ := json.Marshal(issuePayload) + json.Unmarshal(payloadBytes, &issue) + + assert.Equal(t, 1, issue.ID) + assert.Equal(t, "Issue Title", issue.Title) + assert.Equal(t, "Issue Body", issue.Description) + }) + + t.Run("failure", func(t *testing.T) { + ctx := core.ExecutionContext{ + Configuration: map[string]any{ + "project": "123", + "title": "Issue Title", + }, + Integration: &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypePersonalAccessToken, + "groupId": "123", + "accessToken": "pat", + "baseUrl": "https://gitlab.com", + }, + }, + HTTP: &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusInternalServerError, `{"error": "internal server error"}`), + }, + }, + } + + err := c.Execute(ctx) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to create issue") + }) +} diff --git 
a/pkg/integrations/gitlab/example.go b/pkg/integrations/gitlab/example.go new file mode 100644 index 0000000000..f328477b63 --- /dev/null +++ b/pkg/integrations/gitlab/example.go @@ -0,0 +1,18 @@ +package gitlab + +import ( + _ "embed" + "sync" + + "github.com/superplanehq/superplane/pkg/utils" +) + +//go:embed example_data_on_issue.json +var exampleDataOnIssueBytes []byte + +var exampleDataOnIssueOnce sync.Once +var exampleDataOnIssue map[string]any + +func (i *OnIssue) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnIssueOnce, exampleDataOnIssueBytes, &exampleDataOnIssue) +} diff --git a/pkg/integrations/gitlab/example_data_on_issue.json b/pkg/integrations/gitlab/example_data_on_issue.json new file mode 100644 index 0000000000..f7a825204f --- /dev/null +++ b/pkg/integrations/gitlab/example_data_on_issue.json @@ -0,0 +1,67 @@ +{ + "data": { + "object_kind": "issue", + "event_type": "issue", + "user": { + "id": 1, + "name": "John Doe", + "username": "johndoe", + "avatar_url": "https://www.gravatar.com/avatar/abc123", + "email": "johndoe@example.com" + }, + "project": { + "id": 15, + "name": "my-project", + "description": "Example project", + "web_url": "https://gitlab.com/group/my-project", + "avatar_url": null, + "git_ssh_url": "git@gitlab.com:group/my-project.git", + "git_http_url": "https://gitlab.com/group/my-project.git", + "namespace": "group", + "visibility_level": 20, + "path_with_namespace": "group/my-project", + "default_branch": "main" + }, + "object_attributes": { + "id": 301, + "iid": 1, + "title": "Example Issue", + "description": "This is an example issue description for testing the webhook", + "state": "opened", + "action": "open", + "created_at": "2026-02-05T14:00:00Z", + "updated_at": "2026-02-05T14:00:00Z", + "url": "https://gitlab.com/group/my-project/-/issues/1" + }, + "labels": [ + { + "id": 206, + "title": "bug", + "color": "#dc3545", + "project_id": 15, + "created_at": "2026-01-01T00:00:00Z", + "updated_at": 
"2026-01-01T00:00:00Z", + "template": false, + "description": "Bug reports", + "type": "ProjectLabel", + "group_id": null + } + ], + "assignees": [ + { + "id": 1, + "name": "John Doe", + "username": "johndoe", + "avatar_url": "https://www.gravatar.com/avatar/abc123" + } + ], + "repository": { + "name": "my-project", + "url": "git@gitlab.com:group/my-project.git", + "description": "Example project", + "homepage": "https://gitlab.com/group/my-project" + } + }, + "timestamp": "2026-02-05T14:00:00.000000000Z", + "type": "gitlab.issue" +} diff --git a/pkg/integrations/gitlab/example_output_create_issue.json b/pkg/integrations/gitlab/example_output_create_issue.json new file mode 100644 index 0000000000..1413811195 --- /dev/null +++ b/pkg/integrations/gitlab/example_output_create_issue.json @@ -0,0 +1,78 @@ +{ + "data": { + "id": 1, + "iid": 1, + "project_id": 3, + "title": "Example Issue", + "description": "This is an example issue created via SuperPlane", + "state": "opened", + "created_at": "2023-01-01T10:00:00.000Z", + "updated_at": "2023-01-01T10:00:00.000Z", + "closed_at": null, + "closed_by": null, + "labels": ["bug", "urgent"], + "milestone": null, + "assignees": [ + { + "id": 1, + "name": "Administrator", + "username": "root", + "state": "active", + "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "web_url": "http://gitlab.example.com/root" + } + ], + "author": { + "id": 1, + "name": "Administrator", + "username": "root", + "state": "active", + "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "web_url": "http://gitlab.example.com/root" + }, + "type": "ISSUE", + "assignee": { + "id": 1, + "name": "Administrator", + "username": "root", + "state": "active", + "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "web_url": "http://gitlab.example.com/root" + }, + "user_notes_count": 0, + "merge_requests_count": 0, + 
"upvotes": 0, + "downvotes": 0, + "due_date": null, + "confidential": false, + "discussion_locked": null, + "issue_type": "issue", + "web_url": "http://gitlab.example.com/gitlab-org/gitlab-test/issues/1", + "time_stats": { + "time_estimate": 0, + "total_time_spent": 0, + "human_time_estimate": null, + "human_total_time_spent": null + }, + "task_completion_status": { + "count": 0, + "completed_count": 0 + }, + "weight": null, + "blocking_issues_count": 0, + "has_tasks": false, + "_links": { + "self": "http://gitlab.example.com/api/v4/projects/1/issues/1", + "notes": "http://gitlab.example.com/api/v4/projects/1/issues/1/notes", + "award_emoji": "http://gitlab.example.com/api/v4/projects/1/issues/1/award_emoji", + "project": "http://gitlab.example.com/api/v4/projects/1" + }, + "references": { + "short": "#1", + "relative": "#1", + "full": "gitlab-org/gitlab-test#1" + } + }, + "timestamp": "2023-01-01T10:00:00.000Z", + "type": "gitlab.issue" +} diff --git a/pkg/integrations/gitlab/gitlab.go b/pkg/integrations/gitlab/gitlab.go new file mode 100644 index 0000000000..6b7f64d989 --- /dev/null +++ b/pkg/integrations/gitlab/gitlab.go @@ -0,0 +1,508 @@ +package gitlab + +import ( + "fmt" + "net/http" + "net/url" + "strings" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/crypto" + "github.com/superplanehq/superplane/pkg/registry" +) + +const ( + AuthTypePersonalAccessToken = "personalAccessToken" + AuthTypeAppOAuth = "appOAuth" + OAuthAccessToken = "accessToken" + OAuthRefreshToken = "refreshToken" +) + +var scopeList = []string{ + "api", + "read_user", + "read_api", + "write_repository", + "read_repository", +} + +const ( + appSetupDescription = ` +- Click the **Continue** button to go to the Applications page in GitLab +- Add new application: + - **Name**: SuperPlane + - **Redirect URI**: ` + "`%s`" + ` + - **Scopes**: %s +- Copy the 
**Client ID** and **Client Secret**, and paste them in the fields below. +- Click **Save** to complete the setup. +` + + appConnectDescription = `Click **Continue** to authorize SuperPlane to access your GitLab account.` +) + +func init() { + registry.RegisterIntegrationWithWebhookHandler("gitlab", &GitLab{}, &GitLabWebhookHandler{}) +} + +type GitLab struct { +} + +type Configuration struct { + AuthType string `mapstructure:"authType" json:"authType"` + BaseURL string `mapstructure:"baseUrl" json:"baseUrl"` + ClientID string `mapstructure:"clientId" json:"clientId"` + ClientSecret string `mapstructure:"clientSecret" json:"clientSecret"` + GroupID string `mapstructure:"groupId" json:"groupId"` + AccessToken string `mapstructure:"accessToken" json:"accessToken"` +} + +type Metadata struct { + State *string `mapstructure:"state,omitempty" json:"state,omitempty"` + Projects []ProjectMetadata `mapstructure:"projects" json:"projects"` + User *UserMetadata `mapstructure:"user,omitempty" json:"user,omitempty"` +} + +type UserMetadata struct { + ID int `mapstructure:"id" json:"id"` + Name string `mapstructure:"name" json:"name"` + Username string `mapstructure:"username" json:"username"` +} + +type ProjectMetadata struct { + ID int `mapstructure:"id" json:"id"` + Name string `mapstructure:"name" json:"name"` + URL string `mapstructure:"url" json:"url"` +} + +func (g *GitLab) Name() string { + return "gitlab" +} + +func (g *GitLab) Label() string { + return "GitLab" +} + +func (g *GitLab) Icon() string { + return "gitlab" +} + +func (g *GitLab) Description() string { + return "Manage and react to changes in your GitLab repositories" +} + +func (g *GitLab) Instructions() string { + return fmt.Sprintf(` +When connecting using App OAuth: +- Leave **Client ID** and **Secret** empty to start the setup wizard. 
+ +When connecting using Personal Access Token: +- Go to Preferences → Personal Access Token → Add New token +- Use **Scopes**: %s +- Copy the token and paste it into the **Access Token** configuration field, then click **Save**. +`, strings.Join(scopeList, ", ")) +} + +func (g *GitLab) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "baseUrl", + Label: "GitLab URL", + Type: configuration.FieldTypeString, + Description: "GitLab instance URL (or leave empty for https://gitlab.com)", + Default: "https://gitlab.com", + }, + { + Name: "groupId", + Label: "Group ID", + Type: configuration.FieldTypeString, + Description: "Group ID", + Required: true, + }, + { + Name: "authType", + Label: "Auth Type", + Type: configuration.FieldTypeSelect, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Select: &configuration.SelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "App OAuth", Value: AuthTypeAppOAuth}, + {Label: "Personal Access Token", Value: AuthTypePersonalAccessToken}, + }, + }, + }, + }, + { + Name: "clientId", + Label: "Client ID", + Type: configuration.FieldTypeString, + Description: "OAuth Client ID from your GitLab app", + VisibilityConditions: []configuration.VisibilityCondition{ + {Field: "authType", Values: []string{AuthTypeAppOAuth}}, + }, + }, + { + Name: "clientSecret", + Label: "Client Secret", + Type: configuration.FieldTypeString, + Sensitive: true, + Description: "OAuth Client Secret from your GitLab app", + VisibilityConditions: []configuration.VisibilityCondition{ + {Field: "authType", Values: []string{AuthTypeAppOAuth}}, + }, + }, + { + Name: "accessToken", + Label: "Access Token", + Type: configuration.FieldTypeString, + Sensitive: true, + Description: "Access Token from your GitLab user settings", + VisibilityConditions: []configuration.VisibilityCondition{ + {Field: "authType", Values: []string{AuthTypePersonalAccessToken}}, + }, + }, + } +} + +func (g *GitLab) Components() 
[]core.Component { + return []core.Component{ + &CreateIssue{}, + } +} + +func (g *GitLab) Triggers() []core.Trigger { + return []core.Trigger{ + &OnIssue{}, + } +} + +func (g *GitLab) Sync(ctx core.SyncContext) error { + + configuration := Configuration{} + err := mapstructure.Decode(ctx.Configuration, &configuration) + if err != nil { + return fmt.Errorf("failed to decode config: %v", err) + } + + configuration.BaseURL = normalizeBaseURL(configuration.BaseURL) + + if configuration.AuthType == "" { + return fmt.Errorf("authType is required") + } + + switch configuration.AuthType { + case AuthTypeAppOAuth: + return g.oauthSync(ctx, configuration) + + case AuthTypePersonalAccessToken: + return g.personalAccessTokenSync(ctx) + + default: + return fmt.Errorf("unknown authType: %s", configuration.AuthType) + } +} + +func (g *GitLab) oauthSync(ctx core.SyncContext, configuration Configuration) error { + baseURL := configuration.BaseURL + callbackURL := fmt.Sprintf("%s/api/v1/integrations/%s/callback", ctx.BaseURL, ctx.Integration.ID()) + + clientID, _ := ctx.Integration.GetConfig("clientId") + clientSecret, _ := ctx.Integration.GetConfig("clientSecret") + + // + // If no client ID or secret, show setup instructions + // + if string(clientID) == "" || string(clientSecret) == "" { + ctx.Integration.NewBrowserAction(core.BrowserAction{ + Description: fmt.Sprintf(appSetupDescription, callbackURL, strings.Join(scopeList, ", ")), + URL: fmt.Sprintf("%s/-/user_settings/applications", baseURL), + Method: "GET", + }) + + return nil + } + + // + // If access token is not available, ask user to authorize the app. + // + accessToken, _ := findSecret(ctx.Integration, OAuthAccessToken) + if accessToken == "" { + return g.handleOAuthNoAccessToken(ctx, baseURL, callbackURL, string(clientID)) + } + + // + // If refresh token is available, refresh the token + // and update the metadata. 
+ // + err := g.refreshToken(ctx, baseURL, string(clientID), string(clientSecret)) + if err != nil { + ctx.Logger.Errorf("Failed to refresh token: %v", err) + return err + } + + if err := g.updateMetadata(ctx); err != nil { + ctx.Integration.Error(err.Error()) + return nil + } + + ctx.Integration.RemoveBrowserAction() + ctx.Integration.Ready() + return nil +} + +func (g *GitLab) refreshToken(ctx core.SyncContext, baseURL, clientID, clientSecret string) error { + refreshToken, _ := findSecret(ctx.Integration, OAuthRefreshToken) + if refreshToken == "" { + ctx.Logger.Warn("GitLab integration has no refresh token - not refreshing token") + return nil + } + + ctx.Logger.Info("Refreshing GitLab token") + auth := NewAuth(ctx.HTTP) + tokenResponse, err := auth.RefreshToken(baseURL, clientID, clientSecret, refreshToken) + + if err != nil { + _ = ctx.Integration.SetSecret(OAuthRefreshToken, []byte("")) + _ = ctx.Integration.SetSecret(OAuthAccessToken, []byte("")) + return fmt.Errorf("failed to refresh token: %v", err) + } + + if tokenResponse.AccessToken != "" { + ctx.Logger.Info("Saving access token") + err := ctx.Integration.SetSecret(OAuthAccessToken, []byte(tokenResponse.AccessToken)) + if err != nil { + return fmt.Errorf("failed to save access token: %v", err) + } + } + + if tokenResponse.RefreshToken != "" { + ctx.Logger.Info("Saving refresh token") + err := ctx.Integration.SetSecret(OAuthRefreshToken, []byte(tokenResponse.RefreshToken)) + if err != nil { + return fmt.Errorf("failed to save refresh token: %v", err) + } + } + + ctx.Logger.Info("Token refreshed successfully") + return ctx.Integration.ScheduleResync(tokenResponse.GetExpiration()) +} + +func (g *GitLab) handleOAuthNoAccessToken(ctx core.SyncContext, baseURL string, callbackURL string, clientID string) error { + metadata := Metadata{} + if err := mapstructure.Decode(ctx.Integration.GetMetadata(), &metadata); err != nil { + ctx.Logger.Errorf("Failed to decode metadata while setting state: %v", err) + } + + 
if metadata.State == nil { + var err error + s, err := crypto.Base64String(32) + if err != nil { + return fmt.Errorf("failed to generate state: %v", err) + } + metadata.State = &s + ctx.Integration.SetMetadata(metadata) + } + + authURL := fmt.Sprintf( + "%s/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code&scope=%s&state=%s", + baseURL, + url.QueryEscape(clientID), + url.QueryEscape(callbackURL), + url.QueryEscape(strings.Join(scopeList, " ")), + url.QueryEscape(*metadata.State), + ) + + ctx.Integration.NewBrowserAction(core.BrowserAction{ + Description: appConnectDescription, + URL: authURL, + Method: "GET", + }) + + return nil +} + +func (g *GitLab) personalAccessTokenSync(ctx core.SyncContext) error { + token, err := ctx.Integration.GetConfig("accessToken") + if err != nil { + return fmt.Errorf("access token is required") + } + + if string(token) == "" { + return fmt.Errorf("access token is required") + } + + if err := g.updateMetadata(ctx); err != nil { + ctx.Integration.Error(err.Error()) + return nil + } + + ctx.Integration.RemoveBrowserAction() + ctx.Integration.Ready() + return nil +} + +func (g *GitLab) updateMetadata(ctx core.SyncContext) error { + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + user, projects, err := client.FetchIntegrationData() + if err != nil { + return err + } + + ps := []ProjectMetadata{} + for _, p := range projects { + ps = append(ps, ProjectMetadata{ + ID: p.ID, + Name: p.PathWithNamespace, + URL: p.WebURL, + }) + } + + metadata := Metadata{} + if err := mapstructure.Decode(ctx.Integration.GetMetadata(), &metadata); err != nil { + return fmt.Errorf("failed to decode metadata: %w", err) + } + + metadata.Projects = ps + metadata.State = nil + metadata.User = &UserMetadata{ + ID: user.ID, + Name: user.Name, + Username: user.Username, + } + + ctx.Integration.SetMetadata(metadata) + + return nil +} + +func (g *GitLab) HandleRequest(ctx core.HTTPRequestContext) { + if 
!strings.HasSuffix(ctx.Request.URL.Path, "/callback") { + ctx.Response.WriteHeader(http.StatusNotFound) + return + } + + clientID, err := ctx.Integration.GetConfig("clientId") + if err != nil { + ctx.Response.WriteHeader(http.StatusInternalServerError) + return + } + + clientSecret, err := ctx.Integration.GetConfig("clientSecret") + if err != nil { + ctx.Response.WriteHeader(http.StatusInternalServerError) + return + } + + baseURL, _ := ctx.Integration.GetConfig("baseUrl") + strBaseURL := normalizeBaseURL(string(baseURL)) + + strClientID := string(clientID) + strClientSecret := string(clientSecret) + + config := &Configuration{ + BaseURL: strBaseURL, + ClientID: strClientID, + ClientSecret: strClientSecret, + } + + g.handleCallback(ctx, config) +} + +func (g *GitLab) handleCallback(ctx core.HTTPRequestContext, config *Configuration) { + redirectBaseURL := ctx.BaseURL + metadata := Metadata{} + if err := mapstructure.Decode(ctx.Integration.GetMetadata(), &metadata); err != nil { + ctx.Response.WriteHeader(http.StatusInternalServerError) + return + } + + redirectURI := fmt.Sprintf("%s/api/v1/integrations/%s/callback", redirectBaseURL, ctx.Integration.ID().String()) + + auth := NewAuth(ctx.HTTP) + tokenResponse, err := auth.HandleCallback(ctx.Request, config, *metadata.State, redirectURI) + + if err != nil { + ctx.Logger.Errorf("Callback error: %v", err) + http.Redirect(ctx.Response, ctx.Request, + fmt.Sprintf("%s/%s/settings/integrations/%s", redirectBaseURL, ctx.OrganizationID, ctx.Integration.ID().String()), + http.StatusSeeOther) + return + } + + if tokenResponse.AccessToken != "" { + if err := ctx.Integration.SetSecret(OAuthAccessToken, []byte(tokenResponse.AccessToken)); err != nil { + ctx.Response.WriteHeader(http.StatusInternalServerError) + return + } + } + + if tokenResponse.RefreshToken != "" { + if err := ctx.Integration.SetSecret(OAuthRefreshToken, []byte(tokenResponse.RefreshToken)); err != nil { + ctx.Response.WriteHeader(http.StatusInternalServerError) 
+ return + } + } + + if err := ctx.Integration.ScheduleResync(tokenResponse.GetExpiration()); err != nil { + ctx.Response.WriteHeader(http.StatusInternalServerError) + return + } + + if err := g.updateMetadata(core.SyncContext{ + HTTP: ctx.HTTP, + Integration: ctx.Integration, + }); err != nil { + ctx.Logger.Errorf("Callback error: failed to update metadata: %v", err) + ctx.Response.WriteHeader(http.StatusInternalServerError) + return + } + + ctx.Integration.RemoveBrowserAction() + ctx.Integration.Ready() + + http.Redirect(ctx.Response, ctx.Request, + fmt.Sprintf("%s/%s/settings/integrations/%s", redirectBaseURL, ctx.OrganizationID, ctx.Integration.ID().String()), + http.StatusSeeOther) +} + +func findSecret(integration core.IntegrationContext, name string) (string, error) { + secrets, err := integration.GetSecrets() + if err != nil { + return "", err + } + for _, secret := range secrets { + if secret.Name == name { + return string(secret.Value), nil + } + } + return "", nil +} + +func normalizeBaseURL(url string) string { + if url == "" { + return "https://gitlab.com" + } + if !strings.HasPrefix(url, "http://") && !strings.HasPrefix(url, "https://") { + url = "https://" + url + } + + return strings.TrimSuffix(url, "/") +} + +func (g *GitLab) Actions() []core.Action { + return []core.Action{} +} + +func (g *GitLab) HandleAction(ctx core.IntegrationActionContext) error { + return nil +} + +func (g *GitLab) Cleanup(ctx core.IntegrationCleanupContext) error { + return nil +} diff --git a/pkg/integrations/gitlab/gitlab_test.go b/pkg/integrations/gitlab/gitlab_test.go new file mode 100644 index 0000000000..6a5cc6e36a --- /dev/null +++ b/pkg/integrations/gitlab/gitlab_test.go @@ -0,0 +1,389 @@ +package gitlab + +import ( + "bytes" + "io" + "net/http" + "net/http/httptest" + "net/url" + "testing" + + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + 
"github.com/superplanehq/superplane/test/support/contexts" +) + +func GitlabMockResponse(status int, body string) *http.Response { + return &http.Response{ + StatusCode: status, + Body: io.NopCloser(bytes.NewBufferString(body)), + Header: http.Header{"Content-Type": []string{"application/json"}}, + } +} + +func Test__GitLab__Sync(t *testing.T) { + g := &GitLab{} + + t.Run("personal access token - success", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypePersonalAccessToken, + "accessToken": "pat-123", + "groupId": "123", + "baseUrl": "https://gitlab.com", + }, + } + + mockHTTP := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{"id": 1}`), + GitlabMockResponse(http.StatusOK, `[{"id": 1, "path_with_namespace": "group/project1", "web_url": "https://gitlab.com/group/project1"}]`), + }, + } + + err := g.Sync(core.SyncContext{ + Configuration: ctx.Configuration, + Integration: ctx, + HTTP: mockHTTP, + Logger: logrus.NewEntry(logrus.New()), + }) + + require.NoError(t, err) + assert.Equal(t, "ready", ctx.State) + + require.Len(t, mockHTTP.Requests, 2) + assert.Equal(t, "https://gitlab.com/api/v4/user", mockHTTP.Requests[0].URL.String()) + assert.Equal(t, "https://gitlab.com/api/v4/groups/123/projects?include_subgroups=true&per_page=100&page=1", mockHTTP.Requests[1].URL.String()) + }) + + t.Run("personal access token - missing token - error state", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypePersonalAccessToken, + "groupId": "123", + "accessToken": "", + }, + } + + err := g.Sync(core.SyncContext{ + Configuration: ctx.Configuration, + Integration: ctx, + }) + + require.Error(t, err) + assert.Contains(t, err.Error(), "access token is required") + assert.Empty(t, ctx.State) + assert.Nil(t, ctx.BrowserAction) + }) + + t.Run("oauth - missing client id - setup instructions", func(t *testing.T) { + ctx 
:= &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypeAppOAuth, + "groupId": "123", + }, + } + + err := g.Sync(core.SyncContext{ + Configuration: ctx.Configuration, + Integration: ctx, + }) + + require.NoError(t, err) + assert.NotNil(t, ctx.BrowserAction) + assert.Contains(t, ctx.BrowserAction.Description, "Click the **Continue** button to go to the Applications page in GitLab") + assert.Equal(t, "https://gitlab.com/-/user_settings/applications", ctx.BrowserAction.URL) + }) + + t.Run("oauth - missing client secret - setup instructions", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypeAppOAuth, + "groupId": "123", + "clientId": "id", + }, + } + + err := g.Sync(core.SyncContext{ + Configuration: ctx.Configuration, + Integration: ctx, + }) + + require.NoError(t, err) + assert.NotNil(t, ctx.BrowserAction) + assert.Contains(t, ctx.BrowserAction.Description, "Click the **Continue** button to go to the Applications page in GitLab") + assert.Equal(t, "https://gitlab.com/-/user_settings/applications", ctx.BrowserAction.URL) + }) + + t.Run("oauth - has client id, no tokens - connect button", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypeAppOAuth, + "groupId": "123", + "clientId": "id", + "clientSecret": "secret", + }, + Metadata: Metadata{ + User: &UserMetadata{ + ID: 123, + Name: "John Doe", + Username: "johndoe", + }, + }, + } + + err := g.Sync(core.SyncContext{ + Configuration: ctx.Configuration, + Integration: ctx, + }) + + require.NoError(t, err) + assert.NotNil(t, ctx.BrowserAction) + assert.Contains(t, ctx.BrowserAction.URL, "/oauth/authorize") + assert.Contains(t, ctx.BrowserAction.Description, "authorize SuperPlane") + + // Verify metadata preservation + metadata, ok := ctx.Metadata.(Metadata) + assert.True(t, ok) + assert.Equal(t, 123, metadata.User.ID) + assert.Equal(t, "John Doe", 
metadata.User.Name) + assert.Equal(t, "johndoe", metadata.User.Username) + assert.NotEmpty(t, metadata.State) + }) + + t.Run("oauth - has tokens - success", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypeAppOAuth, + "groupId": "123", + "clientId": "id", + "clientSecret": "secret", + }, + Secrets: map[string]core.IntegrationSecret{ + OAuthAccessToken: {Name: OAuthAccessToken, Value: []byte("access-token")}, + OAuthRefreshToken: {Name: OAuthRefreshToken, Value: []byte("refresh-token")}, + }, + } + + mockHTTP := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{ + "access_token": "new-access-token", + "refresh_token": "new-refresh-token", + "expires_in": 7200, + "token_type": "Bearer" + }`), + GitlabMockResponse(http.StatusOK, `{"id": 1, "name": "John Doe", "username": "johndoe"}`), + GitlabMockResponse(http.StatusOK, `[{"id": 1, "path_with_namespace": "group/project1", "web_url": "https://gitlab.com/group/project1"}]`), + }, + } + + err := g.Sync(core.SyncContext{ + Configuration: ctx.Configuration, + Integration: ctx, + HTTP: mockHTTP, + Logger: logrus.NewEntry(logrus.New()), + }) + + require.NoError(t, err) + assert.Equal(t, "ready", ctx.State) + + require.Len(t, mockHTTP.Requests, 3) + assert.Equal(t, "https://gitlab.com/oauth/token", mockHTTP.Requests[0].URL.String()) + assert.Equal(t, "https://gitlab.com/api/v4/user", mockHTTP.Requests[1].URL.String()) + assert.Equal(t, "https://gitlab.com/api/v4/groups/123/projects?include_subgroups=true&per_page=100&page=1", mockHTTP.Requests[2].URL.String()) + }) + + t.Run("oauth - access token present but no refresh token - success", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypeAppOAuth, + "groupId": "123", + "clientId": "id", + "clientSecret": "secret", + }, + Secrets: map[string]core.IntegrationSecret{ + OAuthAccessToken: {Name: OAuthAccessToken, 
Value: []byte("existing-access-token")}, + }, + } + + mockHTTP := &contexts.HTTPContext{ + Responses: []*http.Response{ + // No token refresh request expected + GitlabMockResponse(http.StatusOK, `{"id": 1}`), + GitlabMockResponse(http.StatusOK, `[{"id": 1, "path_with_namespace": "group/project1", "web_url": "https://gitlab.com/group/project1"}]`), + }, + } + + err := g.Sync(core.SyncContext{ + Configuration: ctx.Configuration, + Integration: ctx, + HTTP: mockHTTP, + Logger: logrus.NewEntry(logrus.New()), + }) + + require.NoError(t, err) + assert.Equal(t, "ready", ctx.State) + + // Verification: Should skip token endpoint and go straight to API calls + require.Len(t, mockHTTP.Requests, 2) + assert.Equal(t, "https://gitlab.com/api/v4/user", mockHTTP.Requests[0].URL.String()) + assert.Equal(t, "https://gitlab.com/api/v4/groups/123/projects?include_subgroups=true&per_page=100&page=1", mockHTTP.Requests[1].URL.String()) + }) + + t.Run("error cases", func(t *testing.T) { + t.Run("missing authType", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Configuration: map[string]any{}, + } + err := g.Sync(core.SyncContext{ + Configuration: ctx.Configuration, + Integration: ctx, + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "authType is required") + }) + + t.Run("unknown authType", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Configuration: map[string]any{"authType": "unknown"}, + } + err := g.Sync(core.SyncContext{ + Configuration: ctx.Configuration, + Integration: ctx, + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "unknown authType") + }) + }) +} + +func Test__GitLab__HandleRequest(t *testing.T) { + g := &GitLab{} + logger := logrus.NewEntry(logrus.New()) + + t.Run("handle callback success", func(t *testing.T) { + state := "xyz" + ctx := &contexts.IntegrationContext{ + Metadata: Metadata{State: &state}, + Configuration: map[string]any{ + "clientId": "id", + "clientSecret": "secret", + "baseUrl": 
"https://gitlab.com", + "authType": AuthTypeAppOAuth, + }, + Secrets: make(map[string]core.IntegrationSecret), + } + + recorder := httptest.NewRecorder() + req := httptest.NewRequest("GET", "/callback?code=code123&state="+url.QueryEscape(state), nil) + + // Sequence: Exchange Code -> Verify (User) -> Verify (Projects) + mockHTTP := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{ + "access_token": "access", + "refresh_token": "refresh", + "expires_in": 3600 + }`), + GitlabMockResponse(http.StatusOK, `{"id": 1, "name": "John Doe", "username": "johndoe"}`), + GitlabMockResponse(http.StatusOK, `[{"id": 1, "path_with_namespace": "group/project1", "web_url": "https://gitlab.com/group/project1"}]`), + }, + } + + ctx.Configuration["groupId"] = "123" + + g.HandleRequest(core.HTTPRequestContext{ + Request: req, + Response: recorder, + Integration: ctx, + HTTP: mockHTTP, + Logger: logger, + }) + + assert.Equal(t, http.StatusSeeOther, recorder.Code) + assert.Equal(t, "ready", ctx.State) + + require.Len(t, mockHTTP.Requests, 3) + assert.Equal(t, "https://gitlab.com/oauth/token", mockHTTP.Requests[0].URL.String()) + assert.Equal(t, "https://gitlab.com/api/v4/user", mockHTTP.Requests[1].URL.String()) + assert.Equal(t, "https://gitlab.com/api/v4/groups/123/projects?include_subgroups=true&per_page=100&page=1", mockHTTP.Requests[2].URL.String()) + + assert.Equal(t, 1, ctx.Metadata.(Metadata).User.ID) + assert.Equal(t, "John Doe", ctx.Metadata.(Metadata).User.Name) + assert.Equal(t, "johndoe", ctx.Metadata.(Metadata).User.Username) + assert.Len(t, ctx.Metadata.(Metadata).Projects, 1) + }) + + t.Run("error cases", func(t *testing.T) { + t.Run("unknown path", func(t *testing.T) { + ctx := &contexts.IntegrationContext{} + recorder := httptest.NewRecorder() + req := httptest.NewRequest("GET", "/unknown", nil) + + g.HandleRequest(core.HTTPRequestContext{ + Request: req, + Response: recorder, + Integration: ctx, + Logger: logger, + }) + + 
assert.Equal(t, http.StatusNotFound, recorder.Code) + }) + + t.Run("callback failure", func(t *testing.T) { + state := "valid-state" + ctx := &contexts.IntegrationContext{ + Metadata: Metadata{State: &state}, + Configuration: map[string]any{ + "clientId": "id", + "clientSecret": "secret", + "baseUrl": "https://gitlab.com", + }, + Secrets: make(map[string]core.IntegrationSecret), + } + + mockHTTP := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusBadRequest, "{}"), + }, + } + + recorder := httptest.NewRecorder() + req := httptest.NewRequest("GET", "/callback?code=bad&state=valid-state", nil) + + g.HandleRequest(core.HTTPRequestContext{ + Request: req, + Response: recorder, + Integration: ctx, + HTTP: mockHTTP, + Logger: logger, + }) + + assert.Equal(t, http.StatusSeeOther, recorder.Code) + + assert.NotContains(t, ctx.State, "error") + }) + }) +} + +func Test__GitLab__BaseURLNormalization(t *testing.T) { + tests := []struct { + input string + expected string + }{ + {"gitlab.com", "https://gitlab.com"}, + {"http://gitlab.com", "http://gitlab.com"}, + {"https://gitlab.com", "https://gitlab.com"}, + {"https://gitlab.com/", "https://gitlab.com"}, + {"", "https://gitlab.com"}, + } + + for _, tc := range tests { + t.Run(tc.input, func(t *testing.T) { + assert.Equal(t, tc.expected, normalizeBaseURL(tc.input)) + }) + } +} diff --git a/pkg/integrations/gitlab/hooks.go b/pkg/integrations/gitlab/hooks.go new file mode 100644 index 0000000000..e060f15131 --- /dev/null +++ b/pkg/integrations/gitlab/hooks.go @@ -0,0 +1,140 @@ +package gitlab + +import ( + "bytes" + "encoding/json" + "fmt" + "net/http" + "net/url" + + "github.com/superplanehq/superplane/pkg/core" +) + +type HooksClient struct { + *Client +} + +type Hook struct { + ID int `json:"id"` + URL string `json:"url"` + ProjectID int `json:"project_id"` + IssuesEvents bool `json:"issues_events"` + MergeRequestsEvents bool `json:"merge_requests_events"` + PushEvents bool 
`json:"push_events"` + TagPushEvents bool `json:"tag_push_events"` + NoteEvents bool `json:"note_events"` + ConfidentialIssuesEvents bool `json:"confidential_issues_events"` + PipelineEvents bool `json:"pipeline_events"` + WikiPageEvents bool `json:"wiki_page_events"` + DeploymentEvents bool `json:"deployment_events"` + ReleasesEvents bool `json:"releases_events"` +} + +type HookEvents struct { + IssuesEvents bool + MergeRequestsEvents bool + PushEvents bool + TagPushEvents bool + NoteEvents bool + ConfidentialIssuesEvents bool + PipelineEvents bool + WikiPageEvents bool + DeploymentEvents bool + ReleasesEvents bool +} + +func NewHooksClient(httpClient core.HTTPContext, ctx core.IntegrationContext) (*HooksClient, error) { + config, err := ctx.GetConfig("authType") + if err != nil { + return nil, fmt.Errorf("failed to get authType: %v", err) + } + authType := string(config) + + baseURLBytes, _ := ctx.GetConfig("baseUrl") + baseURL := normalizeBaseURL(string(baseURLBytes)) + + token, err := getAuthToken(ctx, authType) + if err != nil { + return nil, err + } + + return &HooksClient{ + Client: &Client{ + baseURL: baseURL, + token: token, + authType: authType, + httpClient: httpClient, + }, + }, nil +} + +func (c *HooksClient) CreateHook(projectID string, webhookURL string, secret string, events HookEvents) (*Hook, error) { + apiURL := fmt.Sprintf("%s/api/%s/projects/%s/hooks", c.baseURL, apiVersion, url.PathEscape(projectID)) + + payload := map[string]any{ + "url": webhookURL, + "token": secret, + "issues_events": events.IssuesEvents, + "merge_requests_events": events.MergeRequestsEvents, + "push_events": events.PushEvents, + "tag_push_events": events.TagPushEvents, + "note_events": events.NoteEvents, + "confidential_issues_events": events.ConfidentialIssuesEvents, + "pipeline_events": events.PipelineEvents, + "wiki_page_events": events.WikiPageEvents, + "deployment_events": events.DeploymentEvents, + "releases_events": events.ReleasesEvents, + } + + body, err := 
json.Marshal(payload) + if err != nil { + return nil, fmt.Errorf("failed to marshal hook payload: %v", err) + } + + req, err := http.NewRequest(http.MethodPost, apiURL, bytes.NewBuffer(body)) + if err != nil { + return nil, fmt.Errorf("failed to create request: %v", err) + } + req.Header.Set("Content-Type", "application/json") + + resp, err := c.do(req) + if err != nil { + return nil, fmt.Errorf("failed to execute request: %v", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusCreated { + respBody := make([]byte, 1024) + n, _ := resp.Body.Read(respBody) + return nil, fmt.Errorf("failed to create hook for project %s: status %d, response: %s", + projectID, resp.StatusCode, string(respBody[:n])) + } + + var hook Hook + if err := json.NewDecoder(resp.Body).Decode(&hook); err != nil { + return nil, fmt.Errorf("failed to decode hook response: %v", err) + } + + return &hook, nil +} + +func (c *HooksClient) DeleteHook(projectID string, hookID int) error { + apiURL := fmt.Sprintf("%s/api/%s/projects/%s/hooks/%d", c.baseURL, apiVersion, url.PathEscape(projectID), hookID) + + req, err := http.NewRequest(http.MethodDelete, apiURL, nil) + if err != nil { + return fmt.Errorf("failed to create request: %v", err) + } + + resp, err := c.do(req) + if err != nil { + return fmt.Errorf("failed to execute request: %v", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusNoContent && resp.StatusCode != http.StatusOK { + return fmt.Errorf("failed to delete hook: status %d", resp.StatusCode) + } + + return nil +} diff --git a/pkg/integrations/gitlab/list_resources.go b/pkg/integrations/gitlab/list_resources.go new file mode 100644 index 0000000000..e124cf7b70 --- /dev/null +++ b/pkg/integrations/gitlab/list_resources.go @@ -0,0 +1,94 @@ +package gitlab + +import ( + "fmt" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/core" +) + +const ( + ResourceTypeMember = "member" + ResourceTypeMilestone = "milestone" + 
ResourceTypeProject = "project" +) + +func (g *GitLab) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + switch resourceType { + case ResourceTypeMember: + return ListMembers(ctx) + case ResourceTypeMilestone: + return ListMilestones(ctx) + case ResourceTypeProject: + return ListProjects(ctx) + default: + return []core.IntegrationResource{}, nil + } +} + +func ListProjects(ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + metadata := Metadata{} + if err := mapstructure.Decode(ctx.Integration.GetMetadata(), &metadata); err != nil { + return nil, fmt.Errorf("failed to decode metadata: %v", err) + } + + resources := make([]core.IntegrationResource, 0, len(metadata.Projects)) + for _, project := range metadata.Projects { + resources = append(resources, core.IntegrationResource{ + Type: ResourceTypeProject, + Name: project.Name, + ID: fmt.Sprintf("%d", project.ID), + }) + } + + return resources, nil +} + +func ListMembers(ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, fmt.Errorf("failed to create client: %v", err) + } + + members, err := client.ListGroupMembers(client.groupID) + if err != nil { + return nil, fmt.Errorf("failed to list members: %v", err) + } + + resources := make([]core.IntegrationResource, 0, len(members)) + for _, m := range members { + resources = append(resources, core.IntegrationResource{ + Type: ResourceTypeMember, + Name: fmt.Sprintf("%s (@%s)", m.Name, m.Username), + ID: fmt.Sprintf("%d", m.ID), + }) + } + return resources, nil +} + +func ListMilestones(ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + projectID := ctx.Parameters["project"] + if projectID == "" { + return []core.IntegrationResource{}, nil + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, fmt.Errorf("failed to create client: %v", err) + 
} + + milestones, err := client.ListMilestones(projectID) + if err != nil { + return nil, fmt.Errorf("failed to list milestones: %v", err) + } + + resources := make([]core.IntegrationResource, 0, len(milestones)) + for _, m := range milestones { + resources = append(resources, core.IntegrationResource{ + Type: ResourceTypeMilestone, + Name: m.Title, + ID: fmt.Sprintf("%d", m.ID), + }) + } + return resources, nil +} diff --git a/pkg/integrations/gitlab/list_resources_test.go b/pkg/integrations/gitlab/list_resources_test.go new file mode 100644 index 0000000000..5f138f0ba6 --- /dev/null +++ b/pkg/integrations/gitlab/list_resources_test.go @@ -0,0 +1,136 @@ +package gitlab + +import ( + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__GitLab__ListResources(t *testing.T) { + g := &GitLab{} + + t.Run("returns empty list for unknown resource type", func(t *testing.T) { + ctx := &contexts.IntegrationContext{} + resources, err := g.ListResources("unknown", core.ListResourcesContext{ + Integration: ctx, + }) + require.NoError(t, err) + assert.Empty(t, resources) + }) + + t.Run("returns list of members", func(t *testing.T) { + ctx := core.ListResourcesContext{ + Integration: &contexts.IntegrationContext{ + Configuration: map[string]any{ + "baseUrl": "https://gitlab.com", + "groupId": "123", + "authType": AuthTypePersonalAccessToken, + "accessToken": "token", + }, + }, + HTTP: &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `[ + {"id": 101, "name": "User One", "username": "user1"}, + {"id": 102, "name": "User Two", "username": "user2"} + ]`), + }, + }, + } + + resources, err := g.ListResources("member", ctx) + + require.NoError(t, err) + assert.Len(t, resources, 2) + assert.Equal(t, "101", resources[0].ID) + assert.Equal(t, "User One (@user1)", 
resources[0].Name) + assert.Equal(t, "member", resources[0].Type) + assert.Equal(t, "102", resources[1].ID) + }) + + t.Run("returns list of projects from metadata", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Metadata: Metadata{ + Projects: []ProjectMetadata{ + {ID: 1, Name: "project1", URL: "http://project1"}, + {ID: 2, Name: "project2", URL: "http://project2"}, + }, + }, + } + + resources, err := g.ListResources("project", core.ListResourcesContext{ + Integration: ctx, + }) + + require.NoError(t, err) + assert.Len(t, resources, 2) + assert.Equal(t, "1", resources[0].ID) + assert.Equal(t, "project1", resources[0].Name) + assert.Equal(t, "project", resources[0].Type) + assert.Equal(t, "2", resources[1].ID) + assert.Equal(t, "project2", resources[1].Name) + }) + + t.Run("handles invalid metadata gracefully", func(t *testing.T) { + ctx := &contexts.IntegrationContext{ + Metadata: "invalid-string-metadata", + } + + _, err := g.ListResources("project", core.ListResourcesContext{ + Integration: ctx, + }) + + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to decode metadata") + }) + + t.Run("returns list of milestones for project", func(t *testing.T) { + ctx := core.ListResourcesContext{ + Integration: &contexts.IntegrationContext{ + Configuration: map[string]any{ + "baseUrl": "https://gitlab.com", + "groupId": "123", + "authType": AuthTypePersonalAccessToken, + "accessToken": "token", + }, + }, + HTTP: &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `[ + {"id": 1, "iid": 1, "title": "v1.0", "state": "active"}, + {"id": 2, "iid": 2, "title": "v2.0", "state": "active"} + ]`), + }, + }, + Parameters: map[string]string{ + "project": "456", + }, + } + + resources, err := g.ListResources("milestone", ctx) + + require.NoError(t, err) + assert.Len(t, resources, 2) + assert.Equal(t, "1", resources[0].ID) + assert.Equal(t, "v1.0", resources[0].Name) + assert.Equal(t, "milestone", resources[0].Type) + 
assert.Equal(t, "2", resources[1].ID) + assert.Equal(t, "v2.0", resources[1].Name) + }) + + t.Run("returns empty list for milestone without project", func(t *testing.T) { + ctx := core.ListResourcesContext{ + Integration: &contexts.IntegrationContext{}, + Parameters: map[string]string{}, + } + + resources, err := g.ListResources("milestone", ctx) + + require.NoError(t, err) + assert.Empty(t, resources) + }) +} diff --git a/pkg/integrations/gitlab/on_issue.go b/pkg/integrations/gitlab/on_issue.go new file mode 100644 index 0000000000..fe92ca62a3 --- /dev/null +++ b/pkg/integrations/gitlab/on_issue.go @@ -0,0 +1,238 @@ +package gitlab + +import ( + "encoding/json" + "fmt" + "net/http" + "slices" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type OnIssue struct{} + +type OnIssueConfiguration struct { + Project string `json:"project" mapstructure:"project"` + Actions []string `json:"actions" mapstructure:"actions"` + Labels []configuration.Predicate `json:"labels" mapstructure:"labels"` +} + +func (i *OnIssue) Name() string { + return "gitlab.onIssue" +} + +func (i *OnIssue) Label() string { + return "On Issue" +} + +func (i *OnIssue) Description() string { + return "Listen to issue events from GitLab" +} + +func (i *OnIssue) Documentation() string { + return `The On Issue trigger starts a workflow execution when issue events occur in a GitLab project. + +## Use Cases + +- **Notify Slack** when an issue is created or assigned for triage +- **Create a Jira issue** when a GitLab issue is created for traceability +- **Update external dashboards** or close linked tickets when an issue is closed + +## Configuration + +- **Project** (required): GitLab project to monitor +- **Actions** (required): Select which issue actions to listen for (opened, closed, reopened, etc.). Default: opened. 
+- **Labels** (optional): Only trigger for issues with specific labels + +## Outputs + +- **Default channel**: Emits issue payload including issue IID, title, state, labels, assignees, author, and action type + +## Webhook Setup + +This trigger automatically sets up a GitLab webhook when configured. The webhook is managed by SuperPlane and will be cleaned up when the trigger is removed.` +} + +func (i *OnIssue) Icon() string { + return "gitlab" +} + +func (i *OnIssue) Color() string { + return "orange" +} + +func (i *OnIssue) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "project", + Label: "Project", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeProject, + }, + }, + }, + { + Name: "actions", + Label: "Actions", + Type: configuration.FieldTypeMultiSelect, + Required: true, + Default: []string{"open"}, + TypeOptions: &configuration.TypeOptions{ + MultiSelect: &configuration.MultiSelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "Opened", Value: "open"}, + {Label: "Closed", Value: "close"}, + {Label: "Reopened", Value: "reopen"}, + {Label: "Updated", Value: "update"}, + }, + }, + }, + }, + { + Name: "labels", + Label: "Labels", + Type: configuration.FieldTypeAnyPredicateList, + Required: false, + TypeOptions: &configuration.TypeOptions{ + AnyPredicateList: &configuration.AnyPredicateListTypeOptions{ + Operators: configuration.AllPredicateOperators, + }, + }, + }, + } +} + +func (i *OnIssue) Setup(ctx core.TriggerContext) error { + var config OnIssueConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if err := ensureProjectInMetadata(ctx.Metadata, ctx.Integration, config.Project); err != nil { + return err + } + + return ctx.Integration.RequestWebhook(WebhookConfiguration{ + 
EventType: "issues", + ProjectID: config.Project, + }) +} + +func (i *OnIssue) Actions() []core.Action { + return []core.Action{} +} + +func (i *OnIssue) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + return nil, nil +} + +func (i *OnIssue) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + var config OnIssueConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode configuration: %w", err) + } + + eventType := ctx.Headers.Get("X-Gitlab-Event") + if eventType == "" { + return http.StatusBadRequest, fmt.Errorf("missing X-Gitlab-Event header") + } + + if eventType != "Issue Hook" { + return http.StatusOK, nil + } + + code, err := verifyWebhookToken(ctx) + if err != nil { + return code, err + } + + data := map[string]any{} + if err := json.Unmarshal(ctx.Body, &data); err != nil { + return http.StatusBadRequest, fmt.Errorf("error parsing request body: %v", err) + } + + // + // Verify that the action is in the allowed list + // + if len(config.Actions) > 0 && !i.whitelistedAction(data, config.Actions) { + return http.StatusOK, nil + } + + // + // Verify that the labels are in the allowed list + // + if len(config.Labels) > 0 && !i.hasWhitelistedLabel(data, config.Labels) { + return http.StatusOK, nil + } + + if err := ctx.Events.Emit("gitlab.issue", data); err != nil { + return http.StatusInternalServerError, fmt.Errorf("error emitting event: %v", err) + } + + return http.StatusOK, nil +} + +func (i *OnIssue) Cleanup(ctx core.TriggerContext) error { + return nil +} + +func (i *OnIssue) whitelistedAction(data map[string]any, allowedActions []string) bool { + attrs, ok := data["object_attributes"].(map[string]any) + if !ok { + return false + } + + action, ok := attrs["action"].(string) + if !ok { + return false + } + + if !slices.Contains(allowedActions, action) { + return false + } + + // + // If not an update action, just return true, + 
// since it's in the allowed list. + // + if action != "update" { + return true + } + + // + // Otherwise, we are dealing with an update, + // and for those, we only accept if the issue is opened. + // + state, ok := attrs["state"].(string) + if !ok { + return false + } + + return state == "opened" +} + +func (i *OnIssue) hasWhitelistedLabel(data map[string]any, allowedLabels []configuration.Predicate) bool { + labels, ok := data["labels"].([]any) + if !ok { + return false + } + + for _, label := range labels { + labelMap, ok := label.(map[string]any) + if !ok { + continue + } + + title, _ := labelMap["title"].(string) + if configuration.MatchesAnyPredicate(allowedLabels, title) { + return true + } + } + + return false +} diff --git a/pkg/integrations/gitlab/on_issue_test.go b/pkg/integrations/gitlab/on_issue_test.go new file mode 100644 index 0000000000..3a9ad0efe4 --- /dev/null +++ b/pkg/integrations/gitlab/on_issue_test.go @@ -0,0 +1,274 @@ +package gitlab + +import ( + "encoding/json" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__OnIssue__HandleWebhook__MissingEventHeader(t *testing.T) { + trigger := &OnIssue{} + + ctx := core.WebhookRequestContext{ + Headers: http.Header{}, + Body: []byte(`{}`), + Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, + } + + code, err := trigger.HandleWebhook(ctx) + assert.Equal(t, http.StatusBadRequest, code) + assert.Error(t, err) + assert.Contains(t, err.Error(), "X-Gitlab-Event") +} + +func Test__OnIssue__HandleWebhook__WrongEventType(t *testing.T) { + trigger := &OnIssue{} + + eventsCtx := &contexts.EventContext{} + headers := http.Header{} + headers.Set("X-Gitlab-Event", "Push Hook") + + ctx := core.WebhookRequestContext{ + Headers: headers, + Body: []byte(`{}`), + Configuration: 
map[string]any{"project": "123", "actions": []string{"open"}}, + Events: eventsCtx, + } + + code, err := trigger.HandleWebhook(ctx) + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Zero(t, eventsCtx.Count()) +} + +func Test__OnIssue__HandleWebhook__InvalidToken(t *testing.T) { + trigger := &OnIssue{} + + headers := http.Header{} + headers.Set("X-Gitlab-Event", "Issue Hook") + headers.Set("X-Gitlab-Token", "wrong-token") + + webhookCtx := &contexts.WebhookContext{Secret: "correct-token"} + + ctx := core.WebhookRequestContext{ + Headers: headers, + Body: []byte(`{}`), + Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, + Webhook: webhookCtx, + } + + code, err := trigger.HandleWebhook(ctx) + assert.Equal(t, http.StatusForbidden, code) + assert.Error(t, err) + assert.Contains(t, err.Error(), "invalid webhook token") +} + +func Test__OnIssue__HandleWebhook__StateNotOpened(t *testing.T) { + trigger := &OnIssue{} + + headers := http.Header{} + headers.Set("X-Gitlab-Event", "Issue Hook") + headers.Set("X-Gitlab-Token", "token") + + webhookCtx := &contexts.WebhookContext{Secret: "token"} + eventsCtx := &contexts.EventContext{} + + data := map[string]any{ + "object_attributes": map[string]any{ + "state": "closed", + "action": "close", + }, + } + body, _ := json.Marshal(data) + + ctx := core.WebhookRequestContext{ + Headers: headers, + Body: body, + Configuration: map[string]any{"project": "123", "actions": []string{"close"}}, + Webhook: webhookCtx, + Events: eventsCtx, + } + + code, err := trigger.HandleWebhook(ctx) + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + + assert.Equal(t, 1, eventsCtx.Count()) + assert.Equal(t, "gitlab.issue", eventsCtx.Payloads[0].Type) +} + +func Test__OnIssue__HandleWebhook__Success(t *testing.T) { + trigger := &OnIssue{} + + headers := http.Header{} + headers.Set("X-Gitlab-Event", "Issue Hook") + headers.Set("X-Gitlab-Token", "token") + + webhookCtx := 
&contexts.WebhookContext{Secret: "token"} + eventsCtx := &contexts.EventContext{} + + data := map[string]any{ + "object_attributes": map[string]any{ + "state": "opened", + "action": "open", + "title": "Test Issue", + }, + } + body, _ := json.Marshal(data) + + ctx := core.WebhookRequestContext{ + Headers: headers, + Body: body, + Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, + Webhook: webhookCtx, + Events: eventsCtx, + } + + code, err := trigger.HandleWebhook(ctx) + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + + assert.Equal(t, 1, eventsCtx.Count()) + assert.Equal(t, "gitlab.issue", eventsCtx.Payloads[0].Type) +} + +func Test__OnIssue__HandleWebhook__Filters(t *testing.T) { + trigger := &OnIssue{} + + headers := http.Header{} + headers.Set("X-Gitlab-Event", "Issue Hook") + headers.Set("X-Gitlab-Token", "token") + + webhookCtx := &contexts.WebhookContext{Secret: "token"} + + baseAttributes := map[string]any{ + "state": "opened", + "action": "open", + } + + t.Run("label match", func(t *testing.T) { + eventsCtx := &contexts.EventContext{} + data := map[string]any{ + "object_attributes": baseAttributes, + "labels": []map[string]any{ + {"title": "bug"}, + {"title": "backend"}, + }, + } + body, _ := json.Marshal(data) + + ctx := core.WebhookRequestContext{ + Headers: headers, + Body: body, + Configuration: map[string]any{"project": "123", "actions": []string{"open"}, "labels": []configuration.Predicate{{Type: configuration.PredicateTypeEquals, Value: "backend"}}}, + Webhook: webhookCtx, + Events: eventsCtx, + } + + code, err := trigger.HandleWebhook(ctx) + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + + assert.Equal(t, 1, eventsCtx.Count()) + assert.Equal(t, "gitlab.issue", eventsCtx.Payloads[0].Type) + }) + + t.Run("label no match", func(t *testing.T) { + eventsCtx := &contexts.EventContext{} + data := map[string]any{ + "object_attributes": baseAttributes, + "labels": []map[string]any{ + {"title": 
"bug"}, + }, + } + body, _ := json.Marshal(data) + + ctx := core.WebhookRequestContext{ + Headers: headers, + Body: body, + Configuration: map[string]any{"project": "123", "actions": []string{"open"}, "labels": []configuration.Predicate{{Type: configuration.PredicateTypeEquals, Value: "backend"}}}, + Webhook: webhookCtx, + Events: eventsCtx, + } + + code, err := trigger.HandleWebhook(ctx) + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + + assert.Zero(t, eventsCtx.Count()) + }) + +} + +func Test__WhitelistedAction__ValidAction(t *testing.T) { + trigger := &OnIssue{} + + t.Run("valid action", func(t *testing.T) { + data := map[string]any{ + "object_attributes": map[string]any{ + "action": "open", + }, + } + result := trigger.whitelistedAction(data, []string{"open", "close"}) + assert.True(t, result) + }) + + t.Run("invalid action", func(t *testing.T) { + data := map[string]any{ + "object_attributes": map[string]any{ + "action": "update", + }, + } + + result := trigger.whitelistedAction(data, []string{"open", "close"}) + assert.False(t, result) + }) + + t.Run("missing action", func(t *testing.T) { + data := map[string]any{ + "object_attributes": map[string]any{}, + } + + result := trigger.whitelistedAction(data, []string{"open", "close"}) + assert.False(t, result) + }) + +} + +func Test__OnIssue__HandleWebhook__UpdateOnClosed(t *testing.T) { + trigger := &OnIssue{} + + headers := http.Header{} + headers.Set("X-Gitlab-Event", "Issue Hook") + headers.Set("X-Gitlab-Token", "token") + + webhookCtx := &contexts.WebhookContext{Secret: "token"} + eventsCtx := &contexts.EventContext{} + + data := map[string]any{ + "object_attributes": map[string]any{ + "state": "closed", + "action": "update", + }, + } + body, _ := json.Marshal(data) + + ctx := core.WebhookRequestContext{ + Headers: headers, + Body: body, + Configuration: map[string]any{"project": "123", "actions": []string{"update"}}, + Webhook: webhookCtx, + Events: eventsCtx, + } + + code, err := 
trigger.HandleWebhook(ctx) + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + + assert.Equal(t, 0, eventsCtx.Count()) +} diff --git a/pkg/integrations/gitlab/webhook_handler.go b/pkg/integrations/gitlab/webhook_handler.go new file mode 100644 index 0000000000..a5055f2ec1 --- /dev/null +++ b/pkg/integrations/gitlab/webhook_handler.go @@ -0,0 +1,102 @@ +package gitlab + +import ( + "fmt" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/core" +) + +type GitLabWebhookHandler struct{} + +func (h *GitLabWebhookHandler) Merge(current, requested any) (any, bool, error) { + return current, false, nil +} + +func (h *GitLabWebhookHandler) CompareConfig(a, b any) (bool, error) { + configA := WebhookConfiguration{} + configB := WebhookConfiguration{} + + if err := mapstructure.Decode(a, &configA); err != nil { + return false, err + } + + if err := mapstructure.Decode(b, &configB); err != nil { + return false, err + } + + if configA.ProjectID != configB.ProjectID { + return false, nil + } + + if configA.EventType != configB.EventType { + return false, nil + } + + return true, nil +} + +func (h *GitLabWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { + hooksClient, err := NewHooksClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, fmt.Errorf("failed to create hooks client: %v", err) + } + + config := WebhookConfiguration{} + if err := mapstructure.Decode(ctx.Webhook.GetConfiguration(), &config); err != nil { + return nil, fmt.Errorf("failed to decode webhook config: %v", err) + } + + secret, err := ctx.Webhook.GetSecret() + if err != nil { + return nil, fmt.Errorf("error getting webhook secret: %v", err) + } + + events := HookEvents{} + switch config.EventType { + case "issues": + events.IssuesEvents = true + case "merge_requests": + events.MergeRequestsEvents = true + case "push": + events.PushEvents = true + case "tag_push": + events.TagPushEvents = true + case "note": + events.NoteEvents = true + 
case "pipeline": + events.PipelineEvents = true + case "releases": + events.ReleasesEvents = true + } + + hook, err := hooksClient.CreateHook(config.ProjectID, ctx.Webhook.GetURL(), string(secret), events) + if err != nil { + return nil, fmt.Errorf("error creating webhook: %v", err) + } + + return &WebhookMetadata{ID: hook.ID}, nil +} + +func (h *GitLabWebhookHandler) Cleanup(ctx core.WebhookHandlerContext) error { + hooksClient, err := NewHooksClient(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("failed to create hooks client: %v", err) + } + + webhook := WebhookMetadata{} + if err := mapstructure.Decode(ctx.Webhook.GetMetadata(), &webhook); err != nil { + return fmt.Errorf("failed to decode webhook metadata: %v", err) + } + + config := WebhookConfiguration{} + if err := mapstructure.Decode(ctx.Webhook.GetConfiguration(), &config); err != nil { + return fmt.Errorf("failed to decode webhook config: %v", err) + } + + if err := hooksClient.DeleteHook(config.ProjectID, webhook.ID); err != nil { + return fmt.Errorf("error deleting webhook: %v", err) + } + + return nil +} diff --git a/pkg/integrations/gitlab/webhook_handler_test.go b/pkg/integrations/gitlab/webhook_handler_test.go new file mode 100644 index 0000000000..745c3adafc --- /dev/null +++ b/pkg/integrations/gitlab/webhook_handler_test.go @@ -0,0 +1,120 @@ +package gitlab + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test__GitLabWebhookHandler__CompareConfig(t *testing.T) { + handler := &GitLabWebhookHandler{} + + testCases := []struct { + name string + configA any + configB any + expectEqual bool + expectError bool + }{ + { + name: "identical configurations", + configA: WebhookConfiguration{ + ProjectID: "123", + EventType: "push", + }, + configB: WebhookConfiguration{ + ProjectID: "123", + EventType: "push", + }, + expectEqual: true, + expectError: false, + }, + { + name: "different project IDs", + configA: 
WebhookConfiguration{ + ProjectID: "123", + EventType: "push", + }, + configB: WebhookConfiguration{ + ProjectID: "456", + EventType: "push", + }, + expectEqual: false, + expectError: false, + }, + { + name: "different event types", + configA: WebhookConfiguration{ + ProjectID: "123", + EventType: "push", + }, + configB: WebhookConfiguration{ + ProjectID: "123", + EventType: "merge_requests", + }, + expectEqual: false, + expectError: false, + }, + { + name: "both fields different", + configA: WebhookConfiguration{ + ProjectID: "123", + EventType: "push", + }, + configB: WebhookConfiguration{ + ProjectID: "456", + EventType: "issues", + }, + expectEqual: false, + expectError: false, + }, + { + name: "comparing map representations", + configA: map[string]any{ + "projectId": "123", + "eventType": "push", + }, + configB: map[string]any{ + "projectId": "123", + "eventType": "push", + }, + expectEqual: true, + expectError: false, + }, + { + name: "invalid first configuration", + configA: "invalid", + configB: WebhookConfiguration{ + ProjectID: "123", + EventType: "push", + }, + expectEqual: false, + expectError: true, + }, + { + name: "invalid second configuration", + configA: WebhookConfiguration{ + ProjectID: "123", + EventType: "push", + }, + configB: "invalid", + expectEqual: false, + expectError: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + equal, err := handler.CompareConfig(tc.configA, tc.configB) + + if tc.expectError { + assert.Error(t, err) + } else { + require.NoError(t, err) + } + + assert.Equal(t, tc.expectEqual, equal) + }) + } +} diff --git a/pkg/server/server.go b/pkg/server/server.go index 9864e9022e..b96ff29078 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -41,6 +41,7 @@ import ( _ "github.com/superplanehq/superplane/pkg/integrations/discord" _ "github.com/superplanehq/superplane/pkg/integrations/grafana" _ "github.com/superplanehq/superplane/pkg/integrations/github" + _ 
"github.com/superplanehq/superplane/pkg/integrations/gitlab" _ "github.com/superplanehq/superplane/pkg/integrations/jira" _ "github.com/superplanehq/superplane/pkg/integrations/openai" _ "github.com/superplanehq/superplane/pkg/integrations/pagerduty" diff --git a/web_src/src/assets/icons/integrations/gitlab.svg b/web_src/src/assets/icons/integrations/gitlab.svg new file mode 100644 index 0000000000..fc1450b3a9 --- /dev/null +++ b/web_src/src/assets/icons/integrations/gitlab.svg @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/web_src/src/pages/workflowv2/mappers/github/utils.ts b/web_src/src/pages/workflowv2/mappers/github/utils.ts index bd1bce4649..5cb39fc791 100644 --- a/web_src/src/pages/workflowv2/mappers/github/utils.ts +++ b/web_src/src/pages/workflowv2/mappers/github/utils.ts @@ -1,7 +1,5 @@ import { MetadataItem } from "@/ui/metadataList"; -import { formatTimeAgo } from "@/utils/date"; -import { CanvasesCanvasNodeExecution } from "@/api-client"; -import { Predicate, formatPredicate } from "../utils"; +import { Predicate, formatPredicate, buildSubtitle, buildExecutionSubtitle } from "../utils"; export function createGithubMetadataItems( repositoryName: string | undefined, @@ -26,17 +24,5 @@ export function createGithubMetadataItems( return metadataItems; } -export function buildGithubSubtitle(content: string | undefined, createdAt?: string): string { - const trimmed = (content || "").trim(); - const timeAgo = createdAt ? 
formatTimeAgo(new Date(createdAt)) : ""; - - if (trimmed && timeAgo) { - return `${trimmed} · ${timeAgo}`; - } - return trimmed || timeAgo; -} - -export function buildGithubExecutionSubtitle(execution: CanvasesCanvasNodeExecution, content?: string): string { - const timestamp = execution.updatedAt || execution.createdAt; - return buildGithubSubtitle(content || "", timestamp); -} +export const buildGithubSubtitle = buildSubtitle; +export const buildGithubExecutionSubtitle = buildExecutionSubtitle; diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/base.ts b/web_src/src/pages/workflowv2/mappers/gitlab/base.ts new file mode 100644 index 0000000000..644ccf5316 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/base.ts @@ -0,0 +1,57 @@ +import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { getColorClass, getBackgroundColorClass } from "@/utils/colors"; +import { getState, getStateMap, getTriggerRenderer } from ".."; +import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; +import { MetadataItem } from "@/ui/metadataList"; +import { NodeInfo, ComponentDefinition, ExecutionInfo } from "../types"; +import { GitLabNodeMetadata } from "./types"; +import { buildGitlabExecutionSubtitle } from "./utils"; + +export function baseProps( + nodes: NodeInfo[], + node: NodeInfo, + componentDefinition: ComponentDefinition, + lastExecutions: ExecutionInfo[], +): ComponentBaseProps { + const lastExecution = lastExecutions.length > 0 ? lastExecutions[0] : null; + const componentName = componentDefinition.name || node.componentName || "unknown"; + + return { + iconSrc: gitlabIcon, + iconColor: getColorClass(componentDefinition.color), + collapsedBackground: getBackgroundColorClass(componentDefinition.color), + collapsed: node.isCollapsed, + title: node.name || componentDefinition.label || componentDefinition.name || "Unnamed component", + eventSections: lastExecution ? 
baseEventSections(nodes, lastExecution, componentName) : undefined, + metadata: metadataList(node), + includeEmptyState: !lastExecution, + eventStateMap: getStateMap(componentName), + }; +} + +function metadataList(node: NodeInfo): MetadataItem[] { + const metadata: MetadataItem[] = []; + const nodeMetadata = node.metadata as GitLabNodeMetadata; + + if (nodeMetadata?.project?.name) { + metadata.push({ icon: "book", label: nodeMetadata.project.name }); + } + + return metadata; +} + +function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent! }); + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventState: getState(componentName)(execution), + eventSubtitle: buildGitlabExecutionSubtitle(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/create_issue.ts b/web_src/src/pages/workflowv2/mappers/gitlab/create_issue.ts new file mode 100644 index 0000000000..d6113cf1c5 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/create_issue.ts @@ -0,0 +1,43 @@ +import { ComponentBaseProps } from "@/ui/componentBase"; +import { + OutputPayload, + ComponentBaseMapper, + ComponentBaseContext, + SubtitleContext, + ExecutionDetailsContext, +} from "../types"; +import { Issue } from "./types"; +import { baseProps } from "./base"; +import { buildGitlabExecutionSubtitle } from "./utils"; +import { getDetailsForApiIssue } from "./issue_utils"; + +export const createIssueMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + return baseProps(context.nodes, context.node, context.componentDefinition, context.lastExecutions); + }, + + 
subtitle(context: SubtitleContext): string { + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + if (outputs?.default?.[0]?.data) { + const issue = outputs.default[0].data as Issue; + return `#${issue.iid} ${issue.title}`; + } + return buildGitlabExecutionSubtitle(context.execution, "Issue Created"); + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const details: Record = {}; + + if (!outputs || !outputs.default || outputs.default.length === 0) { + return details; + } + + if (!outputs.default[0].data) { + return details; + } + + const issue = outputs.default[0].data as Issue; + return { ...getDetailsForApiIssue(issue), ...details }; + }, +}; diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/index.ts b/web_src/src/pages/workflowv2/mappers/gitlab/index.ts new file mode 100644 index 0000000000..6d78e8e9a4 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/index.ts @@ -0,0 +1,16 @@ +import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../types"; +import { buildActionStateRegistry } from "../utils"; +import { createIssueMapper } from "./create_issue"; +import { onIssueTriggerRenderer } from "./on_issue"; + +export const eventStateRegistry: Record = { + createIssue: buildActionStateRegistry("created"), +}; + +export const componentMappers: Record = { + createIssue: createIssueMapper, +}; + +export const triggerRenderers: Record = { + onIssue: onIssueTriggerRenderer, +}; diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/issue_utils.ts b/web_src/src/pages/workflowv2/mappers/gitlab/issue_utils.ts new file mode 100644 index 0000000000..2b5079ee1b --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/issue_utils.ts @@ -0,0 +1,76 @@ +import { Issue } from "./types"; + +/** + * Shared interface for webhook event issue data (object_attributes). 
+ * This is a subset of the full Issue type used in webhook payloads. + */ +export interface WebhookIssue { + id?: number; + iid?: number; + title?: string; + description?: string; + state?: string; + action?: string; + url?: string; +} + +/** + * Get display details for a webhook event issue (from object_attributes). + * Used by trigger renderers. + */ +export function getDetailsForWebhookIssue(issue: WebhookIssue | undefined): Record { + if (!issue) { + return {}; + } + + return { + URL: issue.url || "", + Title: issue.title || "", + Action: issue.action || "", + State: issue.state || "", + IID: issue.iid?.toString() || "", + }; +} + +/** + * Get display details for a full API Issue response. + * Used by action mappers (create_issue, etc.). + */ +export function getDetailsForApiIssue(issue: Issue | undefined): Record { + if (!issue) { + return {}; + } + + const details: Record = { + IID: issue.iid?.toString() || "", + ID: issue.id?.toString() || "", + State: issue.state || "", + URL: issue.web_url || "", + Title: issue.title || "-", + "Created At": issue.created_at ? new Date(issue.created_at).toLocaleString() : "-", + "Created By": issue.author?.username || "-", + }; + + if (issue.closed_by) { + details["Closed By"] = issue.closed_by.username; + details["Closed At"] = issue.closed_at ? 
new Date(issue.closed_at).toLocaleString() : ""; + } + + if (issue.labels && issue.labels.length > 0) { + details["Labels"] = issue.labels.join(", "); + } + + if (issue.assignees && issue.assignees.length > 0) { + details["Assignees"] = issue.assignees.map((assignee) => assignee.username).join(", "); + } + + if (issue.milestone) { + details["Milestone"] = issue.milestone.title; + } + + if (issue.due_date) { + details["Due Date"] = issue.due_date; + } + + return details; +} diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/on_issue.ts b/web_src/src/pages/workflowv2/mappers/gitlab/on_issue.ts new file mode 100644 index 0000000000..51c308b571 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/on_issue.ts @@ -0,0 +1,110 @@ +import { getColorClass, getBackgroundColorClass } from "@/utils/colors"; +import { TriggerEventContext, TriggerRenderer, TriggerRendererContext } from "../types"; +import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; +import { TriggerProps } from "@/ui/trigger"; +import { GitLabNodeMetadata } from "./types"; +import { buildGitlabSubtitle } from "./utils"; +import { getDetailsForWebhookIssue, WebhookIssue } from "./issue_utils"; +import { formatPredicate, Predicate } from "../utils"; + +interface OnIssueConfiguration { + actions: string[]; + project: string; + labels: Predicate[]; +} + +interface OnIssueEventData { + object_kind?: string; + event_type?: string; + object_attributes?: WebhookIssue; + user?: { + id: number; + name: string; + username: string; + }; + project?: { + id: number; + name: string; + path_with_namespace: string; + web_url: string; + }; +} + +export const onIssueTriggerRenderer: TriggerRenderer = { + getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { + const eventData = context.event?.data as OnIssueEventData; + const issue = eventData?.object_attributes; + + return { + title: `#${issue?.iid ?? 
""} - ${issue?.title || "Issue"}`, + subtitle: buildGitlabSubtitle(issue?.action || "", context.event?.createdAt), + }; + }, + + getRootEventValues: (context: TriggerEventContext): Record => { + const eventData = context.event?.data as OnIssueEventData; + const issue = eventData?.object_attributes; + const values = getDetailsForWebhookIssue(issue); + + if (eventData?.user?.username) { + values["Author"] = eventData.user.username; + } + + if (eventData?.project?.path_with_namespace) { + values["Project"] = eventData.project.path_with_namespace; + } + + return values; + }, + + getTriggerProps: (context: TriggerRendererContext): TriggerProps => { + const { node, definition, lastEvent } = context; + const metadata = node.metadata as unknown as GitLabNodeMetadata; + const configuration = node.configuration as unknown as OnIssueConfiguration; + const metadataItems = []; + + if (metadata?.project?.name) { + metadataItems.push({ + icon: "book", + label: metadata.project.name, + }); + } + + if (configuration?.actions) { + metadataItems.push({ + icon: "funnel", + label: configuration.actions.join(", "), + }); + } + + if (configuration?.labels && configuration?.labels?.length > 0) { + metadataItems.push({ + icon: "label", + label: configuration.labels.map((label) => formatPredicate(label)).join(", "), + }); + } + + const props: TriggerProps = { + title: node.name || definition.label || "Unnamed trigger", + iconSrc: gitlabIcon, + iconColor: getColorClass(definition.color), + collapsedBackground: getBackgroundColorClass(definition.color), + metadata: metadataItems, + }; + + if (lastEvent) { + const eventData = lastEvent.data as OnIssueEventData; + const issue = eventData?.object_attributes; + + props.lastEventData = { + title: `#${issue?.iid ?? 
""} - ${issue?.title || "Issue"}`, + subtitle: buildGitlabSubtitle(issue?.action || "", lastEvent.createdAt), + receivedAt: new Date(lastEvent.createdAt!), + state: "triggered", + eventId: lastEvent.id!, + }; + } + + return props; + }, +}; diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/types.ts b/web_src/src/pages/workflowv2/mappers/gitlab/types.ts new file mode 100644 index 0000000000..784cd7ccb6 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/types.ts @@ -0,0 +1,43 @@ +export interface Issue { + id: number; + iid: number; + project_id: number; + title: string; + description: string; + state: string; + created_at: string; + updated_at: string; + closed_at?: string; + closed_by?: User; + labels: string[]; + assignees?: User[]; + author: User; + type: string; + web_url: string; + milestone?: Milestone; + due_date?: string; +} + +export interface Milestone { + id: number; + iid: number; + title: string; + state: string; +} + +export interface User { + id: number; + name: string; + username: string; + state: string; + avatar_url: string; + web_url: string; +} + +export interface GitLabNodeMetadata { + project?: { + name?: string; + url?: string; + id?: number; + }; +} diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/utils.ts b/web_src/src/pages/workflowv2/mappers/gitlab/utils.ts new file mode 100644 index 0000000000..3f83777de7 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/utils.ts @@ -0,0 +1,4 @@ +import { buildSubtitle, buildExecutionSubtitle } from "../utils"; + +export const buildGitlabSubtitle = buildSubtitle; +export const buildGitlabExecutionSubtitle = buildExecutionSubtitle; diff --git a/web_src/src/pages/workflowv2/mappers/index.ts b/web_src/src/pages/workflowv2/mappers/index.ts index 9668e345ee..67826690fe 100644 --- a/web_src/src/pages/workflowv2/mappers/index.ts +++ b/web_src/src/pages/workflowv2/mappers/index.ts @@ -25,6 +25,11 @@ import { eventStateRegistry as githubEventStateRegistry, 
customFieldRenderers as githubCustomFieldRenderers, } from "./github/index"; +import { + componentMappers as gitlabComponentMappers, + triggerRenderers as gitlabTriggerRenderers, + eventStateRegistry as gitlabEventStateRegistry, +} from "./gitlab/index"; import { componentMappers as pagerdutyComponentMappers, triggerRenderers as pagerdutyTriggerRenderers, @@ -136,6 +141,7 @@ const appMappers: Record> = { cloudflare: cloudflareComponentMappers, semaphore: semaphoreComponentMappers, github: githubComponentMappers, + gitlab: gitlabComponentMappers, pagerduty: pagerdutyComponentMappers, dash0: dash0ComponentMappers, daytona: daytonaComponentMappers, @@ -156,6 +162,7 @@ const appTriggerRenderers: Record> = { cloudflare: cloudflareTriggerRenderers, semaphore: semaphoreTriggerRenderers, github: githubTriggerRenderers, + gitlab: gitlabTriggerRenderers, pagerduty: pagerdutyTriggerRenderers, dash0: dash0TriggerRenderers, daytona: daytonaTriggerRenderers, @@ -189,6 +196,7 @@ const appEventStateRegistries: Record openai: openaiEventStateRegistry, claude: claudeEventStateRegistry, aws: awsEventStateRegistry, + gitlab: gitlabEventStateRegistry, grafana: grafanaEventStateRegistry, }; diff --git a/web_src/src/pages/workflowv2/mappers/utils.ts b/web_src/src/pages/workflowv2/mappers/utils.ts index 3ce64fa734..5f30ad219c 100644 --- a/web_src/src/pages/workflowv2/mappers/utils.ts +++ b/web_src/src/pages/workflowv2/mappers/utils.ts @@ -1,4 +1,5 @@ import { DEFAULT_EVENT_STATE_MAP } from "@/ui/componentBase"; +import { formatTimeAgo } from "@/utils/date"; import { EventStateRegistry } from "./types"; import { defaultStateFunction } from "./stateRegistry"; @@ -95,3 +96,23 @@ export function formatPredicate(predicate: Predicate): string { return predicate.value; } } + +export function buildSubtitle(content: string | undefined, createdAt?: string): string { + const trimmed = (content || "").trim(); + const timeAgo = createdAt ? 
formatTimeAgo(new Date(createdAt)) : ""; + + if (trimmed && timeAgo) { + return `${trimmed} · ${timeAgo}`; + } + return trimmed || timeAgo; +} + +export interface ExecutionLike { + createdAt: string; + updatedAt?: string; +} + +export function buildExecutionSubtitle(execution: ExecutionLike, content?: string): string { + const timestamp = execution.updatedAt || execution.createdAt; + return buildSubtitle(content || "", timestamp); +} diff --git a/web_src/src/ui/BuildingBlocksSidebar/index.tsx b/web_src/src/ui/BuildingBlocksSidebar/index.tsx index 546254986a..b34fae223c 100644 --- a/web_src/src/ui/BuildingBlocksSidebar/index.tsx +++ b/web_src/src/ui/BuildingBlocksSidebar/index.tsx @@ -19,6 +19,7 @@ import daytonaIcon from "@/assets/icons/integrations/daytona.svg"; import datadogIcon from "@/assets/icons/integrations/datadog.svg"; import discordIcon from "@/assets/icons/integrations/discord.svg"; import githubIcon from "@/assets/icons/integrations/github.svg"; +import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; import jiraIcon from "@/assets/icons/integrations/jira.svg"; import openAiIcon from "@/assets/icons/integrations/openai.svg"; import claudeIcon from "@/assets/icons/integrations/claude.svg"; @@ -397,6 +398,7 @@ function CategorySection({ daytona: daytonaIcon, discord: discordIcon, github: githubIcon, + gitlab: gitlabIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, @@ -469,6 +471,7 @@ function CategorySection({ datadog: datadogIcon, discord: discordIcon, github: githubIcon, + gitlab: gitlabIcon, openai: openAiIcon, "open-ai": openAiIcon, claude: claudeIcon, diff --git a/web_src/src/ui/IntegrationInstructions.tsx b/web_src/src/ui/IntegrationInstructions.tsx index 03f8adba67..1df6801046 100644 --- a/web_src/src/ui/IntegrationInstructions.tsx +++ b/web_src/src/ui/IntegrationInstructions.tsx @@ -22,16 +22,33 @@ export interface IntegrationInstructionsProps { export function IntegrationInstructions({ description, onContinue, className = 
"" }: IntegrationInstructionsProps) { if (!description?.trim()) return null; + const normalizedDescription = description.replace(/\r\n/g, "\n").replace(/\n(?!\n)/g, " \n"); + return (

{children}

, + h2: ({ children }) =>

{children}

, + h3: ({ children }) =>

{children}

, + h4: ({ children }) =>

{children}

, + p: ({ children }) =>

{children}

, + ul: ({ children }) =>
    {children}
, + ol: ({ children }) =>
    {children}
, + li: ({ children }) =>
  • {children}
  • , + a: ({ children, href }) => ( + + {children} + + ), + code: ({ children }) => {children}, strong: ({ children }) => {children}, + em: ({ children }) => {children}, }} > - {description} + {normalizedDescription}
    {onContinue && ( diff --git a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index 25846ebe99..4756c09f7d 100644 --- a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -8,6 +8,7 @@ import datadogIcon from "@/assets/icons/integrations/datadog.svg"; import daytonaIcon from "@/assets/icons/integrations/daytona.svg"; import discordIcon from "@/assets/icons/integrations/discord.svg"; import githubIcon from "@/assets/icons/integrations/github.svg"; +import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; import jiraIcon from "@/assets/icons/integrations/jira.svg"; import openAiIcon from "@/assets/icons/integrations/openai.svg"; import claudeIcon from "@/assets/icons/integrations/claude.svg"; @@ -28,6 +29,7 @@ export const INTEGRATION_APP_LOGO_MAP: Record = { daytona: daytonaIcon, discord: discordIcon, github: githubIcon, + gitlab: gitlabIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, @@ -49,6 +51,7 @@ export const APP_LOGO_MAP: Record> = { daytona: daytonaIcon, discord: discordIcon, github: githubIcon, + gitlab: gitlabIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, diff --git a/web_src/src/utils/integrationDisplayName.ts b/web_src/src/utils/integrationDisplayName.ts index d54f92ea5f..3166153dbb 100644 --- a/web_src/src/utils/integrationDisplayName.ts +++ b/web_src/src/utils/integrationDisplayName.ts @@ -4,6 +4,7 @@ */ const INTEGRATION_TYPE_DISPLAY_NAMES: Record = { github: "GitHub", + gitlab: "GitLab", openai: "OpenAI", claude: "Claude", pagerduty: "PagerDuty", From d67da97643da93481aa6a3f100e752ddbc1777a6 Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Mon, 9 Feb 2026 19:08:32 -0300 Subject: [PATCH 032/160] chore: add Logger to WebhookRequestContext (#2988) This makes it possible for triggers to log more contextual messages when they have filters configured. 
--------- Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- pkg/core/trigger.go | 1 + pkg/integrations/gitlab/on_issue.go | 31 ++++++++++++++++++------ pkg/integrations/gitlab/on_issue_test.go | 14 ++++++++--- pkg/public/server.go | 6 +++++ 4 files changed, 42 insertions(+), 10 deletions(-) diff --git a/pkg/core/trigger.go b/pkg/core/trigger.go index 7f4ef72e65..523874b102 100644 --- a/pkg/core/trigger.go +++ b/pkg/core/trigger.go @@ -115,6 +115,7 @@ type WebhookRequestContext struct { WorkflowID string NodeID string Configuration any + Logger *log.Entry Webhook NodeWebhookContext Events EventContext Integration IntegrationContext diff --git a/pkg/integrations/gitlab/on_issue.go b/pkg/integrations/gitlab/on_issue.go index fe92ca62a3..d76f646904 100644 --- a/pkg/integrations/gitlab/on_issue.go +++ b/pkg/integrations/gitlab/on_issue.go @@ -7,6 +7,7 @@ import ( "slices" "github.com/mitchellh/mapstructure" + log "github.com/sirupsen/logrus" "github.com/superplanehq/superplane/pkg/configuration" "github.com/superplanehq/superplane/pkg/core" ) @@ -159,14 +160,14 @@ func (i *OnIssue) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { // // Verify that the action is in the allowed list // - if len(config.Actions) > 0 && !i.whitelistedAction(data, config.Actions) { + if len(config.Actions) > 0 && !i.whitelistedAction(ctx.Logger, data, config.Actions) { return http.StatusOK, nil } // // Verify that the labels are in the allowed list // - if len(config.Labels) > 0 && !i.hasWhitelistedLabel(data, config.Labels) { + if len(config.Labels) > 0 && !i.hasWhitelistedLabel(ctx.Logger, data, config.Labels) { return http.StatusOK, nil } @@ -181,7 +182,7 @@ func (i *OnIssue) Cleanup(ctx core.TriggerContext) error { return nil } -func (i *OnIssue) whitelistedAction(data map[string]any, allowedActions []string) bool { +func (i *OnIssue) whitelistedAction(logger *log.Entry, data map[string]any, allowedActions []string) bool { attrs, ok := 
data["object_attributes"].(map[string]any) if !ok { return false @@ -193,6 +194,7 @@ func (i *OnIssue) whitelistedAction(data map[string]any, allowedActions []string } if !slices.Contains(allowedActions, action) { + logger.Infof("Action %s is not in the allowed list: %v", action, allowedActions) return false } @@ -213,26 +215,41 @@ func (i *OnIssue) whitelistedAction(data map[string]any, allowedActions []string return false } - return state == "opened" + if state != "opened" { + logger.Infof("Received update for issue in non-opened state: %s - ignoring", state) + return false + } + + return true } -func (i *OnIssue) hasWhitelistedLabel(data map[string]any, allowedLabels []configuration.Predicate) bool { +func (i *OnIssue) hasWhitelistedLabel(logger *log.Entry, data map[string]any, allowedLabels []configuration.Predicate) bool { labels, ok := data["labels"].([]any) if !ok { return false } + labelNames := []string{} for _, label := range labels { labelMap, ok := label.(map[string]any) if !ok { continue } - title, _ := labelMap["title"].(string) - if configuration.MatchesAnyPredicate(allowedLabels, title) { + title, ok := labelMap["title"].(string) + if !ok { + continue + } + + labelNames = append(labelNames, title) + } + + for _, labelName := range labelNames { + if configuration.MatchesAnyPredicate(allowedLabels, labelName) { return true } } + logger.Infof("Labels do not match the allowed list: Received: %v, Allowed: %v", labelNames, allowedLabels) return false } diff --git a/pkg/integrations/gitlab/on_issue_test.go b/pkg/integrations/gitlab/on_issue_test.go index 3a9ad0efe4..b65b5f8aac 100644 --- a/pkg/integrations/gitlab/on_issue_test.go +++ b/pkg/integrations/gitlab/on_issue_test.go @@ -5,6 +5,7 @@ import ( "net/http" "testing" + log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" "github.com/superplanehq/superplane/pkg/configuration" "github.com/superplanehq/superplane/pkg/core" @@ -38,6 +39,7 @@ func 
Test__OnIssue__HandleWebhook__WrongEventType(t *testing.T) { Body: []byte(`{}`), Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, Events: eventsCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -60,6 +62,7 @@ func Test__OnIssue__HandleWebhook__InvalidToken(t *testing.T) { Body: []byte(`{}`), Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, Webhook: webhookCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -92,6 +95,7 @@ func Test__OnIssue__HandleWebhook__StateNotOpened(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"close"}}, Webhook: webhookCtx, Events: eventsCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -127,6 +131,7 @@ func Test__OnIssue__HandleWebhook__Success(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, Webhook: webhookCtx, Events: eventsCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -168,6 +173,7 @@ func Test__OnIssue__HandleWebhook__Filters(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"open"}, "labels": []configuration.Predicate{{Type: configuration.PredicateTypeEquals, Value: "backend"}}}, Webhook: webhookCtx, Events: eventsCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -194,6 +200,7 @@ func Test__OnIssue__HandleWebhook__Filters(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"open"}, "labels": []configuration.Predicate{{Type: configuration.PredicateTypeEquals, Value: "backend"}}}, Webhook: webhookCtx, Events: eventsCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -214,7 +221,7 @@ func Test__WhitelistedAction__ValidAction(t *testing.T) { "action": "open", }, } - result := trigger.whitelistedAction(data, []string{"open", 
"close"}) + result := trigger.whitelistedAction(log.NewEntry(log.New()), data, []string{"open", "close"}) assert.True(t, result) }) @@ -225,7 +232,7 @@ func Test__WhitelistedAction__ValidAction(t *testing.T) { }, } - result := trigger.whitelistedAction(data, []string{"open", "close"}) + result := trigger.whitelistedAction(log.NewEntry(log.New()), data, []string{"open", "close"}) assert.False(t, result) }) @@ -234,7 +241,7 @@ func Test__WhitelistedAction__ValidAction(t *testing.T) { "object_attributes": map[string]any{}, } - result := trigger.whitelistedAction(data, []string{"open", "close"}) + result := trigger.whitelistedAction(log.NewEntry(log.New()), data, []string{"open", "close"}) assert.False(t, result) }) @@ -264,6 +271,7 @@ func Test__OnIssue__HandleWebhook__UpdateOnClosed(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"update"}}, Webhook: webhookCtx, Events: eventsCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) diff --git a/pkg/public/server.go b/pkg/public/server.go index 6d3b72c9c0..4c8eb55d61 100644 --- a/pkg/public/server.go +++ b/pkg/public/server.go @@ -788,6 +788,7 @@ func (s *Server) executeTriggerNode(ctx context.Context, body []byte, headers ht return http.StatusInternalServerError, fmt.Errorf("trigger not found: %w", err) } + logger := logging.ForNode(node) tx := database.Conn() var integrationCtx core.IntegrationContext if node.AppInstallationID != nil { @@ -796,6 +797,7 @@ func (s *Server) executeTriggerNode(ctx context.Context, body []byte, headers ht return http.StatusInternalServerError, integrationErr } + logger = logging.WithIntegration(logger, *integration) integrationCtx = contexts.NewIntegrationContext(tx, &node, integration, s.encryptor, s.registry) } @@ -805,6 +807,7 @@ func (s *Server) executeTriggerNode(ctx context.Context, body []byte, headers ht WorkflowID: node.WorkflowID.String(), NodeID: node.NodeID, Configuration: node.Configuration.Data(), + Logger: 
logger, HTTP: s.registry.HTTPContext(), Webhook: contexts.NewNodeWebhookContext(ctx, tx, s.encryptor, &node, s.BaseURL+s.BasePath), Events: contexts.NewEventContext(tx, &node), @@ -819,6 +822,7 @@ func (s *Server) executeComponentNode(ctx context.Context, body []byte, headers return http.StatusInternalServerError, fmt.Errorf("component not found: %w", err) } + logger := logging.ForNode(node) tx := database.Conn() var integrationCtx core.IntegrationContext if node.AppInstallationID != nil { @@ -827,6 +831,7 @@ func (s *Server) executeComponentNode(ctx context.Context, body []byte, headers return http.StatusInternalServerError, integrationErr } + logger = logging.WithIntegration(logger, *integration) integrationCtx = contexts.NewIntegrationContext(tx, &node, integration, s.encryptor, s.registry) } @@ -836,6 +841,7 @@ func (s *Server) executeComponentNode(ctx context.Context, body []byte, headers WorkflowID: node.WorkflowID.String(), NodeID: node.NodeID, Configuration: node.Configuration.Data(), + Logger: logger, HTTP: s.registry.HTTPContext(), Webhook: contexts.NewNodeWebhookContext(ctx, tx, s.encryptor, &node, s.BaseURL+s.BasePath), Events: contexts.NewEventContext(tx, &node), From 746443549750857b39697dc9b9dd9f58158fe31c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pedro=20Le=C3=A3o?= <60622592+forestileao@users.noreply.github.com> Date: Tue, 10 Feb 2026 00:24:17 -0300 Subject: [PATCH 033/160] fix: infinite redirect due to invalid org ID (#2992) ## Summary Fixes an infinite redirect loop when the URL contains an invalid organization ID. Because our API interceptor redirects any `401` to login, requests made with an invalid org ID caused repeated redirects even for authenticated users. ## Root Cause - `organizationId` from route params was treated as valid without format validation. - `me` / permissions requests still ran with an invalid org context. - Backend returned `401`, interceptor redirected to login, causing a loop. 
## Changes - Added UUID validator utility in `web_src/src/lib/utils.ts`. - Updated `useOrganizationId` in `web_src/src/utils/withOrganizationHeader.ts` to return `null` when route org ID is not a UUID. - Updated `useMe` in `web_src/src/hooks/useMe.ts` to only execute when a valid org ID exists (`enabled: !!organizationId`). - Refactored `PermissionsContext` in `web_src/src/contexts/PermissionsContext.tsx` to: - use `useOrganizationId()` - reuse `useMe()` instead of duplicating the `me` query ## Expected Behavior After Fix - Invalid org IDs no longer trigger authenticated API calls in org context. - No `401` is generated from this invalid-org path. - No infinite redirect to login. Closes: https://github.com/superplanehq/superplane/issues/2908 --------- Signed-off-by: Pedro F. Leao Signed-off-by: Muhammad Fuzail Zubari --- web_src/src/contexts/PermissionsContext.tsx | 24 ++++++--------------- web_src/src/hooks/useMe.ts | 4 +++- web_src/src/lib/utils.ts | 4 ++++ web_src/src/utils/withOrganizationHeader.ts | 4 ++++ 4 files changed, 18 insertions(+), 18 deletions(-) diff --git a/web_src/src/contexts/PermissionsContext.tsx b/web_src/src/contexts/PermissionsContext.tsx index 4f1ba67af3..7f72bc850a 100644 --- a/web_src/src/contexts/PermissionsContext.tsx +++ b/web_src/src/contexts/PermissionsContext.tsx @@ -1,9 +1,9 @@ import React, { createContext, useCallback, useContext, useMemo } from "react"; -import { useParams } from "react-router-dom"; import { useQuery } from "@tanstack/react-query"; -import { AuthorizationPermission, meMe, usersListUserPermissions } from "@/api-client"; -import { withOrganizationHeader } from "@/utils/withOrganizationHeader"; +import { AuthorizationPermission, usersListUserPermissions } from "@/api-client"; +import { useOrganizationId, withOrganizationHeader } from "@/utils/withOrganizationHeader"; +import { useMe } from "@/hooks/useMe"; interface PermissionsContextType { permissions: AuthorizationPermission[]; @@ -30,20 +30,10 @@ interface 
PermissionsProviderProps { } export const PermissionsProvider: React.FC = ({ children }) => { - const { organizationId } = useParams<{ organizationId?: string }>(); + const organizationId = useOrganizationId(); + const { data: me, isLoading: meLoading } = useMe(); - const meQuery = useQuery({ - queryKey: ["me", organizationId], - queryFn: async () => { - const response = await meMe(withOrganizationHeader()); - return response.data || null; - }, - enabled: !!organizationId, - staleTime: 5 * 60 * 1000, - gcTime: 10 * 60 * 1000, - }); - - const userId = meQuery.data?.id; + const userId = me?.id; const permissionsQuery = useQuery({ queryKey: ["permissions", organizationId, userId], @@ -84,7 +74,7 @@ export const PermissionsProvider: React.FC = ({ childr [permissionSet], ); - const isLoading = meQuery.isLoading || permissionsQuery.isLoading; + const isLoading = meLoading || permissionsQuery.isLoading; return ( {children} diff --git a/web_src/src/hooks/useMe.ts b/web_src/src/hooks/useMe.ts index 7c9dd03e6d..6a29be65ce 100644 --- a/web_src/src/hooks/useMe.ts +++ b/web_src/src/hooks/useMe.ts @@ -1,12 +1,13 @@ import { useQuery } from "@tanstack/react-query"; import { meMe } from "@/api-client"; -import { withOrganizationHeader } from "@/utils/withOrganizationHeader"; +import { useOrganizationId, withOrganizationHeader } from "@/utils/withOrganizationHeader"; export const meKeys = { me: ["me"] as const, }; export const useMe = () => { + const organizationId = useOrganizationId(); return useQuery({ queryKey: meKeys.me, queryFn: async () => { @@ -15,5 +16,6 @@ export const useMe = () => { }, staleTime: 5 * 60 * 1000, gcTime: 10 * 60 * 1000, + enabled: !!organizationId, }); }; diff --git a/web_src/src/lib/utils.ts b/web_src/src/lib/utils.ts index 92e202948c..324df1bc22 100644 --- a/web_src/src/lib/utils.ts +++ b/web_src/src/lib/utils.ts @@ -154,3 +154,7 @@ export function isUrl(value: string): boolean { return false; } } + +export const isUUID = (value: string): boolean => { + 
return /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/.test(value); +}; diff --git a/web_src/src/utils/withOrganizationHeader.ts b/web_src/src/utils/withOrganizationHeader.ts index 93f4aae67f..c8705b584d 100644 --- a/web_src/src/utils/withOrganizationHeader.ts +++ b/web_src/src/utils/withOrganizationHeader.ts @@ -1,7 +1,11 @@ +import { isUUID } from "@/lib/utils"; import { useParams } from "react-router-dom"; export const useOrganizationId = (): string | null => { const { organizationId } = useParams<{ organizationId: string }>(); + if (organizationId && !isUUID(organizationId)) { + return null; + } return organizationId || null; }; From ba9d6672ae97209402fbe6a7f733584d8900bb1c Mon Sep 17 00:00:00 2001 From: Aleksandar Mitrovic <61409859+AleksandarCole@users.noreply.github.com> Date: Tue, 10 Feb 2026 08:30:01 +0100 Subject: [PATCH 034/160] chore: Update changelog skill for cursor (#2993) Signed-off-by: Muhammad Fuzail Zubari --- .cursor/commands/changelog.md | 10 +++--- .cursor/skills/superplane-changelog/SKILL.md | 34 ++++++++++++-------- 2 files changed, 25 insertions(+), 19 deletions(-) diff --git a/.cursor/commands/changelog.md b/.cursor/commands/changelog.md index 05419034b6..6f3da21174 100644 --- a/.cursor/commands/changelog.md +++ b/.cursor/commands/changelog.md @@ -6,17 +6,17 @@ description: Generate a "what's new" changelog from merged commits over a time r Generate a changelog of what was merged to `main` for a given time range. The output is a single markdown file in `tmp/` with new integrations, new components and triggers, improvements, security updates, and bug fixes. -**Use the skill `superplane-changelog`** for the full workflow: time range, classifying commits (new integrations vs new components vs improvements vs security vs bug fixes), format rules (no em dashes, no "We", **Integration:** components, user-focused improvements, dedicated Security section with CVE when available), and output structure. 
+**Use the skill `superplane-changelog`** for the full workflow: time range, classifying commits (new integrations vs new components vs improvements vs security vs bug fixes), format rules (no em dashes, no "We", **Integration:** components, user-focused improvements, dedicated Security section with CVE when available), and output structure. Section titles must include numeric counts for both integrations (e.g. "#### 3 new integrations") and components/triggers (e.g. "#### 12 new components and triggers"). ## Input -- **Time range** (required): e.g. "since Monday", "last 5 days", "since last Friday", or "from Feb 3 to now". If the user does not specify, ask or default to "since Monday (5 days)". +- **Time range** (required): e.g. "since Monday", "last 5 days", "since last Friday", "from Feb 3 to now", or "since v0.6.0". If the user does not specify, ask or default to "since Monday (5 days)". ## Process -1. Determine start and end dates from the user's time range. -2. Run `git log --since="" --format="%h %ad %s" --date=short main` and use it to identify what landed in the window. -3. Classify: new integrations (whole integration new), new components/triggers only (filter by date; for existing integrations list only new components), user-facing improvements (no tech-only items), security fixes (separate section; CVE when available), bug fixes. +1. Determine start and end of the window. When the range is version-based (e.g. "since v0.6.0"), use **date and time**: get the tag's commit timestamp (e.g. `git log -1 --format="%cI" v0.6.0`) so the window is strictly after the tag and same-day commits before the tag are excluded. +2. Run `git log --since="" --format="%h %ad %s" main` with `` as a date (`YYYY-MM-DD`) or as the tag's ISO 8601 timestamp when using a version tag. Use `--date=short` or `--date=iso` as appropriate. Use the result to identify what landed in the window. +3. 
Classify: new integrations (whole integration new), new components/triggers only (filter by date; for existing integrations list only new components), user-facing improvements (no tech-only items), security fixes (separate section; CVE when available), bug fixes. Do not include or derive entries from `chore:` commits. Omit bug/security fixes that only affect a component or integration introduced in this changelog window. 4. Resolve component/trigger names from `pkg/integrations/` and `pkg/components/` (Labels). 5. Write `tmp/changelog__to_.md` following the skill's structure and format rules. diff --git a/.cursor/skills/superplane-changelog/SKILL.md b/.cursor/skills/superplane-changelog/SKILL.md index 83d435012c..c55b70e929 100644 --- a/.cursor/skills/superplane-changelog/SKILL.md +++ b/.cursor/skills/superplane-changelog/SKILL.md @@ -11,9 +11,13 @@ Use this skill when the user wants a changelog of what was merged to `main` over ## 1. Determine time range -- **User may say**: "since Monday", "last 5 days", "since last Friday", "Feb 3 to now", or a specific date. -- **Compute**: Start date (e.g. last Monday = start of week) and end date (today). For "last 5 days" use Monday to Friday; for "since last Friday" use that Friday through today. -- **Git**: Use `git log --since="YYYY-MM-DD" --format="%h %ad %s" --date=short main` to list commits. Only include in the changelog items whose merge/commit date falls **on or after** the start date. +- **User may say**: "since Monday", "last 5 days", "since last Friday", "Feb 3 to now", "since v0.6.0", or a specific date. +- **Compute**: Start and end of the window. Use **date and time** (not just date) when the start is a version tag so that same-day commits before the tag are excluded. + - **Date-only ranges** (e.g. "since Monday", "Feb 3 to now"): Start = date at midnight, end = today. For "last 5 days" use Monday to Friday; for "since last Friday" use that Friday through today. + - **Version-tag ranges** (e.g. 
"since v0.6.0"): Start = **exact commit timestamp of the tag** (e.g. `git log -1 --format="%cI" v0.6.0` for ISO 8601). End = now or a chosen end date. This ensures commits that landed the same calendar day but before the tag are not included. +- **Git**: Use `git log --since="" --format="%h %ad %s" main` where `` is: + - For date-only: `YYYY-MM-DD` (e.g. `2026-02-03`). Use `--date=short` in the format. + - For version-tag: the tag's commit timestamp in ISO 8601 (e.g. `2026-02-01T15:30:00+00:00`). Use `--date=iso` if you need to compare times. Only include in the changelog items whose commit/merge date is **strictly after** the start when using a tag. --- @@ -21,11 +25,12 @@ Use this skill when the user wants a changelog of what was merged to `main` over From commit messages and dates: +- **Exclude `chore:` commits (mandatory).** Do not list or derive any changelog entry from commits whose subject starts with `chore:` or `chore(...):`. This applies to every section: do not add an improvement, integration, component, or any other bullet based on a chore commit, even if the change seems user-facing (e.g. "Allow running multiple instances" is still a chore and must be omitted). When classifying what landed, skip chore commits entirely; only use `feat:`, `fix:`, `docs:` (for user-facing doc changes), and similar non-chore prefixes as sources for changelog entries. - **New integrations**: Integrations that were **fully added** in the window (base integration registered + first components). Example: SendGrid, Jira. Do **not** count standalone components (e.g. SSH is a component under `pkg/components/ssh`, not an integration). -- **New components and triggers**: Only components/triggers that **first appeared in the time window**. If an integration already existed, list only the new component(s) (e.g. GitHub: Get Release). If the integration is new, list all its components and triggers. Use commit dates to exclude anything that landed before the start date (e.g. 
Cloudflare DNS records merged Feb 1 are excluded if the window is "Monday Feb 3 to now"). -- **Improvements**: User-facing product changes (RBAC, Secrets, Bounty Program, integrations UX, list vs expression, multiple instances). Exclude internal/technical items (e.g. "Component/Trigger Cleanup()", "listing integration resources with additional parameters", Cursor skills). -- **Security**: Vulnerability fixes and security-related changes from the same commit range. Look for commits that mention "security", "SSRF", "CVE", "vulnerability", "auth", "injection", "XSS", "sanitiz", etc. Include a dedicated **Security** section whenever at least one such fix is present. -- **Bug fixes**: Fixes and reliability improvements from the same commit range (excluding security fixes, which go under Security). Keep in "Bug Fixes" even if somewhat technical. +- **New components and triggers**: Only components/triggers that **first appeared in the time window**. If an integration already existed, list only the new component(s) (e.g. GitHub: Get Release). If the integration is new, list all its components and triggers. Use commit timestamps (date and time) to exclude anything that landed before the start of the window (e.g. when the window is "since v0.6.0", exclude commits with timestamp on or before the tag's commit time, so same-day commits before the tag are excluded). +- **Improvements**: User-facing product changes from non-chore commits only (e.g. RBAC, Secrets, integrations UX). Exclude internal/technical items (e.g. "Component/Trigger Cleanup()", "listing integration resources with additional parameters", Cursor skills). Describe each improvement in user-oriented terms: what the user can do, what problem it solves, or what benefit they get (e.g. "Define roles and permissions and control what each user can do" rather than "Permission guard in the UI"). +- **Security**: Vulnerability fixes and security-related changes from the same commit range. 
Look for commits that mention "security", "SSRF", "CVE", "vulnerability", "auth", "injection", "XSS", "sanitiz", etc. Include a dedicated **Security** section whenever at least one such fix is present. Do not list a security fix if it only affects a component or integration that was introduced in this changelog window. +- **Bug fixes**: Fixes and reliability improvements from the same commit range (excluding security fixes, which go under Security). Keep in "Bug Fixes" even if somewhat technical. Do not list a fix if it only affects a component or integration that was introduced in this changelog window (e.g. "fix: AWS ECR timestamp" when ECR was added in the same window). To resolve component/trigger names and which integration they belong to, use `pkg/integrations/*/` and `pkg/components/*/`: check each integration's `Components()` and `Triggers()` and their `Label()` / `Name()` (e.g. `aws.go` for AWS, `ecr/`, `codeartifact/`). @@ -37,9 +42,9 @@ To resolve component/trigger names and which integration they belong to, use `pk - **No "We" language**. Use direct, neutral phrasing (e.g. "Role-based access control." not "We introduced role-based access control."). - **New integrations section**: List only integration names, one per line (e.g. SendGrid, Jira). - **New components section**: Use **Integration:** Component1, Component2, ... One line per integration or standalone component (e.g. **GitHub:** Get Release; **SSH:** Run commands on remote hosts). -- **Improvements**: Each bullet is **Bold label**: Short, user-focused description. No implementation details. No "We". +- **Improvements**: Each bullet is **Bold label**: Short, user-oriented description. Write from the user's perspective: what they can do, what problem it solves, or what benefit they get. Avoid implementation jargon (e.g. "permission guard", "payload limit"); prefer outcome and capability (e.g. 
"Control what each user can do in your organization", "Secrets can be used in the SSH component to store private keys"). No "We". - **Security**: Dedicated section (use only when there are security-related commits). Each bullet: include **CVE identifier** when available (e.g. CVE-2024-12345), then a short description of the vulnerability or fix. If no CVE, use "Fixed: " plus description (e.g. "Fixed: SSRF protection added to HTTP requests"). Same tone as rest of changelog; no em dashes. -- **Bug fixes**: Each bullet starts with "Fixed: " then a short description. Do not list security fixes here; they go under Security. +- **Bug fixes**: Each bullet starts with "Fixed: " then a short description. Do not list security fixes here; they go under Security. Omit fixes that only apply to components or integrations that are new in this changelog. --- @@ -47,6 +52,8 @@ To resolve component/trigger names and which integration they belong to, use `pk Write a single file to `tmp/changelog_YYYY-MM-DD_to_YYYY-MM-DD.md` (or similar) with this structure: +- **Section titles must include the numeric count** for both integrations and components (e.g. "#### 3 new integrations", "#### 12 new components and triggers"). Count each integration as 1. For components and triggers, count each component or trigger as 1 (e.g. one line "**GitHub:** Get Release, On Release" is 2). + ```markdown # SuperPlane Changelog (Feb X-Y, YYYY) @@ -67,7 +74,6 @@ Write a single file to `tmp/changelog_YYYY-MM-DD_to_YYYY-MM-DD.md` (or similar) - **RBAC**: Role-based access control. Define roles and permissions... - **Secrets**: Create, update, and delete organization secrets... - - **Bounty Program**: Get paid for building integrations. See [link]... - (etc.) #### Security @@ -83,15 +89,15 @@ Write a single file to `tmp/changelog_YYYY-MM-DD_to_YYYY-MM-DD.md` (or similar) ``` - Use three spaces before list bullets for indentation under each #### heading. 
-- Counts (N new integrations, M new components and triggers) must match the listed items and the chosen time window. +- Replace N and M with the actual counts. N = number of integrations listed. M = total number of components and triggers (each component or trigger counts as 1, even when several are on one line). Counts must match the listed items and the chosen time window. --- ## 5. Workflow summary -1. Ask for or infer time range (e.g. "Monday to now" = 5 days). -2. Run `git log --since="" --format="%h %ad %s" --date=short main` and optionally inspect merge dates for key PRs. -3. Identify new integrations (whole new integration only), new components/triggers (per integration, only in window), improvements (user-facing only), security fixes (dedicated section; separate from bug fixes), and bug fixes. +1. Ask for or infer time range (e.g. "Monday to now" = 5 days; "since v0.6.0" = after the tag's commit timestamp). +2. Run `git log --since="" --format="%h %ad %s" main` with `` as date (`YYYY-MM-DD`) or as the tag's commit timestamp in ISO 8601 when the range is version-based. Use `--date=short` or `--date=iso` as needed. Optionally inspect merge dates for key PRs. +3. Identify new integrations (whole new integration only), new components/triggers (per integration, only in window), improvements (user-facing only; never derived from chore commits), security fixes (dedicated section; separate from bug fixes), and bug fixes. Do not include or derive any entry from `chore:` or `chore(...):` commits in any section. 4. Resolve labels from code: `pkg/integrations//` and `pkg/components/` for component/trigger names. 5. Write `tmp/changelog_.md` following the structure and format rules above. 6. Tell the user the file path and that they can review or edit it. 
From ed3c00285915e430d8a4c93b983fd3595b71e849 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Tue, 10 Feb 2026 08:48:34 +0100 Subject: [PATCH 035/160] chore: Remove empty WIP file MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- docs/contributing/integration_and_component_checklist.md | 1 - 1 file changed, 1 deletion(-) delete mode 100644 docs/contributing/integration_and_component_checklist.md diff --git a/docs/contributing/integration_and_component_checklist.md b/docs/contributing/integration_and_component_checklist.md deleted file mode 100644 index 00d7bdd40d..0000000000 --- a/docs/contributing/integration_and_component_checklist.md +++ /dev/null @@ -1 +0,0 @@ -WIP From 4fb4db6b9fdc72bd98889e6b640c12b98585fa80 Mon Sep 17 00:00:00 2001 From: Konstantin Komelin Date: Tue, 10 Feb 2026 09:25:09 +0100 Subject: [PATCH 036/160] fix: prevent option selection reset in AutoCompleteSelect (#2931) ## Summary - Fix a race condition in `AutoCompleteSelect` where selecting an option would sometimes reset to the previous value, particularly noticeable when using a laptop touchpad - Move `preventDefault` from `click` to `onMouseDown` on option items to actually prevent the input blur from firing during selection - Remove duplicate `onClick` handler on the inner div that caused `handleOptionSelect` to fire twice per click via event bubbling ## Demo [Watch demo video](https://www.loom.com/share/2f5ea84b99fb4a9dbd203cea6b31f0b6) ## Root cause When clicking an option, the browser fires `mousedown` -> input `blur` -> `click` in sequence. The blur handler checked `e.relatedTarget` to detect if focus moved to the dropdown list, but since option `
    ` elements aren't focusable, `relatedTarget` was always `null`. This caused the blur handler to run and close the dropdown, racing with the click event. Touchpad clicks have slightly different timing than mouse clicks, making the race condition more likely to trigger. ## Test plan - [ ] Select options in any AutoCompleteSelect across the app using a touchpad (e.g. model picker in Claude -> Create Message) - should work reliably - [ ] Select options using a mouse - should work reliably - [ ] Type to filter, then select a filtered option - should work - [ ] Press Escape to close the dropdown - should still work - [ ] Click outside the dropdown - should still close it - [ ] `make check.build.ui` passes Signed-off-by: Konstantin Komelin Co-authored-by: Claude Opus 4.6 Co-authored-by: Aleksandar Mitrovic <61409859+AleksandarCole@users.noreply.github.com> Signed-off-by: Muhammad Fuzail Zubari --- .../AutoCompleteSelect/AutoCompleteSelect.tsx | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/web_src/src/components/AutoCompleteSelect/AutoCompleteSelect.tsx b/web_src/src/components/AutoCompleteSelect/AutoCompleteSelect.tsx index 6e7b3f3be6..4df7fdf28c 100644 --- a/web_src/src/components/AutoCompleteSelect/AutoCompleteSelect.tsx +++ b/web_src/src/components/AutoCompleteSelect/AutoCompleteSelect.tsx @@ -221,15 +221,10 @@ export function AutoCompleteSelect({ role="option" aria-selected={isSelected} className="relative cursor-pointer select-none px-3 py-2 text-sm hover:bg-blue-500 hover:text-white text-gray-800 dark:text-gray-100" - onClick={(e) => { - e.preventDefault(); // Prevent blur from firing - handleOptionSelect(option.value); - }} + onMouseDown={(e) => e.preventDefault()} + onClick={() => handleOptionSelect(option.value)} > -
    handleOptionSelect(option.value)} - > +
    {option.label} From 0d24015b8015e824be921ef61ac28d7a7cd7be20 Mon Sep 17 00:00:00 2001 From: e-todorovski-bm <132341821+e-todorovski-bm@users.noreply.github.com> Date: Tue, 10 Feb 2026 09:25:58 +0100 Subject: [PATCH 037/160] feat: PagerDuty list notes (#2857) UI: Screenshot 2026-02-04 at 10 29 21 Screenshot 2026-02-04 at 10 29 32 Screenshot 2026-02-04 at 10 29 46 Screenshot 2026-02-04 at 10 29 57 --------- Signed-off-by: Emil Todorovski Signed-off-by: Muhammad Fuzail Zubari --- docs/components/PagerDuty.mdx | 83 +++++++ pkg/integrations/pagerduty/client.go | 34 +++ pkg/integrations/pagerduty/example.go | 10 + .../pagerduty/example_output_list_notes.json | 51 +++++ pkg/integrations/pagerduty/list_notes.go | 146 ++++++++++++ pkg/integrations/pagerduty/list_notes_test.go | 209 ++++++++++++++++++ pkg/integrations/pagerduty/pagerduty.go | 1 + .../workflowv2/mappers/pagerduty/index.ts | 3 + .../mappers/pagerduty/list_notes.ts | 131 +++++++++++ .../workflowv2/mappers/pagerduty/types.ts | 21 ++ 10 files changed, 689 insertions(+) create mode 100644 pkg/integrations/pagerduty/example_output_list_notes.json create mode 100644 pkg/integrations/pagerduty/list_notes.go create mode 100644 pkg/integrations/pagerduty/list_notes_test.go create mode 100644 web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts diff --git a/docs/components/PagerDuty.mdx b/docs/components/PagerDuty.mdx index 6a8c9e60d5..7e352f496b 100644 --- a/docs/components/PagerDuty.mdx +++ b/docs/components/PagerDuty.mdx @@ -20,6 +20,7 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + @@ -653,6 +654,88 @@ Returns a list of open incidents with: } ``` + + +## List Notes + +The List Notes component retrieves all notes (timeline entries) for a PagerDuty incident. 
+ +### Use Cases + +- **Incident review**: Review all notes added to an incident +- **Timeline reconstruction**: Build a timeline of incident updates +- **Audit trail**: Access the history of notes for compliance or review +- **Note analysis**: Process or analyze notes for patterns or keywords + +### Configuration + +- **Incident ID**: The ID of the incident to list notes for (e.g., A12BC34567...) + +### Output + +Returns a list of notes with: +- **id**: Note ID +- **content**: The note content +- **created_at**: When the note was created +- **user**: The user who created the note +- **channel**: The channel through which the note was created + +### Example Output + +```json +{ + "data": { + "notes": [ + { + "channel": { + "type": "web_ui" + }, + "content": "Investigation started. Checking server logs for anomalies.", + "created_at": "2024-01-15T10:30:00Z", + "id": "PVL9NF8", + "user": { + "html_url": "https://acme.pagerduty.com/users/PLH1HKV", + "id": "PLH1HKV", + "summary": "John Smith", + "type": "user_reference" + } + }, + { + "channel": { + "type": "web_ui" + }, + "content": "Root cause identified: memory leak in the cache service. Deploying fix now.", + "created_at": "2024-01-15T10:45:00Z", + "id": "PVL9NF9", + "user": { + "html_url": "https://acme.pagerduty.com/users/PLH1HKV", + "id": "PLH1HKV", + "summary": "John Smith", + "type": "user_reference" + } + }, + { + "channel": { + "type": "api" + }, + "content": "Fix deployed successfully. 
Monitoring for stability.", + "created_at": "2024-01-15T11:00:00Z", + "id": "PVL9NFA", + "user": { + "html_url": "https://acme.pagerduty.com/users/PLH1HKW", + "id": "PLH1HKW", + "summary": "Jane Doe", + "type": "user_reference" + } + } + ], + "total": 3 + }, + "timestamp": "2024-01-15T11:05:00Z", + "type": "pagerduty.notes.list" +} +``` + ## Snooze Incident diff --git a/pkg/integrations/pagerduty/client.go b/pkg/integrations/pagerduty/client.go index 410ac0bc7c..e6f68c5a14 100644 --- a/pkg/integrations/pagerduty/client.go +++ b/pkg/integrations/pagerduty/client.go @@ -708,3 +708,37 @@ func (c *Client) SnoozeIncident(incidentID string, fromEmail string, duration in return response, nil } + +// Note represents a note on a PagerDuty incident +type Note struct { + ID string `json:"id"` + User *ServiceRef `json:"user"` + Channel *NoteChannel `json:"channel"` + Content string `json:"content"` + CreatedAt string `json:"created_at"` +} + +// NoteChannel represents the channel through which a note was created +type NoteChannel struct { + Type string `json:"type"` +} + +// ListIncidentNotes retrieves all notes for a given incident +func (c *Client) ListIncidentNotes(incidentID string) ([]Note, error) { + apiURL := fmt.Sprintf("%s/incidents/%s/notes", c.BaseURL, incidentID) + responseBody, err := c.execRequest(http.MethodGet, apiURL, nil) + if err != nil { + return nil, err + } + + var response struct { + Notes []Note `json:"notes"` + } + + err = json.Unmarshal(responseBody, &response) + if err != nil { + return nil, fmt.Errorf("error parsing response: %v", err) + } + + return response.Notes, nil +} diff --git a/pkg/integrations/pagerduty/example.go b/pkg/integrations/pagerduty/example.go index 1e9e35c266..40c732f0b2 100644 --- a/pkg/integrations/pagerduty/example.go +++ b/pkg/integrations/pagerduty/example.go @@ -55,6 +55,12 @@ var exampleOutputListIncidentsBytes []byte var exampleOutputListIncidentsOnce sync.Once var exampleOutputListIncidents map[string]any +//go:embed 
example_output_list_notes.json +var exampleOutputListNotesBytes []byte + +var exampleOutputListNotesOnce sync.Once +var exampleOutputListNotes map[string]any + func (c *CreateIncident) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateIncidentOnce, exampleOutputCreateIncidentBytes, &exampleOutputCreateIncident) } @@ -75,6 +81,10 @@ func (l *ListIncidents) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputListIncidentsOnce, exampleOutputListIncidentsBytes, &exampleOutputListIncidents) } +func (l *ListNotes) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputListNotesOnce, exampleOutputListNotesBytes, &exampleOutputListNotes) +} + func (t *OnIncident) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleDataOnIncidentOnce, exampleDataOnIncidentBytes, &exampleDataOnIncident) } diff --git a/pkg/integrations/pagerduty/example_output_list_notes.json b/pkg/integrations/pagerduty/example_output_list_notes.json new file mode 100644 index 0000000000..c206ae167f --- /dev/null +++ b/pkg/integrations/pagerduty/example_output_list_notes.json @@ -0,0 +1,51 @@ +{ + "type": "pagerduty.notes.list", + "data": { + "notes": [ + { + "id": "PVL9NF8", + "user": { + "id": "PLH1HKV", + "type": "user_reference", + "summary": "John Smith", + "html_url": "https://acme.pagerduty.com/users/PLH1HKV" + }, + "channel": { + "type": "web_ui" + }, + "content": "Investigation started. Checking server logs for anomalies.", + "created_at": "2024-01-15T10:30:00Z" + }, + { + "id": "PVL9NF9", + "user": { + "id": "PLH1HKV", + "type": "user_reference", + "summary": "John Smith", + "html_url": "https://acme.pagerduty.com/users/PLH1HKV" + }, + "channel": { + "type": "web_ui" + }, + "content": "Root cause identified: memory leak in the cache service. 
Deploying fix now.", + "created_at": "2024-01-15T10:45:00Z" + }, + { + "id": "PVL9NFA", + "user": { + "id": "PLH1HKW", + "type": "user_reference", + "summary": "Jane Doe", + "html_url": "https://acme.pagerduty.com/users/PLH1HKW" + }, + "channel": { + "type": "api" + }, + "content": "Fix deployed successfully. Monitoring for stability.", + "created_at": "2024-01-15T11:00:00Z" + } + ], + "total": 3 + }, + "timestamp": "2024-01-15T11:05:00Z" +} diff --git a/pkg/integrations/pagerduty/list_notes.go b/pkg/integrations/pagerduty/list_notes.go new file mode 100644 index 0000000000..13de73b49a --- /dev/null +++ b/pkg/integrations/pagerduty/list_notes.go @@ -0,0 +1,146 @@ +package pagerduty + +import ( + "errors" + "fmt" + "net/http" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type ListNotes struct{} + +type ListNotesSpec struct { + IncidentID string `json:"incidentId"` +} + +func (l *ListNotes) Name() string { + return "pagerduty.listNotes" +} + +func (l *ListNotes) Label() string { + return "List Notes" +} + +func (l *ListNotes) Description() string { + return "List all notes (timeline entries) for a PagerDuty incident" +} + +func (l *ListNotes) Documentation() string { + return `The List Notes component retrieves all notes (timeline entries) for a PagerDuty incident. + +## Use Cases + +- **Incident review**: Review all notes added to an incident +- **Timeline reconstruction**: Build a timeline of incident updates +- **Audit trail**: Access the history of notes for compliance or review +- **Note analysis**: Process or analyze notes for patterns or keywords + +## Configuration + +- **Incident ID**: The ID of the incident to list notes for (e.g., A12BC34567...) 
+ +## Output + +Returns a list of notes with: +- **id**: Note ID +- **content**: The note content +- **created_at**: When the note was created +- **user**: The user who created the note +- **channel**: The channel through which the note was created` +} + +func (l *ListNotes) Icon() string { + return "message-square" +} + +func (l *ListNotes) Color() string { + return "gray" +} + +func (l *ListNotes) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (l *ListNotes) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "incidentId", + Label: "Incident ID", + Type: configuration.FieldTypeString, + Required: true, + Description: "The ID of the incident to list notes for (e.g., A12BC34567...)", + Placeholder: "e.g., A12BC34567...", + }, + } +} + +func (l *ListNotes) Setup(ctx core.SetupContext) error { + spec := ListNotesSpec{} + err := mapstructure.Decode(ctx.Configuration, &spec) + if err != nil { + return fmt.Errorf("error decoding configuration: %v", err) + } + + if spec.IncidentID == "" { + return errors.New("incidentId is required") + } + + return ctx.Metadata.Set(NodeMetadata{}) +} + +func (l *ListNotes) Execute(ctx core.ExecutionContext) error { + spec := ListNotesSpec{} + err := mapstructure.Decode(ctx.Configuration, &spec) + if err != nil { + return fmt.Errorf("error decoding configuration: %v", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("error creating client: %v", err) + } + + notes, err := client.ListIncidentNotes(spec.IncidentID) + if err != nil { + return fmt.Errorf("failed to list notes: %v", err) + } + + responseData := map[string]any{ + "notes": notes, + "total": len(notes), + } + + return ctx.ExecutionState.Emit( + core.DefaultOutputChannel.Name, + "pagerduty.notes.list", + []any{responseData}, + ) +} + +func (l *ListNotes) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func 
(l *ListNotes) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (l *ListNotes) Actions() []core.Action { + return []core.Action{} +} + +func (l *ListNotes) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (l *ListNotes) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (l *ListNotes) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/pagerduty/list_notes_test.go b/pkg/integrations/pagerduty/list_notes_test.go new file mode 100644 index 0000000000..4bd83c5193 --- /dev/null +++ b/pkg/integrations/pagerduty/list_notes_test.go @@ -0,0 +1,209 @@ +package pagerduty + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__ListNotes__Setup(t *testing.T) { + component := &ListNotes{} + + t.Run("valid configuration", func(t *testing.T) { + metadataCtx := &contexts.MetadataContext{} + + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "incidentId": "PT4KHLK", + }, + Metadata: metadataCtx, + }) + + require.NoError(t, err) + }) + + t.Run("missing incidentId returns error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{}, + Metadata: &contexts.MetadataContext{}, + }) + + require.ErrorContains(t, err, "incidentId is required") + }) + + t.Run("empty incidentId returns error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "incidentId": "", + }, + Metadata: &contexts.MetadataContext{}, + }) + + require.ErrorContains(t, err, "incidentId is required") + }) +} + +func Test__ListNotes__Execute(t *testing.T) { + component := &ListNotes{} + + t.Run("successfully lists notes", func(t 
*testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + { + "notes": [ + { + "id": "PVL9NF8", + "user": { + "id": "PLH1HKV", + "type": "user_reference", + "summary": "John Smith", + "html_url": "https://acme.pagerduty.com/users/PLH1HKV" + }, + "channel": { + "type": "web_ui" + }, + "content": "Investigation started. Checking server logs.", + "created_at": "2024-01-15T10:30:00Z" + }, + { + "id": "PVL9NF9", + "user": { + "id": "PLH1HKV", + "type": "user_reference", + "summary": "John Smith", + "html_url": "https://acme.pagerduty.com/users/PLH1HKV" + }, + "channel": { + "type": "web_ui" + }, + "content": "Root cause identified. Deploying fix.", + "created_at": "2024-01-15T10:45:00Z" + } + ] + } + `)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypeAPIToken, + "apiToken": "test-token", + }, + } + + execCtx := &contexts.ExecutionStateContext{} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "incidentId": "PT4KHLK", + }, + HTTP: httpContext, + Integration: integrationCtx, + ExecutionState: execCtx, + }) + + require.NoError(t, err) + assert.True(t, execCtx.Finished) + assert.True(t, execCtx.Passed) + assert.Equal(t, "pagerduty.notes.list", execCtx.Type) + assert.Equal(t, core.DefaultOutputChannel.Name, execCtx.Channel) + + // Verify the request was made correctly + require.Len(t, httpContext.Requests, 1) + assert.Equal(t, http.MethodGet, httpContext.Requests[0].Method) + assert.Contains(t, httpContext.Requests[0].URL.String(), "/incidents/PT4KHLK/notes") + + // Verify response contains expected data + require.Len(t, execCtx.Payloads, 1) + wrappedPayload, ok := execCtx.Payloads[0].(map[string]any) + require.True(t, ok) + responseData, ok := wrappedPayload["data"].(map[string]any) + require.True(t, ok) + assert.Equal(t, 2, responseData["total"]) + }) + + 
t.Run("successfully lists empty notes", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + { + "notes": [] + } + `)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypeAPIToken, + "apiToken": "test-token", + }, + } + + execCtx := &contexts.ExecutionStateContext{} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "incidentId": "PT4KHLK", + }, + HTTP: httpContext, + Integration: integrationCtx, + ExecutionState: execCtx, + }) + + require.NoError(t, err) + assert.True(t, execCtx.Finished) + assert.True(t, execCtx.Passed) + + // Verify response contains expected data + require.Len(t, execCtx.Payloads, 1) + wrappedPayload, ok := execCtx.Payloads[0].(map[string]any) + require.True(t, ok) + responseData, ok := wrappedPayload["data"].(map[string]any) + require.True(t, ok) + assert.Equal(t, 0, responseData["total"]) + }) + + t.Run("API error returns error", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusNotFound, + Body: io.NopCloser(strings.NewReader(`{"error": "Incident not found"}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypeAPIToken, + "apiToken": "test-token", + }, + } + + execCtx := &contexts.ExecutionStateContext{} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "incidentId": "INVALID", + }, + HTTP: httpContext, + Integration: integrationCtx, + ExecutionState: execCtx, + }) + + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to list notes") + }) +} diff --git a/pkg/integrations/pagerduty/pagerduty.go b/pkg/integrations/pagerduty/pagerduty.go index a474f7baf7..11861f6844 100644 --- a/pkg/integrations/pagerduty/pagerduty.go +++ 
b/pkg/integrations/pagerduty/pagerduty.go @@ -138,6 +138,7 @@ func (p *PagerDuty) Components() []core.Component { &UpdateIncident{}, &AnnotateIncident{}, &ListIncidents{}, + &ListNotes{}, &SnoozeIncident{}, } } diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts index c2fc24ba36..28ea68f434 100644 --- a/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts @@ -6,6 +6,7 @@ import { createIncidentMapper } from "./create_incident"; import { updateIncidentMapper } from "./update_incident"; import { annotateIncidentMapper } from "./annotate_incident"; import { listIncidentsMapper, LIST_INCIDENTS_STATE_REGISTRY } from "./list_incidents"; +import { listNotesMapper } from "./list_notes"; import { snoozeIncidentMapper } from "./snooze_incident"; import { buildActionStateRegistry } from "../utils"; @@ -14,6 +15,7 @@ export const componentMappers: Record = { updateIncident: updateIncidentMapper, annotateIncident: annotateIncidentMapper, listIncidents: listIncidentsMapper, + listNotes: listNotesMapper, snoozeIncident: snoozeIncidentMapper, }; @@ -28,5 +30,6 @@ export const eventStateRegistry: Record = { updateIncident: buildActionStateRegistry("updated"), annotateIncident: buildActionStateRegistry("annotated"), listIncidents: LIST_INCIDENTS_STATE_REGISTRY, + listNotes: buildActionStateRegistry("listed"), snoozeIncident: buildActionStateRegistry("snoozed"), }; diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts new file mode 100644 index 0000000000..c0a8b41e2f --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts @@ -0,0 +1,131 @@ +import { + ComponentsNode, + ComponentsComponent, + CanvasesCanvasNodeExecution, + CanvasesCanvasNodeQueueItem, +} from "@/api-client"; +import { ComponentBaseProps, EventSection } from 
"@/ui/componentBase"; +import { getBackgroundColorClass } from "@/utils/colors"; +import { getState, getStateMap, getTriggerRenderer } from ".."; +import { ComponentBaseMapper, OutputPayload } from "../types"; +import { MetadataItem } from "@/ui/metadataList"; +import pdIcon from "@/assets/icons/integrations/pagerduty.svg"; +import { formatTimeAgo } from "@/utils/date"; +import { ListNotesResponse, Note } from "./types"; + +/** + * Extracts the first payload from execution outputs. + */ +function getFirstPayload(execution: CanvasesCanvasNodeExecution): OutputPayload | null { + const outputs = execution.outputs as { default?: OutputPayload[] } | undefined; + if (!outputs) return null; + + if (outputs.default && outputs.default.length > 0) { + return outputs.default[0]; + } + + return null; +} + +/** + * Extracts notes from the execution payload. + */ +function getNotes(execution: CanvasesCanvasNodeExecution): Note[] { + const payload = getFirstPayload(execution); + if (!payload || !payload.data) return []; + + const responseData = payload.data as ListNotesResponse | undefined; + if (!responseData || !responseData.notes) return []; + + return responseData.notes; +} + +export const listNotesMapper: ComponentBaseMapper = { + props( + nodes: ComponentsNode[], + node: ComponentsNode, + componentDefinition: ComponentsComponent, + lastExecutions: CanvasesCanvasNodeExecution[], + _?: CanvasesCanvasNodeQueueItem[], + ): ComponentBaseProps { + const lastExecution = lastExecutions.length > 0 ? lastExecutions[0] : null; + const componentName = componentDefinition.name || node.component?.name || "unknown"; + + return { + iconSrc: pdIcon, + collapsedBackground: getBackgroundColorClass(componentDefinition.color), + collapsed: node.isCollapsed, + title: node.name || componentDefinition.label || componentDefinition.name || "Unnamed component", + eventSections: lastExecution ? 
baseEventSections(nodes, lastExecution, componentName) : undefined, + metadata: metadataList(node), + includeEmptyState: !lastExecution, + eventStateMap: getStateMap(componentName), + }; + }, + + subtitle(_node: ComponentsNode, execution: CanvasesCanvasNodeExecution): string { + const timeAgo = formatTimeAgo(new Date(execution.createdAt!)); + const notes = getNotes(execution); + + if (notes.length > 0) { + return `${notes.length} note${notes.length === 1 ? "" : "s"} · ${timeAgo}`; + } + + return `no notes · ${timeAgo}`; + }, + + getExecutionDetails(execution: CanvasesCanvasNodeExecution, _: ComponentsNode): Record { + const details: Record = {}; + + // Add "Checked at" timestamp + if (execution.createdAt) { + details["Checked at"] = new Date(execution.createdAt).toLocaleString(); + } + + const notes = getNotes(execution); + details["Notes"] = `${notes.length} note${notes.length === 1 ? "" : "s"} fetched`; + + return details; + }, +}; + +function metadataList(node: ComponentsNode): MetadataItem[] { + const metadata: MetadataItem[] = []; + const configuration = node.configuration as any; + + if (configuration.incidentId) { + metadata.push({ icon: "alert-triangle", label: `Incident: ${configuration.incidentId}` }); + } + + return metadata; +} + +function baseEventSections( + nodes: ComponentsNode[], + execution: CanvasesCanvasNodeExecution, + componentName: string, +): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.trigger?.name || ""); + const { title } = rootTriggerRenderer.getTitleAndSubtitle(execution.rootEvent!); + + const notes = getNotes(execution); + const timeAgo = formatTimeAgo(new Date(execution.createdAt!)); + + let eventSubtitle: string; + if (notes.length > 0) { + eventSubtitle = `${notes.length} note${notes.length === 1 ? 
"" : "s"} · ${timeAgo}`; + } else { + eventSubtitle = `no notes · ${timeAgo}`; + } + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle, + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts index 1b84e8d711..e0ff6c05db 100644 --- a/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts @@ -46,3 +46,24 @@ export interface ListIncidentsResponse { incidents: Incident[]; total: number; } + +export interface Note { + id?: string; + content?: string; + created_at?: string; + user?: ResourceRef; + channel?: NoteChannel; +} + +export interface NoteChannel { + type?: string; +} + +export interface ListNotesConfiguration { + incidentId?: string; +} + +export interface ListNotesResponse { + notes: Note[]; + total: number; +} From b4af9dd31cc5538a476473796c5896c477a785e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Tue, 10 Feb 2026 11:33:46 +0100 Subject: [PATCH 038/160] chore: Fix frontend mappers to use new format (#2996) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- .../mappers/pagerduty/list_notes.ts | 72 +++++++++---------- 1 file changed, 34 insertions(+), 38 deletions(-) diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts index c0a8b41e2f..76d0fd602f 100644 --- a/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts @@ -1,13 +1,15 @@ -import { - ComponentsNode, - ComponentsComponent, - CanvasesCanvasNodeExecution, - CanvasesCanvasNodeQueueItem, -} from "@/api-client"; import { 
ComponentBaseProps, EventSection } from "@/ui/componentBase"; import { getBackgroundColorClass } from "@/utils/colors"; import { getState, getStateMap, getTriggerRenderer } from ".."; -import { ComponentBaseMapper, OutputPayload } from "../types"; +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + ExecutionInfo, + NodeInfo, + OutputPayload, + SubtitleContext, +} from "../types"; import { MetadataItem } from "@/ui/metadataList"; import pdIcon from "@/assets/icons/integrations/pagerduty.svg"; import { formatTimeAgo } from "@/utils/date"; @@ -16,7 +18,7 @@ import { ListNotesResponse, Note } from "./types"; /** * Extracts the first payload from execution outputs. */ -function getFirstPayload(execution: CanvasesCanvasNodeExecution): OutputPayload | null { +function getFirstPayload(execution: ExecutionInfo): OutputPayload | null { const outputs = execution.outputs as { default?: OutputPayload[] } | undefined; if (!outputs) return null; @@ -30,7 +32,7 @@ function getFirstPayload(execution: CanvasesCanvasNodeExecution): OutputPayload /** * Extracts notes from the execution payload. */ -function getNotes(execution: CanvasesCanvasNodeExecution): Note[] { +function getNotes(execution: ExecutionInfo): Note[] { const payload = getFirstPayload(execution); if (!payload || !payload.data) return []; @@ -41,31 +43,29 @@ function getNotes(execution: CanvasesCanvasNodeExecution): Note[] { } export const listNotesMapper: ComponentBaseMapper = { - props( - nodes: ComponentsNode[], - node: ComponentsNode, - componentDefinition: ComponentsComponent, - lastExecutions: CanvasesCanvasNodeExecution[], - _?: CanvasesCanvasNodeQueueItem[], - ): ComponentBaseProps { - const lastExecution = lastExecutions.length > 0 ? lastExecutions[0] : null; - const componentName = componentDefinition.name || node.component?.name || "unknown"; + props(context: ComponentBaseContext): ComponentBaseProps { + const lastExecution = context.lastExecutions.length > 0 ? 
context.lastExecutions[0] : null; + const componentName = context.componentDefinition.name ?? "pagerduty"; return { iconSrc: pdIcon, - collapsedBackground: getBackgroundColorClass(componentDefinition.color), - collapsed: node.isCollapsed, - title: node.name || componentDefinition.label || componentDefinition.name || "Unnamed component", - eventSections: lastExecution ? baseEventSections(nodes, lastExecution, componentName) : undefined, - metadata: metadataList(node), + collapsedBackground: getBackgroundColorClass(context.componentDefinition.color), + collapsed: context.node.isCollapsed, + title: + context.node.name || + context.componentDefinition.label || + context.componentDefinition.name || + "Unnamed component", + eventSections: lastExecution ? baseEventSections(context.nodes, lastExecution, componentName) : undefined, + metadata: metadataList(context.node), includeEmptyState: !lastExecution, eventStateMap: getStateMap(componentName), }; }, - subtitle(_node: ComponentsNode, execution: CanvasesCanvasNodeExecution): string { - const timeAgo = formatTimeAgo(new Date(execution.createdAt!)); - const notes = getNotes(execution); + subtitle(context: SubtitleContext): string { + const timeAgo = formatTimeAgo(new Date(context.execution.createdAt!)); + const notes = getNotes(context.execution); if (notes.length > 0) { return `${notes.length} note${notes.length === 1 ? 
"" : "s"} · ${timeAgo}`; @@ -74,22 +74,22 @@ export const listNotesMapper: ComponentBaseMapper = { return `no notes · ${timeAgo}`; }, - getExecutionDetails(execution: CanvasesCanvasNodeExecution, _: ComponentsNode): Record { + getExecutionDetails(context: ExecutionDetailsContext): Record { const details: Record = {}; // Add "Checked at" timestamp - if (execution.createdAt) { - details["Checked at"] = new Date(execution.createdAt).toLocaleString(); + if (context.execution.createdAt) { + details["Checked at"] = new Date(context.execution.createdAt).toLocaleString(); } - const notes = getNotes(execution); + const notes = getNotes(context.execution); details["Notes"] = `${notes.length} note${notes.length === 1 ? "" : "s"} fetched`; return details; }, }; -function metadataList(node: ComponentsNode): MetadataItem[] { +function metadataList(node: NodeInfo): MetadataItem[] { const metadata: MetadataItem[] = []; const configuration = node.configuration as any; @@ -100,14 +100,10 @@ function metadataList(node: ComponentsNode): MetadataItem[] { return metadata; } -function baseEventSections( - nodes: ComponentsNode[], - execution: CanvasesCanvasNodeExecution, - componentName: string, -): EventSection[] { +function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); - const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.trigger?.name || ""); - const { title } = rootTriggerRenderer.getTitleAndSubtitle(execution.rootEvent!); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName ?? ""); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent! 
}); const notes = getNotes(execution); const timeAgo = formatTimeAgo(new Date(execution.createdAt!)); From 9374544c79c9b10518e2d26284bbf0dedeca0752 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Tue, 10 Feb 2026 12:32:20 +0100 Subject: [PATCH 039/160] Update available components count in README Updated the number of available components in the README. Signed-off-by: Muhammad Fuzail Zubari --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e1ec1b6047..31a190e240 100644 --- a/README.md +++ b/README.md @@ -66,7 +66,7 @@ This section gives a quick snapshot of what SuperPlane already supports and what **Available now** -✓ 40+ starter components (10+ core, 30+ integrations) +✓ 75+ components ✓ Event-driven workflow engine ✓ Visual Canvas builder ✓ Run history, event chain view, debug console From f2301c73d052037c4209788fbf1ed6d5e07545db Mon Sep 17 00:00:00 2001 From: Prudhviraj Kb <96768786+prudh-vi@users.noreply.github.com> Date: Tue, 10 Feb 2026 19:54:08 +0530 Subject: [PATCH 040/160] feat: add snap-to-grid toggle to workflow canvas (#2925) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary This PR adds a snap-to-grid toggle feature to the workflow canvas as described in #2813 ## Changes ### Canvas Integration (`app/web/src/ui/CanvasPage/index.tsx`) * Added `isSnapToGridEnabled` state to track grid snapping status * Passed `snapToGrid={isSnapToGridEnabled}` and `snapGrid={[16, 16]}` props to ReactFlow component * Grid spacing set to 16x16 pixels for consistent alignment ```tsx const [isSnapToGridEnabled, setIsSnapToGridEnabled] = useState(false); ``` ### UI Components (`app/web/src/components/zoom-slider.tsx`) * Added grid toggle button to the ZoomSlider toolbar (bottom-left) * Uses `Grid3X3` icon from lucide-react * Button variant changes based on state (default when enabled, ghost when disabled) * Added tooltip: "Enable/Disable 
snap to grid" for user guidance ```tsx {isSnapToGridEnabled ? "Disable snap to grid" : "Enable snap to grid"} ``` ### Functionality * **Toggle OFF (default)**: Nodes can be placed at any pixel position (free-form) * **Toggle ON**: Nodes automatically snap to nearest 16px grid intersection * Preserves all existing canvas functionality (zoom, pan, selection, etc.) ## Testing * ✅ Verified free-form placement when toggle is disabled * ✅ Verified nodes snap to 16px grid systematically when toggle is enabled * ✅ Toggle works smoothly without affecting other canvas operations * ✅ No console errors or visual glitches ## Before https://github.com/user-attachments/assets/e277cc8f-c894-4f4c-aa65-9c963ba3bda5 ## After https://github.com/user-attachments/assets/52fa0689-59cd-4c34-8ece-e304cac19b71 --------- Signed-off-by: prudh-vi Signed-off-by: Muhammad Fuzail Zubari --- web_src/src/components/zoom-slider.tsx | 21 ++++++++++++++++++++- web_src/src/ui/CanvasPage/index.tsx | 7 ++++++- 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/web_src/src/components/zoom-slider.tsx b/web_src/src/components/zoom-slider.tsx index 393cc6f693..550946f6da 100644 --- a/web_src/src/components/zoom-slider.tsx +++ b/web_src/src/components/zoom-slider.tsx @@ -1,7 +1,7 @@ "use client"; import React, { useCallback, useEffect } from "react"; -import { Camera, Maximize, Minus, MousePointer2, Plus } from "lucide-react"; +import { Camera, Grid3X3, Maximize, Minus, MousePointer2, Plus } from "lucide-react"; import { toPng } from "html-to-image"; import { @@ -26,6 +26,8 @@ export function ZoomSlider({ isSelectionModeEnabled, onSelectionModeToggle, screenshotName, + isSnapToGridEnabled, + onSnapToGridToggle, ...props }: Omit & { orientation?: "horizontal" | "vertical"; @@ -33,6 +35,8 @@ export function ZoomSlider({ isSelectionModeEnabled?: boolean; onSelectionModeToggle?: () => void; screenshotName?: string; + isSnapToGridEnabled?: boolean; + onSnapToGridToggle?: () => void; }) { const { zoom } 
= useViewport(); const { zoomTo, zoomIn, zoomOut, fitView, getNodes } = useReactFlow(); @@ -221,6 +225,21 @@ export function ZoomSlider({ )} + {onSnapToGridToggle && ( + + + + + {isSnapToGridEnabled ? "Disable snap to grid" : "Enable snap to grid"} + + )} {children} diff --git a/web_src/src/ui/CanvasPage/index.tsx b/web_src/src/ui/CanvasPage/index.tsx index f37476e90d..33efafcf50 100644 --- a/web_src/src/ui/CanvasPage/index.tsx +++ b/web_src/src/ui/CanvasPage/index.tsx @@ -1439,6 +1439,7 @@ function CanvasContent({ const [logSearch, setLogSearch] = useState(""); const [expandedRuns, setExpandedRuns] = useState>(() => new Set()); const [logSidebarHeight, setLogSidebarHeight] = useState(320); + const [isSnapToGridEnabled, setIsSnapToGridEnabled] = useState(true); useEffect(() => { const activeNoteId = getActiveNoteId(); @@ -2101,6 +2102,8 @@ function CanvasContent({ selectionKeyCode: selectionKey, multiSelectionKeyCode: selectionKey, })} + snapToGrid={isSnapToGridEnabled} + snapGrid={[48, 48]} panOnScrollSpeed={0.8} nodesDraggable={!isReadOnly} nodesConnectable={!isReadOnly && !!onEdgeCreate} @@ -2133,6 +2136,8 @@ function CanvasContent({ setIsTemporarilyEnabled(false); }} screenshotName={title} + isSnapToGridEnabled={isSnapToGridEnabled} + onSnapToGridToggle={() => setIsSnapToGridEnabled((prev) => !prev)} > @@ -2168,7 +2173,7 @@ function CanvasContent({ From 777ec038a147262c7acac9c1e72248ea0e8d7d36 Mon Sep 17 00:00:00 2001 From: Aleksandar Mitrovic <61409859+AleksandarCole@users.noreply.github.com> Date: Tue, 10 Feb 2026 15:31:05 +0100 Subject: [PATCH 041/160] chore: Overhaul of the researcher skill (#3007) Signed-off-by: Muhammad Fuzail Zubari --- .cursor/PM_WORKFLOW_HELPER.md | 102 --------------- .cursor/agents/integration-issue-logger.md | 29 ----- .cursor/agents/integration-researcher.md | 37 ------ .../commands/integration-issue-templates.md | 39 ------ .cursor/commands/integration-log-issues.md | 35 ----- .cursor/commands/integration-research.md | 47 ------- 
.cursor/commands/research-extension.md | 19 +++ .cursor/commands/research-integration.md | 19 +++ .../SKILL.md | 80 ------------ .../SKILL.md | 91 ------------- .../SKILL.md | 46 ------- .../superplane-integration-research/SKILL.md | 24 ++++ docs/contributing/integration-pm-workflow.md | 123 ------------------ 13 files changed, 62 insertions(+), 629 deletions(-) delete mode 100644 .cursor/PM_WORKFLOW_HELPER.md delete mode 100644 .cursor/agents/integration-issue-logger.md delete mode 100644 .cursor/agents/integration-researcher.md delete mode 100644 .cursor/commands/integration-issue-templates.md delete mode 100644 .cursor/commands/integration-log-issues.md delete mode 100644 .cursor/commands/integration-research.md create mode 100644 .cursor/commands/research-extension.md create mode 100644 .cursor/commands/research-integration.md delete mode 100644 .cursor/skills/superplane-integration-issue-templates/SKILL.md delete mode 100644 .cursor/skills/superplane-integration-log-issues-github/SKILL.md delete mode 100644 .cursor/skills/superplane-integration-prioritization/SKILL.md create mode 100644 .cursor/skills/superplane-integration-research/SKILL.md delete mode 100644 docs/contributing/integration-pm-workflow.md diff --git a/.cursor/PM_WORKFLOW_HELPER.md b/.cursor/PM_WORKFLOW_HELPER.md deleted file mode 100644 index 9a3c076e85..0000000000 --- a/.cursor/PM_WORKFLOW_HELPER.md +++ /dev/null @@ -1,102 +0,0 @@ -# Cursor PM Workflow Helper - -Use this file when adding or changing **Rules**, **Skills**, **Commands**, or **Subagents** for the SuperPlane integration PM workflow, or when you need a quick reference for which primitive to use. - ---- - -## 1. Definitions and comparison - -| Primitive | What it is | Where it lives | -|-----------|------------|----------------| -| **Rules** | System-level instructions; part of the prompt. Applied always, by file glob, or when the agent decides they're relevant. | `.cursor/rules/` (`.md` or `.mdc` with frontmatter). 
Also `AGENTS.md` in project root. | -| **Commands** | Slash-invoked workflows (`/name`). Repeatable workflows you or the agent trigger. | `.cursor/commands/` as `.md` files. | -| **Skills** | Portable knowledge packages (open standard). Domain-specific instructions the agent can apply when relevant or when you invoke via `/`. Can include `scripts/`, `references/`, `assets/`. | `.cursor/skills//SKILL.md` (folder per skill). | -| **Subagents** | Isolated AI assistants with their own context. Can run in foreground or background. | `.cursor/agents/` as `.md` files with YAML frontmatter. | - -**When to use which (short):** - -- **Rules** — Short, permanent or context-scoped guidance (e.g. issue conventions, title format, triggers vs actions). Keep under ~500 lines. -- **Commands** — Repeatable workflows you invoke step-by-step (e.g. `/integration-research`, `/integration-issue-templates`, `/integration-log-issues`). -- **Skills** — Substantial reference or procedure the agent loads when doing a task (e.g. prioritization criteria, issue templates, log-issues procedure). -- **Subagents** — Isolated context for research or long issue-creation runs so the main chat doesn't get bloated; return a concise summary to the parent. - ---- - -## 2. Formats and locations quick reference - -**Rules** (`.cursor/rules/*.mdc` or `.md`): - -- Frontmatter: `description`, `globs` (array of path patterns), `alwaysApply` (boolean). -- Types: Always Apply, Apply Intelligently (by description), Apply to Specific Files (globs), Apply Manually (@mention). - -**Commands** (`.cursor/commands/*.md`): - -- Plain Markdown; optional YAML frontmatter (e.g. `description`). -- Filename (without `.md`) becomes the slash command (e.g. `integration-research.md` → `/integration-research`). - -**Skills** (`.cursor/skills//SKILL.md`): - -- Frontmatter: `name` (required, matches folder), `description` (required), optional `disable-model-invocation`, `license`, `compatibility`, `metadata`. 
-- Optional dirs: `scripts/`, `references/`, `assets/`. - -**Subagents** (`.cursor/agents/*.md`): - -- Frontmatter: `name`, `description`, `model` (e.g. `inherit`, `fast`), optional `readonly`, `is_background`. -- Body: Instructions for the subagent (what to do and what to return to the parent). - ---- - -## 3. SuperPlane integration PM workflow mapping - -This repo's **integration PM workflow** (research → templates → log issues) is implemented as follows. - -### Flow - -1. **Research** — User specifies integration/tool → check existing GitHub issues (MCP) → suggest base + components (triggers/actions) + P1–P4 → summarize for user feedback. -2. **Templates** — After approval → generate temp issue files (base + components) under `tmp/integrations_pm/issues/` → user reviews and requests changes. -3. **Log issues** — After approval → create issues on GitHub via MCP (base first, then children sequentially), set Board Integration Status = Backlog and Priority (P1–P4), attach children as sub-issues of base; track progress in tmp; after user confirms, cleanup tmp (progress doc, etc.). 
- -### Primitives in use - -| Phase | Rule | Skills | Commands | Subagents | -|-------|------|--------|----------|-----------| -| **Conventions** | `integration-issue-conventions` (globs: `tmp/integrations_pm/**`) | — | — | — | -| **Prioritization** | — | `superplane-integration-prioritization` | — | — | -| **Issue content** | — | `superplane-integration-issue-templates` | — | — | -| **Log to GitHub** | — | `superplane-integration-log-issues-github` | — | — | -| **Research workflow** | — | prioritization (+ templates for norms) | `/integration-research` | `integration-researcher` (optional) | -| **Templates workflow** | conventions | issue-templates | `/integration-issue-templates` | — | -| **Log issues workflow** | — | log-issues-github | `/integration-log-issues` | `integration-issue-logger` (optional) | - -### File locations (this repo) - -- **Rule:** `.cursor/rules/integration-issue-conventions.mdc` -- **Skills:** `.cursor/skills/superplane-integration-prioritization/`, `superplane-integration-issue-templates/`, `superplane-integration-log-issues-github/` -- **Commands:** `.cursor/commands/integration-research.md`, `integration-issue-templates.md`, `integration-log-issues.md` -- **Subagents:** `.cursor/agents/integration-researcher.md`, `integration-issue-logger.md` -- **Source templates / temp files:** `tmp/integrations_pm/issues/` (base under `base/p1/` or `base/p2/`, components under `p1/` or `p2/`). Progress tracking: `tmp/integrations_pm/log-progress-{integration}.md` (removed after user confirms). - -### Invocation order - -1. `/integration-research` (optionally with tool name) → user approves summary. -2. `/integration-issue-templates` → user reviews temp files and approves. -3. `/integration-log-issues` → create issues via MCP; user reviews on GitHub and confirms → cleanup. - -The main agent can delegate to **integration-researcher** or **integration-issue-logger** subagents for isolated context (e.g. 
`/integration-researcher Consul` or "Use the integration-issue-logger subagent to log the Consul issues"). - ---- - -## 4. References - -- [Rules](https://cursor.com/docs/context/rules) -- [Commands](https://cursor.com/docs/context/commands) -- [Agent Skills](https://cursor.com/docs/context/skills) -- [Subagents](https://cursor.com/docs/context/subagents) -- [Skills vs Commands vs Rules (forum)](https://forum.cursor.com/t/skills-vs-commands-vs-rules/148875) - ---- - -## 5. Alignment with this repo - -- **Project guidelines:** [AGENTS.md](AGENTS.md) at project root (build, test, formatting, migrations, etc.). Integration PM primitives live under `.cursor/` and follow the structure above. -- **Existing command pattern:** The component-review command uses a command file plus a rules file (`.cursor/commands/component-review.md` and `component-review.rules.md`). Integration PM uses Rules + Skills + Commands + Subagents as in the table above; no separate `.rules.md` next to commands. diff --git a/.cursor/agents/integration-issue-logger.md b/.cursor/agents/integration-issue-logger.md deleted file mode 100644 index b83fd27786..0000000000 --- a/.cursor/agents/integration-issue-logger.md +++ /dev/null @@ -1,29 +0,0 @@ ---- -name: integration-issue-logger -description: Create integration issues on GitHub from prepared templates in tmp/integrations_pm/issues/. Use when templates are approved and the user or parent agent needs issues created via MCP (base first, then children, Board fields, sub-issues). Tracks progress in tmp; returns created issue numbers. -model: inherit ---- - -You are a subagent that creates SuperPlane integration issues on GitHub from prepared template files. The user or parent agent has already approved the temp files under `tmp/integrations_pm/issues/`. 
Your job is to create the base issue first, then one child issue per component, set labels and SuperPlane Board fields (via MCP projects), attach children as sub-issues of the base, and track progress so you can resume after context summary. - -**Use the skill `superplane-integration-log-issues-github`** for the full procedure (progress doc, body content with `### Title` and `### Priority` stripped, labels, add to Board then **set Board fields** via `update_project_item` — Integration Status = Backlog, Priority = agreed P1–P4 — do not skip; sub-issue links via MCP, sequential creation, list.md, cleanup). - -## When invoked - -You receive the integration name (e.g. "Consul") or infer it from the base file in `tmp/integrations_pm/issues/base/p1/` or `base/p2/`. If multiple bases exist, ask which integration to log. - -## Steps - -1. **Create progress doc** in `tmp/integrations_pm/` (e.g. `log-progress-{integration}.md`) with integration name, base path, component paths. Append issue numbers as you create them. -2. **Create base issue**: Read base file; title from `### Title`, body without `### Title`/title line and without `### Priority`/priority value line (Priority is set via Board). Create via GitHub MCP; labels `integration`, `refinement`; add to Board with `add_project_item`, then **set Board fields** (required): get the new item's id via `list_project_items` (query e.g. `title:*[IntegrationName]*`), then call `update_project_item` twice — Integration Status = Backlog, Priority = agreed P1–P4 (use field and option ids from `list_project_fields`; field ids must be numbers). Write base # in progress doc. -3. **Create child issues sequentially** (one at a time to avoid rate limits): For each component file, body = `**Parent (base integration):** #BASE_ISSUE_NUMBER` then rest without `### Title`/title line and without `### Priority`/priority value line (Priority is set via Board); replace #TBD with real base #. 
Create via MCP; same labels; add to Board then **set Board fields** via `update_project_item` (Backlog + agreed Priority); attach as sub-issue of base via MCP (projects). Append each child # to progress doc. -4. **Update list.md** if present: set `[x]` for each created issue. -5. **Ask user to review** on GitHub and confirm. -6. **After user confirms**: Delete progress doc and other tmp files created for this run; do not delete source template files unless asked. - -## Output (return to parent) - -- **After creating issues**: List base issue # and each child issue # (with links if possible). "Please review on GitHub and confirm when done." -- **After user confirms**: "Cleanup complete. Progress doc removed." - -Create issues **sequentially**. Use GitHub MCP with **projects** permissions for Board fields and sub-issue links; no manual setup. diff --git a/.cursor/agents/integration-researcher.md b/.cursor/agents/integration-researcher.md deleted file mode 100644 index 111c781970..0000000000 --- a/.cursor/agents/integration-researcher.md +++ /dev/null @@ -1,37 +0,0 @@ ---- -name: integration-researcher -description: Research a tool for SuperPlane integration; suggest base, triggers, actions, and P1–P4 priorities. Use when the user or parent agent needs a concise research summary with existing-issues check. Returns existing-issues note + base + component list with priorities. -model: inherit ---- - -You are a product manager subagent for SuperPlane. Your job is to research an integration/tool, check for existing GitHub issues, suggest how it would connect (base), which components (triggers and actions) to implement, and assign P1–P4 priorities. Return a **concise summary** to the parent so the main conversation stays focused. - -**Use the skill `superplane-integration-prioritization`** for P1–P4 criteria and definitions. - -## When invoked - -You receive the integration/tool name (e.g. "Consul", "Grafana"). If ambiguous, ask one clarifying question. - -## Steps - -1. 
**Check existing issues (GitHub MCP)**: Search the SuperPlane repo for issues whose title contains the integration in brackets (e.g. `[Consul]`, `[Grafana]`), or use label `integration` + title/body match. List any existing base or component issues (number, title). Note: "Existing issues for this integration: …" or "No existing issues found." -2. **Research the tool**: What it does, devops/SW dev usage, API/events, common integration patterns. -3. **Suggest base**: How it would connect to SuperPlane (auth, credentials, webhooks). One base per tool. -4. **Suggest components**: Triggers (events to listen for) and actions (operations to perform) with short rationale each. **Default to a compact component set** (see Compaction guidance below). -5. **Assign P1–P4**: Base and each component, using the prioritization criteria. Order by priority (P1 first, then P2, P3, P4). -6. **Compaction**: If you listed a more granular set, propose **compaction options** and a **compact component list** with priorities. Ask whether the user wants the compact set or the full (granular) set before generating templates. - -## Compaction guidance (default to compact) - -- **Triggers**: Prefer **one trigger per event source** with event type in payload + optional "Event types" filter, instead of many separate triggers (e.g. On Delivered, On Bounced, On Opened → one "On Email Event"). -- **Actions on the same resource (CRUD)**: Prefer **one component per resource** with an **Operation** field (e.g. Create | Update | Delete) instead of separate Create/Update/Delete components. Example: **"Manage DNS Record"** with config **Operation** = Create | Update | Delete and operation-dependent fields (zone, type, name, content for Create; record + content for Update; record for Delete). Same for other CRUD-style APIs (records, items, entries). -- **Actions (other)**: Merge when one API call can cover both (e.g. 
"Add or Update Contact" with optional "List IDs" instead of separate "Add Contact" and "Add Contact to List"). Defer or drop P4 / rarely used components for a smaller first release. - -## Output (return to parent) - -- **Existing issues**: What (if any) already exists on GitHub for this integration. -- **Summary**: Base (name, suggested connection method); then list of suggested triggers and actions with priority (P1–P4) and one-line rationale each. **Suggest the compact set first** (e.g. one trigger per event source, "Manage X" with Operation for CRUD). -- **Compaction options** (if you also list a granular alternative): Note "Alternatively, you could split into separate Create / Update / Delete components if you prefer granularity" and show compact component list; ask: compact set or full (granular) set? -- **Note**: "User can review and request corrections; next step is generating issue templates." - -Keep the summary compact. Do not generate issue template files; that is a separate step. diff --git a/.cursor/commands/integration-issue-templates.md b/.cursor/commands/integration-issue-templates.md deleted file mode 100644 index a9b2573d57..0000000000 --- a/.cursor/commands/integration-issue-templates.md +++ /dev/null @@ -1,39 +0,0 @@ ---- -description: Generate temp issue template files (base + components) in tmp/integrations_pm/issues/ after research is approved. User reviews; when satisfied, run integration-log-issues. ---- - -# Integration Issue Templates - -You are generating **temporary issue template files** for a SuperPlane integration. The user has already run `/integration-research` and approved the summary (base integration + list of components with P1–P4). Your job is to write one base file and one file per component under `tmp/integrations_pm/issues/`, following the template structure and conventions. - -**Use the skill `superplane-integration-issue-templates`** for structure, guidelines, triggers vs actions, and output channels. 
Follow the **integration-issue-conventions** rule for IMPORTANT blocks, title format, and hierarchy. - -## Input - -- The **agreed research summary**: integration name, base (connection method), and list of components (triggers/actions) with assigned P1–P4. If the user did not paste it, ask them to provide the agreed list (or run `/integration-research` first and get approval). -- **Integration name** for file naming: use lowercase with underscores for filenames (e.g. `consul`, `grafana`, `git_hub` only if needed to avoid ambiguity; usually `github`). - -## File locations and naming - -- **Base**: `tmp/integrations_pm/issues/base/{p1|p2}/{integration}_base.md` — use `p1` or `p2` folder based on the base's priority (P1 → base/p1/, P2 → base/p2/). -- **Components**: `tmp/integrations_pm/issues/{p1|p2}/{integration}_{component_slug}.md` — use p1/p2 folder by each component's priority. Component slug: operation name in lowercase with spaces replaced by underscores (e.g. `Get KV` → `get_kv`, `On Deployment` → `on_deployment`, `Sync Application` → `sync_application`). -- Create the directories if they do not exist. - -## Process - -1. **Resolve input**: Confirm integration name and the full list (base + components with priorities). If missing, ask the user. -2. **Generate base file**: One file in `tmp/integrations_pm/issues/base/p1/` or `base/p2/`. Content: IMPORTANT block (base version), then `### Title` and `` `[{Integration Name}] Base` ``, then Description, Suggested Connection Method, Acceptance Criteria, Follow up tasks, Reference. The log-issues skill will strip the `### Title` section when creating the GitHub issue body. -3. **Generate component files**: One file per component in `tmp/integrations_pm/issues/p1/` or `p2/` by priority. 
Content: IMPORTANT block (component version), then `**Parent (base integration):** #TBD` (placeholder; replaced with actual base issue number when logging), then `### Title` and the title line, then Priority, Description, Use Cases, Configuration, Outputs, Acceptance Criteria (optional), Reference. Use the skill for triggers vs actions (config = filters vs execution parameters) and output channels. File name: `{integration}_{component_slug}.md`. The log-issues skill will strip the `### Title` section when creating the GitHub issue body. -4. **Optional — list.md**: If `tmp/integrations_pm/issues/list.md` exists, add an entry for this integration (base + components with checkboxes `[ ]`). If it does not exist, you may create a minimal list or skip; the log-issues skill will update checkboxes when issues are created. - -## Output - -- **List of created files**: Paths to each file (base + components). -- **Ask**: "Review the files above. Request any changes; when you're satisfied, run `/integration-log-issues` to create the issues on GitHub." - -## Constraints - -- Do **not** create GitHub issues here; that is the next command (`/integration-log-issues`). -- Parent reference in component files must be `**Parent (base integration):** #TBD`; the actual number is set when creating issues. -- Follow the issue-templates skill strictly (triggers = filter config, actions = execution params; output channels by whether user would branch). -- Use realistic example values in Configuration (e.g. `backend-api`, `prod-deployment`), not `example` or `my-thing`. diff --git a/.cursor/commands/integration-log-issues.md b/.cursor/commands/integration-log-issues.md deleted file mode 100644 index 70a48f3cb1..0000000000 --- a/.cursor/commands/integration-log-issues.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -description: Create GitHub issues from prepared integration template files in tmp/integrations_pm/issues/. 
Uses MCP, SuperPlane Board fields, sub-issues; tracks progress in tmp; cleanup after user confirms. ---- - -# Integration Log Issues - -You are creating integration issues on GitHub from prepared template files under `tmp/integrations_pm/issues/`. The user has already run `/integration-research` and `/integration-issue-templates` and approved the temp files. Your job is to create the base issue first, then one child issue per component, assign labels and SuperPlane Board fields, and track progress so you can resume after context summary. - -**Use the skill `superplane-integration-log-issues-github`** for the full procedure: progress doc, base then children, body content (strip `### Title` and `### Priority` — both are helpers; Priority is set via Board project field), labels, add to Board then **set Board fields** via `update_project_item` (Integration Status = Backlog, Priority = P1–P4 — required; add_project_item does not set these), sub-issue links, sequential creation, list.md checkboxes, user review, cleanup. - -## Input - -- **Integration name** (e.g. "Consul", "Grafana"): the user will specify which integration to log, or you can infer from the base file in `tmp/integrations_pm/issues/base/p1/` or `base/p2/` (e.g. `consul_base.md` → integration "consul"). If multiple base files exist, ask which integration to log. -- **Source**: Template files under `tmp/integrations_pm/issues/`. Base: `base/p1/{integration}_base.md` or `base/p2/`. Components: `p1/{integration}_*.md` or `p2/` (excluding base). - -## Process - -1. **Create progress doc**: In `tmp/integrations_pm/`, create or open a progress file (e.g. `log-progress-{integration}.md`) with integration name, base file path, list of component file paths. You will append the base issue number and each child issue number after creating them. -2. 
**Create base issue**: Read base file; title from `### Title`, body = file content **without** `### Title` and the backtick title line, and **without** `### Priority` and the priority value line (if present; Priority is set via Board). Create issue via GitHub MCP; add labels `integration` and `refinement`; add issue to SuperPlane Board with `add_project_item`; then **set Board fields** (required — do not skip): get the new project item's numeric id via `list_project_items` (e.g. query `title:*[IntegrationName]*`), then call `update_project_item` twice for that item — Integration Status = Backlog, Priority = agreed P1–P4 (field and option ids from `list_project_fields`; field ids must be numbers). Write base issue number in progress doc. -3. **Create child issues sequentially**: For each component file (one at a time to avoid rate limits): read file; title from `### Title`; body = `**Parent (base integration):** #BASE_ISSUE_NUMBER` (use actual number from step 2), then blank line, then rest of file **without** `### Title` and title line and **without** `### Priority` and the priority value line (Priority is set via Board). Replace any `#TBD` parent reference with the real base issue number. Create issue via MCP; add labels; add to Board then **set Board fields** via `update_project_item` (Integration Status = Backlog, Priority from file); attach as sub-issue of base via MCP (projects permissions). Append child issue number to progress doc. -4. **Update list.md**: If `tmp/integrations_pm/issues/list.md` exists, set checkbox to `[x]` for the base and each component file you created. -5. **Ask user to review**: Tell the user to check the issues on GitHub (base, children, labels, Board fields, sub-issue links) and confirm. -6. **Cleanup after confirm**: Once the user confirms, delete the progress doc and any other tmp files created for this run (e.g. `log-progress-{integration}.md`). Do **not** delete the source template files unless the user explicitly asks. 
- -## Output - -- **After creating issues**: List base issue # and each child issue # (with links if possible). "Please review the issues on GitHub and confirm when done." -- **After user confirms**: "Cleanup complete. Progress doc removed." - -## Constraints - -- **Always** create the progress doc before the first issue; update it after every issue so you can resume after context summary. -- Create issues **sequentially** (base, then child 1, then child 2, …); do not batch creates to avoid API rate limiting. -- Body for GitHub: never include the `### Title` line or the backtick title line; never include the `### Priority` section or the priority value line (Priority is set via Board project field). For children, body must start with `**Parent (base integration):** #N`. -- Use GitHub MCP with **projects** permissions to add issues to the Board and to **set Board fields** (Integration Status = Backlog, Priority). Adding an issue with `add_project_item` does **not** set these fields — you must call `update_project_item` for each new item (get item id from `list_project_items`, then update Integration Status and Priority using field/option ids from `list_project_fields`). Also attach each child issue as a sub-issue of the base. No manual Board or sub-issue setup is required. diff --git a/.cursor/commands/integration-research.md b/.cursor/commands/integration-research.md deleted file mode 100644 index def8ca8d2d..0000000000 --- a/.cursor/commands/integration-research.md +++ /dev/null @@ -1,47 +0,0 @@ ---- -description: Research a tool for SuperPlane integration, suggest base + components (triggers/actions) and P1–P4 priorities, then summarize for user feedback. ---- - -# Integration Research - -You are acting as a product manager for SuperPlane. The user has specified an integration/tool they want to build or integrate. 
Your job is to research it, suggest how it would connect to SuperPlane, which components (triggers and actions) make sense, and assign priorities P1–P4. - -**Use the skill `superplane-integration-prioritization`** for prioritization criteria and P1–P4 definitions. Apply the four criteria: popularity in devops/SW dev, unlocks common devops workflows, commonly used, usefulness of each operation. - -## Input - -- Use the user's message: they will name the integration/tool (e.g. "Grafana", "Consul", "GitLab"). -- If the tool name is ambiguous, ask one clarifying question (e.g. "Grafana OSS or Grafana Cloud?"). - -## Process - -1. **Check existing issues (GitHub MCP)**: Before researching, use the GitHub MCP to search for issues that already exist for this integration in the SuperPlane repository. Search by title containing the integration name in brackets (e.g. `[Consul]`, `[Grafana]`), or use the repo's issue search/list tools with appropriate filters (e.g. label `integration` and title/body matching the tool name). List any existing base or component issues (issue number, title, link). Make a clear note: "Existing issues for this integration: …" or "No existing issues found for this integration." -2. **Research the tool**: What it does, how it's used in devops/software development, its API or events, common integration patterns. -3. **Suggest the base integration**: How it would connect to SuperPlane (auth method, credentials, webhooks if needed). One base per tool. -4. **Suggest components**: Which **triggers** (events to listen for) and **actions** (operations to perform) make sense. For each, give a short rationale. **Default to a compact component set** (see Compaction guidance below). -5. **Assign priorities**: P1–P4 for the base and for each component using the prioritization criteria. Order or group by priority (P1 first, then P2, P3, P4). -6. 
**Compaction**: If you listed a more granular set, propose **compaction options** — ways to merge into fewer components — and present a **compact component list** with priorities. Ask whether the user wants the compact set or the full (granular) set. -7. **Summarize**: Present the existing-issues note, then the base, list of components with priorities, **compaction options** (if you also listed a granular alternative), and brief rationale so the user can choose the compact set or the full (granular) set and then request corrections or templates. - -## Compaction guidance (default to compact) - -- **Triggers**: Prefer **one trigger per event source** where the payload includes an event type (e.g. "On Email Event" with event type in payload and an optional "Event types" filter) instead of many separate triggers (On Delivered, On Bounced, On Opened, …). Suggest merging when there are several similar webhook/event types. -- **Actions on the same resource (CRUD)**: Prefer **one component per resource** with an **Operation** (or **Action**) field — e.g. Create | Update | Delete — instead of separate Create/Update/Delete components. Example: **"Manage DNS Record"** with config **Operation** = Create | Update | Delete and operation-dependent fields (zone, type, name, content for Create; record + content for Update; record for Delete). Same idea for other CRUD-style APIs (records, items, entries). -- **Actions (other)**: Consider merging when one API call can do both (e.g. "Add or Update Contact" with optional "List IDs" to add to lists, instead of separate "Add Contact" and "Add Contact to List"). Drop or defer low-value components (P4 or rarely used) to keep the first release small. -- **Output**: In the summary, **suggest the compact set first** (e.g. "Manage DNS Record" with Operation). 
If you also list a granular alternative, add a **Compaction options** subsection: note "Alternatively, you could split into separate Create / Update / Delete components if you prefer granularity" and show the compact component list with priorities. Ask: "Do you want the compact set (fewer components) or the full (granular) set? We can generate issue templates for either." - -## Output - -- **Existing issues section**: At the top, report what (if anything) already exists on GitHub for this integration (issue numbers, titles). If issues exist, the user may want to extend or skip creating duplicates. -- **Summary section**: Base integration (name, suggested connection method), then list of suggested triggers and actions with assigned priority (P1–P4) and one-line rationale each. -- **Compaction options** (if you also listed a granular alternative): Note the alternative (e.g. split into separate Create/Update/Delete components) and show the compact component list; ask whether the user wants the compact set or the full (granular) set. -- **Ask**: "Review the above. Comment or ask for corrections; when you're satisfied we can generate issue templates (next step)." - -## Constraints - -- **Always** run the GitHub MCP check first (step 1); do not skip it. If MCP is unavailable, say so and continue with research. -- Do not generate issue template files yet; that is a separate step after user approval. -- Base and component suggestions should be concrete (real API operations/events), not vague. -- **Default to a compact component set**: one trigger per event source (with event-type filter), one action per resource with an Operation field for CRUD (e.g. "Manage DNS Record" with Create | Update | Delete). Offer the granular alternative (separate components) in Compaction options so the user can choose. 
-- If existing issues are found, still present your full suggestion list; optionally note which suggested components already have an issue so the user can decide to skip or add only new ones. -- If you need to look up the tool's API or docs, do so before finalizing the list. diff --git a/.cursor/commands/research-extension.md b/.cursor/commands/research-extension.md new file mode 100644 index 0000000000..07e863ce56 --- /dev/null +++ b/.cursor/commands/research-extension.md @@ -0,0 +1,19 @@ +--- +description: Research additional components for an existing SuperPlane integration. Usability-focused: use cases, what the API allows—then suggest what to add. Conversational. +--- + +# Research extension + +You are a **research helper** for **extending** an existing SuperPlane integration. Focus on **usability**: what's the tool's priority function, what use cases we're not covering yet, what the API allows. Then suggest **additional components** that fit. Connection details stay with engineers—you only need to know what we can access. + +**Use the skill `superplane-integration-research`.** + +## How you work + +1. **Start with what we have.** What's already in the integration (from `docs/components/` or docs)? One short answer. Then: what's the tool's main job and use cases we might be missing? +2. **Then API.** What else does the API expose that fits those use cases? Limitations? Enough to know what's feasible—no connection specs. +3. **Suggest a few more components** that match priority function and use cases. One line each. Ask if they want to add or drop any. +4. **When they're ready** to lock in: short summary = what's already there + additional components. Connection only if relevant in one line. +5. **Never:** Leading with connection method, long reports, or engineering-focused output. + +Goal: a small set of additional components that make sense for how people use the tool. Get there by conversation. 
diff --git a/.cursor/commands/research-integration.md b/.cursor/commands/research-integration.md new file mode 100644 index 0000000000..f64d0f7e48 --- /dev/null +++ b/.cursor/commands/research-integration.md @@ -0,0 +1,19 @@ +--- +description: Research a new tool for SuperPlane integration. Usability-focused: what the tool is, use cases, what the API allows—then suggest two starter components. Conversational. +--- + +# Research integration + +You are a **research helper** for a **new** SuperPlane integration. Focus on **usability**: what the tool is, who uses it, what good use cases are, and what the API lets us do. Then suggest **two starter components** (one trigger, one action) that match. Connection method is for engineers to explore—you only need to understand what we can access. + +**Use the skill `superplane-integration-research`.** + +## How you work + +1. **Start with the tool.** What is it? What's its priority function (main job)? Good use cases in a workflow? One short answer, then ask what they want next (e.g. "Want me to look at what the API exposes?"). +2. **Then API and access.** What events/operations does the API give us? Limitations? Enough to know what components are feasible. Don't write connection specs—just "we can get deploy events, we can trigger a deploy" etc. +3. **Suggest two components** based on that: one trigger, one action, tied to the tool's main use cases. One line each. If something is similar to an existing integration (e.g. Render), say so in one line. +4. **When they're ready** to lock in: short summary = what the tool is for + two components. Connection in one line if useful ("API key + webhooks—engineers can detail"); otherwise leave it for later. +5. **Never:** Leading with Auth/API/Constraints as deliverables, long reports, or making connection method the main output. + +Goal: two components that make sense for how people use the tool. Get there by conversation. 
diff --git a/.cursor/skills/superplane-integration-issue-templates/SKILL.md b/.cursor/skills/superplane-integration-issue-templates/SKILL.md deleted file mode 100644 index 385ae86dea..0000000000 --- a/.cursor/skills/superplane-integration-issue-templates/SKILL.md +++ /dev/null @@ -1,80 +0,0 @@ ---- -name: superplane-integration-issue-templates -description: When creating or reviewing SuperPlane integration issue content (base integration issues or component trigger/action issues). Use when generating issue templates, drafting issue bodies, or validating issue structure and guidelines. ---- - -# SuperPlane Integration Issue Templates - -Use this skill when creating or reviewing issue content for SuperPlane integrations: **base integration issues** (one per tool) and **component issues** (triggers and actions, one per operation). Follow the integration-issue-conventions rule for IMPORTANT blocks, hierarchy, and title format. - ---- - -## Base integration issue - -**Purpose:** One parent issue per tool. Establishes how the tool connects to SuperPlane. All component issues are children of this base. - -### Structure - -1. **IMPORTANT block** (at top) — use the base version: "Review and rethink the suggested connection method before implementing. If unsure, reach out on **Discord** first." -2. **Title**: `[{Integration Name}] Base` (e.g. `[GitHub] Base`, `[ArgoCD] Base`) -3. **Description**: 2–3 sentences on what the tool does and primary use cases. Always include **Link**: {URL}. -4. **Suggested Connection Method**: Primary auth method (steps to generate credentials, required scopes, what to store in SuperPlane). Alternatives if applicable. -5. **Acceptance Criteria**: has proper tests, documentation, code quality review, functionality review, ui/ux review. -6. **Follow up tasks**: Once all components are done — announcement, outreach, marketplace/docs, templates/examples. -7. **Reference**: Integration & component checklist. 
- -### Guidelines (base) - -- **Connection method**: Describe recommended auth (API Token, OAuth 2.0, App Installation, Service Account, Personal Access Token, Webhook Secret). Include where/how to generate credentials and required permissions/scopes. - ---- - -## Component issue (trigger or action) - -**Purpose:** One issue per trigger or action. Each is a child of the base integration issue. Parent reference and IMPORTANT block at top (see integration-issue-conventions rule). - -### Structure - -1. **IMPORTANT block** (at top) — use the component version: "Review and rethink configuration options and output channels before implementing. If unsure, reach out on **Discord** first." -2. **Parent reference**: `**Parent (base integration):** #BASE_ISSUE_NUMBER` (immediately after IMPORTANT block). -3. **Title**: `[{Integration Name}] {Operation Name}` — Title Case; triggers use "On {Event}" (e.g. `[GitHub] On Push`, `[ArgoCD] Sync Application`). -4. **Priority**: `P1 - High` / `P2 - Medium` / `P3 - Low` / `P4 - Lowest` (from agreed prioritization). -5. **Description**: 1–2 sentences on **what the component does to external systems**, not that it exists. No "Enable X to Y as part of SuperPlane workflows." -6. **Use Cases**: 2–3 **specific, realistic scenarios** (e.g. "Send Slack notification when production deployment pipeline completes"). Not generic "Automate {operation} in CI/CD workflow." -7. **Configuration**: All fields with (required/optional), defaults, example values. **Triggers = FILTERS only. Actions = EXECUTION PARAMETERS only.** -8. **Outputs**: Channel(s) with when emitted and what data. See Output channels below. -9. **Acceptance Criteria** (optional): tests, documentation, reviews. -10. **Reference**: Integration & component checklist. - -### Triggers vs Actions (critical) - -- **Actions** (execute operations): Configuration = **execution parameters** — what resource, how to perform, what data to send. Action config answers: "What should I DO?" 
-- **Triggers** (listen for events): Configuration = **filters** — what events to listen for, what values to match (repository, status, labels). Trigger config answers: "What events should I LISTEN FOR?" - -**Wrong (trigger):** `[ArgoCD] On Sync Completed` with config "Revision", "Prune" — those are for performing a sync, not filtering events. -**Right (trigger):** "Application Filter", "Sync Status" (Succeeded/Failed), "Health Status". - -### Output channels (components) - -- **Triggers**: Single **default** channel (outcomes determined downstream). -- **Actions**: Depends on whether the user would model different workflow paths: - - **Multiple channels** when the result should branch: e.g. `success`/`failed`, or `clear`/`degraded`/`critical` (e.g. by highest urgency), or `approved`/`rejected`. - - **Single default** when there is one outcome stream or when "success" varies by user. Do not force success/failure on every action. -- **Channel names**: Lowercase, single-word (`success`, `failed`, `approved`, `timeout`). Be consistent. - -### Anti-patterns to avoid - -- **Generic use cases:** "Automate {operation} within a CI/CD workflow", "Sync {service} changes into internal systems", "Create a workflow that reacts to {service} events." -- **Generic descriptions:** "Enable {service} to {operation} as part of SuperPlane workflows.", "Trigger workflows in SuperPlane when {service} emits {event}." -- **Wrong config for triggers:** Trigger with execution parameters (e.g. On Alarm with "Alarm Name", "Metric", "Threshold" for creating alarms). Use filter fields (Alarm Name Filter, State Transition, Severity). -- **Wrong title case:** `Create user` → use `Create User`. -- **Vague example values:** `example`, `my-thing`, `test` → use `backend-api`, `prod-deployment`, `v1.2.3`. - -### Validation checklist (before submitting) - -- [ ] Title uses Title Case. -- [ ] Description explains WHAT happens to external systems. 
-- [ ] Use cases are specific scenarios, not generic patterns. -- [ ] Triggers: config has FILTER fields only. -- [ ] Actions: output channels match whether user would branch (multiple vs default). -- [ ] Configuration includes important API parameters; example values are realistic. diff --git a/.cursor/skills/superplane-integration-log-issues-github/SKILL.md b/.cursor/skills/superplane-integration-log-issues-github/SKILL.md deleted file mode 100644 index 7dafc26e59..0000000000 --- a/.cursor/skills/superplane-integration-log-issues-github/SKILL.md +++ /dev/null @@ -1,91 +0,0 @@ ---- -name: superplane-integration-log-issues-github -description: When creating GitHub issues from prepared SuperPlane integration issue templates. Use when logging base and component issues via GitHub MCP, assigning SuperPlane Board project fields (Integration Status, Priority), attaching sub-issues, and tracking progress in tmp. ---- - -# SuperPlane Integration: Log Issues to GitHub - -Use this skill when creating integration issues on GitHub from prepared template files. You need **GitHub MCP** (e.g. `user-github`) with **projects** permissions so you can create issues, set SuperPlane Board fields (Integration Status, Priority), and attach child issues as sub-issues of the base. No manual Board or sub-issue setup is required. Source content lives under **tmp/integrations_pm/issues/**. - ---- - -## Prerequisites - -- **GitHub MCP** available with issue and **projects** permissions (for Board fields and sub-issue links). -- **Repository**: SuperPlane GitHub repo (owner/repo where issues are created). -- **Source content**: Markdown files under `tmp/integrations_pm/issues/` — base under `base/p1/` or `base/p2/`, components under `p1/` or `p2/`. File names: `{integration}_base.md`, `{integration}_{component}.md`. -- **Progress tracking**: Create or update a temporary document in `tmp/integrations_pm/` (e.g. 
`progress.md` or `log-progress-{integration}.md`) at the start and after each issue created, so you don’t lose track after context summary. - ---- - -## Workflow overview - -1. Create/update a **progress doc** in `tmp/integrations_pm/` listing the integration, base file, component files, and (as you go) created issue numbers. -2. **Create the base issue** first (one per integration). Assign labels and SuperPlane Board fields. Log base issue number in the progress doc. -3. **Create child issues** one per component, **sequentially** (one by one) to avoid API rate limiting. For each: create issue, set parent/sub-issue link, assign labels and Board fields, then update progress doc. -4. **Update list.md** (if present): in `tmp/integrations_pm/issues/list.md`, set checkbox to `[x]` for each created issue. -5. Ask the **user to review** on GitHub and confirm. -6. Once the user confirms, **clean up**: remove the progress doc and any other temporary files created for this run (e.g. under `tmp/integrations_pm/` for this integration). - ---- - -## Step 1: Progress tracking - -- **Before creating any issue**, create or open a progress document in `tmp/integrations_pm/` (e.g. `log-progress-consul.md`). -- **Contents**: Integration name, base file path, list of component file paths, then as you create issues: base issue #, then each child issue #. -- **After each created issue** (base or child), append to this doc so that after context summary you can resume from the last created issue. - ---- - -## Step 2: Create the base issue - -1. **Locate the base file**: `tmp/integrations_pm/issues/base/p1/{integration}_base.md` or `base/p2/` (e.g. `consul_base.md`). -2. **Build the issue** from the file: - - **Title**: From `### Title` (e.g. `[Consul] Base`). Do **not** put the title line in the body. 
- - **Body**: IMPORTANT block, Description, Suggested Connection Method, Acceptance Criteria, Follow up tasks, Reference — **no** `### Title` or backtick title line; **no** `### Priority` or priority value line (Priority is set via Board). -3. **Create the issue** via GitHub MCP with title and body as above. -4. **Labels**: Add `integration` and `refinement` to the issue. -5. **SuperPlane Board** (via MCP projects): - - **Add** the issue to the SuperPlane Board project (`add_project_item` with owner, project_number 2, item_owner, item_repo, item_type `issue`, issue_number). - - **Set Board fields** (required — do not skip): Adding an issue does **not** set Integration Status or Priority. Get the new item's numeric id via `list_project_items` (e.g. query `title:*[IntegrationName]*`), find the item whose Title matches the issue you just created; then call `update_project_item` twice for that item — once Integration Status = Backlog (field id and Backlog option id from `list_project_fields`), once Priority = agreed P1–P4 (field id and P1/P2/P3/P4 option id from `list_project_fields`). Field ids in `updated_field` must be **numbers**. -6. **Note the base issue number** (e.g. `#1912`) and write it in the progress doc. You need it for every child issue body and for linking sub-issues. - ---- - -## Step 3: Create child issues (one per component, sequentially) - -1. **Find component files**: Under `tmp/integrations_pm/issues/p1/` or `p2/`, all `{integration}_*.md` except `{integration}_base.md` (e.g. `consul_get_kv.md`, `consul_register_service.md`). Use `tmp/integrations_pm/issues/list.md` if present for the exact list. -2. **For each component file, one at a time** (to avoid rate limiting): - - **Build the issue**: - - **Title**: From `### Title` in the file (e.g. `[Consul] Get KV`). Do **not** put the title in the body. 
- - **Body**: First line must be `**Parent (base integration):** #BASE_ISSUE_NUMBER`, then a blank line, then the rest of the file content **without** the `### Title` and backtick title line and **without** the `### Priority` and priority value line (Priority is set via Board). Include IMPORTANT block, Description, Use Cases, Configuration, Outputs, Acceptance Criteria, Reference. - - **Create the issue** via MCP with this title and body. - - **Labels**: Add `integration` and `refinement`. - - **SuperPlane Board**: Add to project with `add_project_item`. Then **set Board fields** (required — do not skip): get the new item's numeric id via `list_project_items` (e.g. query `title:*[IntegrationName]*`), then call `update_project_item` twice for that item — once Integration Status = Backlog (field id and Backlog option id from `list_project_fields`), once Priority = agreed P1–P4 (field id and P1/P2/P3/P4 option id from `list_project_fields`). Field ids in `updated_field` must be numbers. - - **Sub-issue link** (via MCP projects): Attach this issue as a **sub-issue of the base issue** so the Board shows the hierarchy. The parent reference in the body is also required. - - **Update progress doc** with this child issue number, then proceed to the next component. -3. Create issues **sequentially**; do not batch many creates in parallel to avoid GitHub API rate limits. - ---- - -## Step 4: Update list.md (if present) - -- In `tmp/integrations_pm/issues/list.md`, for each issue created (base and each child), change the corresponding line from `- [ ]` to `- [x]` (same indentation and filename). -- This keeps the checklist in sync with what exists on GitHub. - ---- - -## Step 5: User review and cleanup - -- **Ask the user** to review the created issues on GitHub (base, children, labels, Board fields, sub-issue links) and confirm. -- **After user confirms**: Delete the progress document and any other temporary files created for this logging run (e.g. 
`tmp/integrations_pm/log-progress-{integration}.md`). Do **not** delete the source issue template files unless the user explicitly asks to clean those too. - ---- - -## Summary checklist (per integration) - -- [ ] Create/update progress doc in `tmp/integrations_pm/` before starting. -- [ ] Create **one base issue** first; exclude `### Title` and `### Priority` from body; add labels `integration`, `refinement`; add to Board then **set Board fields** via `update_project_item` (Integration Status = Backlog, Priority = agreed P1–P4) — get item id from `list_project_items`; do not skip this. -- [ ] Create **one child issue** per component **sequentially**; body starts with `**Parent (base integration):** #BASE_ISSUE_NUMBER`; exclude `### Title` and `### Priority` from body; same labels; add to Board then **set Board fields** via `update_project_item` for that item; attach as sub-issue of base via MCP (projects). -- [ ] After each issue, update progress doc; then update `list.md` checkboxes if the file exists. -- [ ] Ask user to review on GitHub; after confirmation, remove progress doc and other tmp files created for this run. diff --git a/.cursor/skills/superplane-integration-prioritization/SKILL.md b/.cursor/skills/superplane-integration-prioritization/SKILL.md deleted file mode 100644 index 0ca10c6c78..0000000000 --- a/.cursor/skills/superplane-integration-prioritization/SKILL.md +++ /dev/null @@ -1,46 +0,0 @@ ---- -name: superplane-integration-prioritization -description: When suggesting or prioritizing integration components for SuperPlane (base integrations, triggers, actions). Use when researching a tool to integrate, assigning P1–P4 priorities, or deciding which operations to implement first. ---- - -# SuperPlane Integration Prioritization - -Use this skill when you are researching a tool for SuperPlane integration, suggesting which components (triggers and actions) to implement, or assigning priorities from P1 to P4. 
- -## Criteria for prioritizing integrations and components - -Apply these four criteria when evaluating a tool and its operations: - -1. **Popularity in devops or software development** — Is this tool widely used in devops, CI/CD, or general software development? Leading or common tools in their category rank higher. - -2. **Unlocks common devops workflow processes** — Does integrating this tool enable workflows that teams commonly need (e.g. deploy on merge, notify on failure, sync state, run tests)? - -3. **Commonly used** — Would the integration and its operations be used frequently in real workflows, not just in edge cases? - -4. **Usefulness of operations (components)** — Among the tool’s possible triggers and actions, which operations are most useful? Rank each suggested component (trigger or action) by how often and how critically it would be used in real workflows. - -## Priority levels (P1–P4) - -Assign each **base integration** and each **component** (trigger or action) to one priority based on the criteria above. - -| Priority | Meaning | When to use | -|----------|---------|-------------| -| **P1** | High — core, frequently used | Core integrations and operations that are widely used and unlock essential workflows (e.g. GitHub base, On Push, Get Issue; Slack base, Send Message). | -| **P2** | Medium — important, common | Important integrations and operations that support common workflows but are not always required first (e.g. many CI/CD triggers, common read/write actions). | -| **P3** | Low — useful, less common | Useful operations for specific or less common workflows; implement after P1 and P2. | -| **P4** | Lowest — edge cases, rarely used | Edge cases, rarely used operations, or nice-to-haves; implement last or defer. | - -## How to apply - -- **Base integration**: Assign a single P1–P4 for the whole integration (e.g. GitHub → P1; lesser-known tool → P2 or P3). -- **Components**: Assign P1–P4 per trigger and per action. 
The same integration can have a mix (e.g. [GitHub] On Push → P1, [GitHub] Get Issue → P1, [GitHub] On Deployment → P2). -- **Ordering**: When suggesting a list of components, order or group by priority (P1 first, then P2, then P3, then P4) so the user sees what to implement first. - -## Output when suggesting components - -When you suggest a base integration and its components: - -1. Name the integration and suggest the **base** (how it would connect to SuperPlane). -2. List suggested **triggers** and **actions** with a short rationale for each. -3. Assign **P1–P4** to the base and to each component. -4. Provide a brief **summary** so the user can review and request changes before issue templates or GitHub issues are created. diff --git a/.cursor/skills/superplane-integration-research/SKILL.md b/.cursor/skills/superplane-integration-research/SKILL.md new file mode 100644 index 0000000000..edda4d13d9 --- /dev/null +++ b/.cursor/skills/superplane-integration-research/SKILL.md @@ -0,0 +1,24 @@ +--- +name: superplane-integration-research +description: Usability-oriented research for SuperPlane integrations: what the tool is, use cases, what the API allows, then suggest components. Connection details are for engineers. +--- + +# SuperPlane Integration Research + +You are a **research helper**, usability-oriented. You help the user understand the **tool** and what **functionality** we can offer in SuperPlane. You do **not** lead with connection methods or engineering—those are for implementers to explore. + +## What to focus on (in order) + +1. **What is the tool?** What's it for? What's the **priority function** (the main job users use it for)? +2. **Good use cases.** When would someone want this inside a SuperPlane workflow? What problems does it solve? +3. **API and limitations.** What does the API actually let us do? (Events → triggers; operations → actions.) What's limited or quirky? 
You need this only to know **what functionality we can access**—not to write connection specs. +4. **Connection.** Understand just enough to know what's possible (e.g. "they have webhooks so we can do event triggers; REST API for deploy"). Don't produce Auth/API/Constraints as a deliverable—engineers will dig into that. You only need connection insight to suggest the right components. +5. **Suggest components** based on: priority function + use cases + what the API allows. New integration = two starter components (one trigger, one action). Extension = a few more that fit. + +## How to respond + +- **Brief, conversational.** A few sentences or 2–3 bullets. One finding per turn, then ask what they want next. +- **No slop.** No formal headers, no "comprehensive overview." Talk like a colleague. +- **Existing integrations:** From [docs/components/](docs/components/) or [docs.superplane.com](https://docs.superplane.com). If the tool is similar to one we have (e.g. Railway ↔ Render), mention it in one line and use it as a pattern for components. + +When they're ready to lock in: short summary = what the tool is for, suggested components (one line each). Optionally one line on "connection looks like X—engineers can detail it." Don't make connection the main output. diff --git a/docs/contributing/integration-pm-workflow.md b/docs/contributing/integration-pm-workflow.md deleted file mode 100644 index 3d30b93d06..0000000000 --- a/docs/contributing/integration-pm-workflow.md +++ /dev/null @@ -1,123 +0,0 @@ -# Integration PM Workflow (Cursor) - -This guide explains how to use the **integration product management workflow** in Cursor to plan new integrations, generate issue templates, and create GitHub issues on the SuperPlane Board. The workflow uses Cursor Rules, Skills, Commands, and optional Subagents so you can run it consistently and repeatably. 
- -## Table of Contents - -- [Prerequisites](#prerequisites) -- [Workflow overview](#workflow-overview) -- [Step 1: Research the integration](#step-1-research-the-integration) -- [Step 2: Generate issue templates](#step-2-generate-issue-templates) -- [Step 3: Create issues on GitHub](#step-3-create-issues-on-github) -- [Using subagents (optional)](#using-subagents-optional) -- [Where everything lives](#where-everything-lives) -- [Troubleshooting](#troubleshooting) - -## Prerequisites - -- **Cursor** with Agent (Chat) available. -- **GitHub MCP** enabled and connected to the SuperPlane repo, with **projects** permissions so the agent can create issues, set Board fields (Integration Status, Priority), and attach sub-issues to the base issue. -- The integration PM primitives are already in the repo: `.cursor/rules/`, `.cursor/skills/`, `.cursor/commands/`, `.cursor/agents/` (see [Where everything lives](#where-everything-lives)). - -## Workflow overview - -The workflow has three steps. You run one command per step, review the output, and then move to the next. - -| Step | What you do | What the agent does | -|------|-------------|----------------------| -| **1. Research** | Run `/integration-research` with the tool name and links. | Checks existing GitHub issues for that integration, researches the tool, suggests base + components (triggers/actions) and P1–P4 priorities, and gives you a summary to review. | -| **2. Templates** | After you approve the research, run `/integration-issue-templates` (or ask the agent to generate templates). | Creates temp issue files under `tmp/integrations_pm/issues/` (one base + one per component). You review the files and request changes if needed. | -| **3. Log issues** | After you approve the templates, run `/integration-log-issues` with the integration name. | Creates the base issue on GitHub, then each component issue as a child; sets labels, Board Integration Status = Backlog, Priority (P1–P4), and attaches children as sub-issues. 
Tracks progress in a temp file; after you confirm on GitHub, cleans up the progress file. | - -You must complete each step and approve before moving to the next. The agent will not generate templates until you approve the research summary, and will not create GitHub issues until you approve the template files. - -## Step 1: Research the integration - -1. Open a new Agent chat in Cursor (or use an existing one). -2. Type **`/integration-research`** and add the tool name and links. For example: - - ``` - /integration-research Rootly - - Tool: https://rootly.com/ — end-to-end incident management platform. - Docs: https://docs.rootly.com/help-and-documentation - ``` - - Or in plain language: - - ``` - I want to integrate Rootly (https://rootly.com/, docs: https://docs.rootly.com/help-and-documentation) into SuperPlane. Run the integration research: check existing GitHub issues for this integration, then suggest the base integration and components (triggers/actions) with P1–P4 priorities, and give me a summary to review. - ``` - -3. The agent will: - - Use GitHub MCP to check for existing issues with that integration name (e.g. `[Rootly]`). - - Research the tool and suggest how it would connect to SuperPlane (base), plus a list of triggers and actions with P1–P4 priorities. - - Present a summary: existing issues (if any), base, and list of components with priorities. - -4. **Review the summary.** Ask for changes (e.g. add/remove components, change priorities). When you are satisfied, say you approve and want to generate the issue templates. - -## Step 2: Generate issue templates - -1. After you have approved the research summary, ask the agent to generate the issue template files. For example: - - ``` - Looks good. Generate the issue template files for this integration (base + components) in tmp/integrations_pm/issues/. - ``` - - Or run **`/integration-issue-templates`** and confirm the integration name if asked. - -2. 
The agent will create: - - One **base** file: `tmp/integrations_pm/issues/base/p1/{integration}_base.md` (or `base/p2/` depending on priority). - - One **component** file per trigger/action: `tmp/integrations_pm/issues/p1/{integration}_{component}.md` (or `p2/`). - -3. **Review the files** in `tmp/integrations_pm/issues/`. Open them in the editor and ask the agent to fix anything (e.g. description, use cases, configuration, output channels). When you are satisfied, say you want to create the issues on GitHub. - -## Step 3: Create issues on GitHub - -1. After you have approved the template files, run **`/integration-log-issues`** and specify the integration name if you have more than one in `tmp/integrations_pm/issues/`. For example: - - ``` - /integration-log-issues Rootly - ``` - - Or: “Create the GitHub issues from the Rootly template files in tmp/integrations_pm/issues/. Use the integration-log-issues workflow.” - -2. The agent will: - - Create a **progress file** in `tmp/integrations_pm/` (e.g. `log-progress-rootly.md`) so it can resume after context summary if needed. - - Create the **base issue** first (labels `integration`, `refinement`; Board Integration Status = Backlog, Priority from your agreed P1–P4). - - Create **one child issue per component** sequentially (to avoid rate limits), each with the same labels and Board fields, and attach each child as a **sub-issue** of the base issue. - - Optionally update `tmp/integrations_pm/issues/list.md` if it exists (checkboxes for created issues). - -3. **Review the issues on GitHub.** Check the SuperPlane Board: base issue, child issues, labels, Integration Status = Backlog, Priority, and that children are linked as sub-issues of the base. - -4. **Confirm in chat** when everything looks correct. The agent will then delete the progress file (and any other temp files it created for this run). It will **not** delete the source template files in `tmp/integrations_pm/issues/` unless you ask. 
- -**Note:** The agent strips the **Title** and **Priority** sections from the issue body when creating GitHub issues. Title becomes the issue title; Priority is set only via the Board project field, so it is not duplicated in the body. - -## Using subagents (optional) - -Subagents run in an **isolated context** and return a concise summary to the main chat. They are useful when research or issue creation would produce a lot of output and you want to keep the main conversation short. - -- **Integration researcher** — Use when you want the research step (existing-issues check + base + components + P1–P4) done in a separate context. Invoke by name, e.g. “Use the integration-researcher subagent to research Consul” or `/integration-researcher Consul`. The subagent returns the summary; you review and then continue with templates in the main chat. -- **Integration issue logger** — Use when you want the “create N issues via MCP” step done in isolation (e.g. many components). Invoke by name, e.g. “Use the integration-issue-logger subagent to create the GitHub issues for Rootly from the template files in tmp/integrations_pm/issues/.” The subagent creates the issues, tracks progress, and returns the list of issue numbers; you then review on GitHub and confirm so it can clean up. - -You can run the full workflow **without** subagents by using the three commands in order; subagents are optional. 
- -## Where everything lives - -| Purpose | Location | -|--------|----------| -| **Rule** (issue conventions, title format, triggers vs actions) | `.cursor/rules/integration-issue-conventions.mdc` | -| **Skills** (prioritization, issue templates, log-issues procedure) | `.cursor/skills/superplane-integration-prioritization/`, `superplane-integration-issue-templates/`, `superplane-integration-log-issues-github/` | -| **Commands** (slash workflows) | `.cursor/commands/integration-research.md`, `integration-issue-templates.md`, `integration-log-issues.md` | -| **Subagents** (optional) | `.cursor/agents/integration-researcher.md`, `integration-issue-logger.md` | -| **Temp template files** | `tmp/integrations_pm/issues/` (base in `base/p1/` or `base/p2/`, components in `p1/` or `p2/`) | -| **Progress file** (during log-issues; deleted after you confirm) | `tmp/integrations_pm/log-progress-{integration}.md` | -| **Helper for agents/maintainers** | `.cursor/PM_WORKFLOW_HELPER.md` (reference for Rules/Skills/Commands/Subagents and when to use which) | - -## Troubleshooting - -- **Commands don’t appear when I type `/`** — Ensure you are in Agent (Chat) and that the repo has `.cursor/commands/` with the integration PM command files. You can also describe what you want in plain language (e.g. “research Rootly for SuperPlane integration and suggest base + components + priorities”). -- **GitHub MCP can’t set Board fields or sub-issues** — The GitHub MCP connection must have **projects** permissions for the SuperPlane repo. Check your Cursor/MCP configuration. Without projects permission, the agent can still create issues and labels; you would set Integration Status, Priority, and sub-issue links manually on the Board. -- **Agent didn’t strip Title or Priority from the issue body** — The command and skill instruct the agent to omit the `### Title` and `### Priority` sections when building the GitHub issue body. 
If a run missed this, you can edit the issue body on GitHub to remove those lines, or re-run the workflow after fixing the instructions in `.cursor/commands/integration-log-issues.md` and `.cursor/skills/superplane-integration-log-issues-github/SKILL.md`. -- **I want to change how priorities or templates work** — Edit the relevant Skill or Rule under `.cursor/` (see [Where everything lives](#where-everything-lives)). For a quick reference on what each primitive does, see `.cursor/PM_WORKFLOW_HELPER.md`. From b09e969de03e3afead41cea4349d60a86e428ce7 Mon Sep 17 00:00:00 2001 From: Manideep Chopperla <130681531+Manideepchopperla@users.noreply.github.com> Date: Tue, 10 Feb 2026 20:50:56 +0530 Subject: [PATCH 042/160] feat: Add github.createIssueComment component (#2967) Signed-off-by: Muhammad Fuzail Zubari --- docs/components/GitHub.mdx | 48 +++++ .../github/create_issue_comment.go | 196 ++++++++++++++++++ .../github/create_issue_comment_test.go | 107 ++++++++++ pkg/integrations/github/example.go | 10 + .../example_output_create_issue_comment.json | 14 ++ pkg/integrations/github/github.go | 1 + .../mappers/github/create_issue_comment.ts | 35 ++++ .../pages/workflowv2/mappers/github/index.ts | 3 + 8 files changed, 414 insertions(+) create mode 100644 pkg/integrations/github/create_issue_comment.go create mode 100644 pkg/integrations/github/create_issue_comment_test.go create mode 100644 pkg/integrations/github/example_output_create_issue_comment.json create mode 100644 web_src/src/pages/workflowv2/mappers/github/create_issue_comment.ts diff --git a/docs/components/GitHub.mdx b/docs/components/GitHub.mdx index ea8c029e49..1e65971f71 100644 --- a/docs/components/GitHub.mdx +++ b/docs/components/GitHub.mdx @@ -24,6 +24,7 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + @@ -983,6 +984,53 @@ Returns the created issue object with details including: } ``` + + +## Create Issue Comment + +The Create Issue Comment component adds a comment to an existing 
GitHub issue or pull request. +Issues and pull requests share the same comment API in GitHub. + +### Use Cases + +- **Deployment updates**: Post deployment status or remediation updates to GitHub issues +- **Runbook linking**: Add runbook links, error details, or status for responders +- **Cross-platform sync**: Sync Slack or PagerDuty notes into GitHub as comments +- **Automated comments**: Add automated comments based on workflow events + +### Configuration + +- **Repository**: Select the GitHub repository containing the issue +- **Issue Number**: The issue or PR number to comment on (supports expressions) +- **Body**: The comment text (supports Markdown and expressions) + +### Output + +Returns the created comment object including: +- Comment ID and URL +- Comment body +- Author information +- Created timestamp + +### Example Output + +```json +{ + "data": { + "body": "Deployment to production completed successfully.", + "created_at": "2026-01-16T17:56:16Z", + "html_url": "https://github.com/acme/widgets/issues/42#issuecomment-5001", + "id": 5001, + "updated_at": "2026-01-16T17:56:16Z", + "user": { + "login": "superplane-app[bot]" + } + }, + "timestamp": "2026-01-16T17:56:16.680755501Z", + "type": "github.issueComment" +} +``` + ## Create Release diff --git a/pkg/integrations/github/create_issue_comment.go b/pkg/integrations/github/create_issue_comment.go new file mode 100644 index 0000000000..2ef2d7b77d --- /dev/null +++ b/pkg/integrations/github/create_issue_comment.go @@ -0,0 +1,196 @@ +package github + +import ( + "context" + "errors" + "fmt" + "strconv" + + "github.com/google/go-github/v74/github" + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type CreateIssueComment struct{} + +type CreateIssueCommentConfiguration struct { + Repository string `json:"repository" mapstructure:"repository"` + IssueNumber string `json:"issueNumber" 
mapstructure:"issueNumber"` + Body string `json:"body" mapstructure:"body"` +} + +func (c *CreateIssueComment) Name() string { + return "github.createIssueComment" +} + +func (c *CreateIssueComment) Label() string { + return "Create Issue Comment" +} + +func (c *CreateIssueComment) Description() string { + return "Add a comment to a GitHub issue or pull request" +} + +func (c *CreateIssueComment) Documentation() string { + return `The Create Issue Comment component adds a comment to an existing GitHub issue or pull request. +Issues and pull requests share the same comment API in GitHub. + +## Use Cases + +- **Deployment updates**: Post deployment status or remediation updates to GitHub issues +- **Runbook linking**: Add runbook links, error details, or status for responders +- **Cross-platform sync**: Sync Slack or PagerDuty notes into GitHub as comments +- **Automated comments**: Add automated comments based on workflow events + +## Configuration + +- **Repository**: Select the GitHub repository containing the issue +- **Issue Number**: The issue or PR number to comment on (supports expressions) +- **Body**: The comment text (supports Markdown and expressions) + +## Output + +Returns the created comment object including: +- Comment ID and URL +- Comment body +- Author information +- Created timestamp` +} + +func (c *CreateIssueComment) Icon() string { + return "github" +} + +func (c *CreateIssueComment) Color() string { + return "gray" +} + +func (c *CreateIssueComment) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *CreateIssueComment) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "repository", + Label: "Repository", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "repository", + UseNameAsValue: true, + }, + }, + }, + { + Name: 
"issueNumber", + Label: "Issue Number", + Type: configuration.FieldTypeString, + Required: true, + Description: "The issue or pull request number to comment on", + }, + { + Name: "body", + Label: "Body", + Type: configuration.FieldTypeText, + Required: true, + Description: "The comment text. Supports Markdown formatting.", + }, + } +} + +func (c *CreateIssueComment) Setup(ctx core.SetupContext) error { + var config CreateIssueCommentConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if config.Repository == "" { + return errors.New("repository is required") + } + + if config.IssueNumber == "" { + return errors.New("issue number is required") + } + + if config.Body == "" { + return errors.New("body is required") + } + + return ensureRepoInMetadata( + ctx.Metadata, + ctx.Integration, + ctx.Configuration, + ) +} + +func (c *CreateIssueComment) Execute(ctx core.ExecutionContext) error { + var config CreateIssueCommentConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + issueNumber, err := strconv.Atoi(config.IssueNumber) + if err != nil { + return fmt.Errorf("issue number is not a number: %v", err) + } + + var appMetadata Metadata + if err := mapstructure.Decode(ctx.Integration.GetMetadata(), &appMetadata); err != nil { + return fmt.Errorf("failed to decode application metadata: %w", err) + } + + client, err := NewClient(ctx.Integration, appMetadata.GitHubApp.ID, appMetadata.InstallationID) + if err != nil { + return fmt.Errorf("failed to initialize GitHub client: %w", err) + } + + comment := &github.IssueComment{ + Body: &config.Body, + } + + createdComment, _, err := client.Issues.CreateComment( + context.Background(), + appMetadata.Owner, + config.Repository, + issueNumber, + comment, + ) + + if err != nil { + return fmt.Errorf("failed to create issue 
comment: %w", err) + } + + return ctx.ExecutionState.Emit( + core.DefaultOutputChannel.Name, + "github.issueComment", + []any{createdComment}, + ) +} + +func (c *CreateIssueComment) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *CreateIssueComment) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return 200, nil +} + +func (c *CreateIssueComment) Actions() []core.Action { + return []core.Action{} +} + +func (c *CreateIssueComment) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *CreateIssueComment) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *CreateIssueComment) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/github/create_issue_comment_test.go b/pkg/integrations/github/create_issue_comment_test.go new file mode 100644 index 0000000000..a9848f1548 --- /dev/null +++ b/pkg/integrations/github/create_issue_comment_test.go @@ -0,0 +1,107 @@ +package github + +import ( + "testing" + + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + contexts "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__CreateIssueComment__Setup(t *testing.T) { + helloRepo := Repository{ID: 123456, Name: "hello", URL: "https://github.com/testhq/hello"} + component := CreateIssueComment{} + + t.Run("repository is required", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{} + err := component.Setup(core.SetupContext{ + Integration: integrationCtx, + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{"issueNumber": "42", "body": "test", "repository": ""}, + }) + + require.ErrorContains(t, err, "repository is required") + }) + + t.Run("issue number is required", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{} + err := component.Setup(core.SetupContext{ + Integration: integrationCtx, + Metadata: 
&contexts.MetadataContext{}, + Configuration: map[string]any{"issueNumber": "", "body": "test", "repository": "hello"}, + }) + + require.ErrorContains(t, err, "issue number is required") + }) + + t.Run("body is required", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{} + err := component.Setup(core.SetupContext{ + Integration: integrationCtx, + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{"issueNumber": "42", "body": "", "repository": "hello"}, + }) + + require.ErrorContains(t, err, "body is required") + }) + + t.Run("repository is not accessible", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{ + Metadata: Metadata{ + Repositories: []Repository{helloRepo}, + }, + } + err := component.Setup(core.SetupContext{ + Integration: integrationCtx, + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{"issueNumber": "42", "body": "test", "repository": "world"}, + }) + + require.ErrorContains(t, err, "repository world is not accessible to app installation") + }) + + t.Run("metadata is set successfully", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{ + Metadata: Metadata{ + Repositories: []Repository{helloRepo}, + }, + } + + nodeMetadataCtx := contexts.MetadataContext{} + require.NoError(t, component.Setup(core.SetupContext{ + Integration: integrationCtx, + Metadata: &nodeMetadataCtx, + Configuration: map[string]any{"issueNumber": "42", "body": "test", "repository": "hello"}, + })) + + require.Equal(t, nodeMetadataCtx.Get(), NodeMetadata{Repository: &helloRepo}) + }) +} + +func Test__CreateIssueComment__Execute(t *testing.T) { + component := CreateIssueComment{} + + t.Run("fails when issue number is not a number", func(t *testing.T) { + err := component.Execute(core.ExecutionContext{ + Integration: &contexts.IntegrationContext{}, + ExecutionState: &contexts.ExecutionStateContext{}, + Configuration: map[string]any{ + "issueNumber": "abc", + "body": "test 
comment", + "repository": "hello", + }, + }) + + require.ErrorContains(t, err, "issue number is not a number") + }) + + t.Run("fails when configuration decode fails", func(t *testing.T) { + err := component.Execute(core.ExecutionContext{ + Integration: &contexts.IntegrationContext{}, + ExecutionState: &contexts.ExecutionStateContext{}, + Configuration: "not a map", + }) + + require.ErrorContains(t, err, "failed to decode configuration") + }) +} diff --git a/pkg/integrations/github/example.go b/pkg/integrations/github/example.go index d8d1753271..7d86c258cb 100644 --- a/pkg/integrations/github/example.go +++ b/pkg/integrations/github/example.go @@ -10,6 +10,9 @@ import ( //go:embed example_output_create_issue.json var exampleOutputCreateIssueBytes []byte +//go:embed example_output_create_issue_comment.json +var exampleOutputCreateIssueCommentBytes []byte + //go:embed example_output_get_issue.json var exampleOutputGetIssueBytes []byte @@ -64,6 +67,9 @@ var exampleDataOnWorkflowRunBytes []byte var exampleOutputCreateIssueOnce sync.Once var exampleOutputCreateIssue map[string]any +var exampleOutputCreateIssueCommentOnce sync.Once +var exampleOutputCreateIssueComment map[string]any + var exampleOutputGetIssueOnce sync.Once var exampleOutputGetIssue map[string]any @@ -119,6 +125,10 @@ func (c *CreateIssue) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateIssueOnce, exampleOutputCreateIssueBytes, &exampleOutputCreateIssue) } +func (c *CreateIssueComment) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateIssueCommentOnce, exampleOutputCreateIssueCommentBytes, &exampleOutputCreateIssueComment) +} + func (c *GetIssue) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputGetIssueOnce, exampleOutputGetIssueBytes, &exampleOutputGetIssue) } diff --git a/pkg/integrations/github/example_output_create_issue_comment.json 
b/pkg/integrations/github/example_output_create_issue_comment.json new file mode 100644 index 0000000000..40dff3ebd3 --- /dev/null +++ b/pkg/integrations/github/example_output_create_issue_comment.json @@ -0,0 +1,14 @@ +{ + "data": { + "id": 5001, + "body": "Deployment to production completed successfully.", + "html_url": "https://github.com/acme/widgets/issues/42#issuecomment-5001", + "user": { + "login": "superplane-app[bot]" + }, + "created_at": "2026-01-16T17:56:16Z", + "updated_at": "2026-01-16T17:56:16Z" + }, + "timestamp": "2026-01-16T17:56:16.680755501Z", + "type": "github.issueComment" +} diff --git a/pkg/integrations/github/github.go b/pkg/integrations/github/github.go index 523886ba5e..0f8e6ff1a7 100644 --- a/pkg/integrations/github/github.go +++ b/pkg/integrations/github/github.go @@ -95,6 +95,7 @@ func (g *GitHub) Components() []core.Component { return []core.Component{ &GetIssue{}, &CreateIssue{}, + &CreateIssueComment{}, &UpdateIssue{}, &RunWorkflow{}, &PublishCommitStatus{}, diff --git a/web_src/src/pages/workflowv2/mappers/github/create_issue_comment.ts b/web_src/src/pages/workflowv2/mappers/github/create_issue_comment.ts new file mode 100644 index 0000000000..88775a52d7 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/github/create_issue_comment.ts @@ -0,0 +1,35 @@ +import { ComponentBaseProps } from "@/ui/componentBase"; +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + OutputPayload, + SubtitleContext, +} from "../types"; +import { baseProps } from "./base"; +import { buildGithubExecutionSubtitle } from "./utils"; +import { Comment } from "./types"; + +export const createIssueCommentMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + return baseProps(context.nodes, context.node, context.componentDefinition, context.lastExecutions); + }, + subtitle(context: SubtitleContext): string { + return buildGithubExecutionSubtitle(context.execution); + }, + + 
getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const details: Record = {}; + + if (!outputs?.default || outputs.default.length === 0) { + return details; + } + + const comment = outputs.default[0].data as Comment; + details["Created At"] = comment?.created_at ? new Date(comment.created_at).toLocaleString() : "-"; + details["URL"] = comment?.html_url || "-"; + + return details; + }, +}; diff --git a/web_src/src/pages/workflowv2/mappers/github/index.ts b/web_src/src/pages/workflowv2/mappers/github/index.ts index 8f0daafe5a..1a4136b528 100644 --- a/web_src/src/pages/workflowv2/mappers/github/index.ts +++ b/web_src/src/pages/workflowv2/mappers/github/index.ts @@ -11,6 +11,7 @@ import { onWorkflowRunTriggerRenderer } from "./on_workflow_run"; import { baseIssueMapper } from "./base"; import { RUN_WORKFLOW_STATE_REGISTRY, runWorkflowMapper, runWorkflowCustomFieldRenderer } from "./run_workflow"; import { publishCommitStatusMapper } from "./publish_commit_status"; +import { createIssueCommentMapper } from "./create_issue_comment"; import { createReleaseMapper } from "./create_release"; import { updateReleaseMapper } from "./update_release"; import { deleteReleaseMapper } from "./delete_release"; @@ -20,6 +21,7 @@ import { buildActionStateRegistry } from "../utils"; export const eventStateRegistry: Record = { runWorkflow: RUN_WORKFLOW_STATE_REGISTRY, createIssue: buildActionStateRegistry("created"), + createIssueComment: buildActionStateRegistry("created"), getIssue: buildActionStateRegistry("retrieved"), updateIssue: buildActionStateRegistry("updated"), publishCommitStatus: buildActionStateRegistry("published"), @@ -31,6 +33,7 @@ export const eventStateRegistry: Record = { export const componentMappers: Record = { createIssue: baseIssueMapper, + createIssueComment: createIssueCommentMapper, getIssue: baseIssueMapper, updateIssue: baseIssueMapper, runWorkflow: 
runWorkflowMapper, From fdd465bb7ea603b552d6e08322f4ccd269290e09 Mon Sep 17 00:00:00 2001 From: energypantry <34472578+energypantry@users.noreply.github.com> Date: Wed, 11 Feb 2026 00:14:44 +0800 Subject: [PATCH 043/160] feat(github): Add createReview component (#2940) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #2258 Adds a new GitHub integration component: `github.createReview`. - Config: `repository` (resource), `pullNumber`, `event` (APPROVE/REQUEST_CHANGES/COMMENT), optional `body`, optional `commitId`, optional inline `comments` (path/line/body) - Emits: `github.pullRequestReview` on the default output channel - Includes docs + example output + validation/setup tests Tests: - `go test ./pkg/integrations/github` --------- Signed-off-by: energypantry Co-authored-by: Pedro Leão <60622592+forestileao@users.noreply.github.com> Signed-off-by: Muhammad Fuzail Zubari --- docs/components/GitHub.mdx | 47 ++++ pkg/integrations/github/create_review.go | 215 ++++++++++++++++++ pkg/integrations/github/create_review_test.go | 128 +++++++++++ pkg/integrations/github/example.go | 14 ++ .../github/example_output_create_review.json | 17 ++ pkg/integrations/github/github.go | 1 + .../mappers/github/create_review.ts | 47 ++++ .../pages/workflowv2/mappers/github/index.ts | 3 + 8 files changed, 472 insertions(+) create mode 100644 pkg/integrations/github/create_review.go create mode 100644 pkg/integrations/github/create_review_test.go create mode 100644 pkg/integrations/github/example_output_create_review.json create mode 100644 web_src/src/pages/workflowv2/mappers/github/create_review.ts diff --git a/docs/components/GitHub.mdx b/docs/components/GitHub.mdx index 1e65971f71..7da0582f5f 100644 --- a/docs/components/GitHub.mdx +++ b/docs/components/GitHub.mdx @@ -26,6 +26,7 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + @@ -1076,6 +1077,52 @@ Returns the created release object with all release information 
including tag, a } ``` + + +## Create Review + +The Create Review component submits a pull request review (approve, request changes, or comment) on a GitHub pull request. + +### Use Cases + +- **Automation**: Auto-approve when checks pass +- **Quality gates**: Request changes when checks fail +- **Bots**: Post review feedback + +### Configuration + +- **Repository**: Select the GitHub repository +- **Pull Number**: Pull request number +- **Event**: APPROVE, REQUEST_CHANGES, or COMMENT +- **Body**: Optional review body (required for REQUEST_CHANGES and COMMENT) + +### Output + +Emits the submitted review object including: +- id, state, submitted_at +- body and user + +### Example Output + +```json +{ + "data": { + "body": "LGTM. Approving after successful CI.", + "html_url": "https://github.com/acme/widgets/pull/42#pullrequestreview-9001", + "id": 9001, + "state": "APPROVED", + "submitted_at": "2026-01-25T12:34:56Z", + "user": { + "html_url": "https://github.com/octocat", + "id": 1, + "login": "octocat" + } + }, + "timestamp": "2026-01-25T12:34:56.000000000Z", + "type": "github.pullRequestReview" +} +``` + ## Delete Release diff --git a/pkg/integrations/github/create_review.go b/pkg/integrations/github/create_review.go new file mode 100644 index 0000000000..d09768212f --- /dev/null +++ b/pkg/integrations/github/create_review.go @@ -0,0 +1,215 @@ +package github + +import ( + "context" + "errors" + "fmt" + "strconv" + "strings" + + "github.com/google/go-github/v74/github" + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type CreateReview struct{} + +type CreateReviewConfiguration struct { + Repository string `mapstructure:"repository" json:"repository"` + PullNumber string `mapstructure:"pullNumber" json:"pullNumber"` + Event string `mapstructure:"event" json:"event"` + Body *string `mapstructure:"body,omitempty" json:"body,omitempty"` +} + 
+func (c *CreateReview) Name() string { + return "github.createReview" +} + +func (c *CreateReview) Label() string { + return "Create Review" +} + +func (c *CreateReview) Description() string { + return "Submit a pull request review on GitHub" +} + +func (c *CreateReview) Documentation() string { + return `The Create Review component submits a pull request review (approve, request changes, or comment) on a GitHub pull request. + +## Use Cases + +- **Automation**: Auto-approve when checks pass +- **Quality gates**: Request changes when checks fail +- **Bots**: Post review feedback + +## Configuration + +- **Repository**: Select the GitHub repository +- **Pull Number**: Pull request number +- **Event**: APPROVE, REQUEST_CHANGES, or COMMENT +- **Body**: Optional review body (required for REQUEST_CHANGES and COMMENT) + +## Output + +Emits the submitted review object including: +- id, state, submitted_at +- body and user` +} + +func (c *CreateReview) Icon() string { + return "github" +} + +func (c *CreateReview) Color() string { + return "gray" +} + +func (c *CreateReview) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *CreateReview) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "repository", + Label: "Repository", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "repository", + UseNameAsValue: true, + }, + }, + }, + { + Name: "pullNumber", + Label: "Pull Number", + Type: configuration.FieldTypeString, + Required: true, + }, + { + Name: "event", + Label: "Event", + Type: configuration.FieldTypeSelect, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Select: &configuration.SelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "Approve", Value: "APPROVE"}, + {Label: "Request changes", Value: 
"REQUEST_CHANGES"}, + {Label: "Comment", Value: "COMMENT"}, + }, + }, + }, + }, + { + Name: "body", + Label: "Body", + Type: configuration.FieldTypeText, + Description: "Review body (required for REQUEST_CHANGES and COMMENT).", + RequiredConditions: []configuration.RequiredCondition{ + { + Field: "event", + Values: []string{"REQUEST_CHANGES", "COMMENT"}, + }, + }, + }, + } +} + +func (c *CreateReview) Setup(ctx core.SetupContext) error { + return ensureRepoInMetadata( + ctx.Metadata, + ctx.Integration, + ctx.Configuration, + ) +} + +func (c *CreateReview) Execute(ctx core.ExecutionContext) error { + var config CreateReviewConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if config.Repository == "" { + return errors.New("repository is required") + } + if config.PullNumber == "" { + return errors.New("pull number is required") + } + + pullNumber, err := strconv.Atoi(config.PullNumber) + if err != nil { + return fmt.Errorf("pull number is not a number: %v", err) + } + + event := strings.ToUpper(strings.TrimSpace(config.Event)) + if event != "APPROVE" && event != "REQUEST_CHANGES" && event != "COMMENT" { + return fmt.Errorf("invalid event: %s", config.Event) + } + + if (event == "REQUEST_CHANGES" || event == "COMMENT") && (config.Body == nil || strings.TrimSpace(*config.Body) == "") { + return fmt.Errorf("body is required for %s", event) + } + + var appMetadata Metadata + if err := mapstructure.Decode(ctx.Integration.GetMetadata(), &appMetadata); err != nil { + return fmt.Errorf("failed to decode integration metadata: %w", err) + } + + client, err := NewClient(ctx.Integration, appMetadata.GitHubApp.ID, appMetadata.InstallationID) + if err != nil { + return fmt.Errorf("failed to initialize GitHub client: %w", err) + } + + req := &github.PullRequestReviewRequest{ + Event: github.String(event), + } + if config.Body != nil && strings.TrimSpace(*config.Body) != "" { 
+ req.Body = config.Body + } + + review, _, err := client.PullRequests.CreateReview( + context.Background(), + appMetadata.Owner, + config.Repository, + pullNumber, + req, + ) + if err != nil { + return fmt.Errorf("failed to create review: %w", err) + } + + return ctx.ExecutionState.Emit( + core.DefaultOutputChannel.Name, + "github.pullRequestReview", + []any{review}, + ) +} + +func (c *CreateReview) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *CreateReview) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return 200, nil +} + +func (c *CreateReview) Actions() []core.Action { + return []core.Action{} +} + +func (c *CreateReview) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *CreateReview) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *CreateReview) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/github/create_review_test.go b/pkg/integrations/github/create_review_test.go new file mode 100644 index 0000000000..73e8d31055 --- /dev/null +++ b/pkg/integrations/github/create_review_test.go @@ -0,0 +1,128 @@ +package github + +import ( + "testing" + + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + contexts "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__CreateReview__Execute__Validation(t *testing.T) { + component := CreateReview{} + + t.Run("body is conditionally required for request changes and comment", func(t *testing.T) { + fields := component.Configuration() + var bodyFieldFound bool + for _, f := range fields { + if f.Name != "body" { + continue + } + + bodyFieldFound = true + require.Len(t, f.RequiredConditions, 1) + require.Equal(t, "event", f.RequiredConditions[0].Field) + require.ElementsMatch(t, []string{"REQUEST_CHANGES", "COMMENT"}, f.RequiredConditions[0].Values) + } + + require.True(t, bodyFieldFound, "expected to find 
body field in configuration") + }) + + t.Run("pull number is required", func(t *testing.T) { + err := component.Execute(core.ExecutionContext{ + Integration: &contexts.IntegrationContext{}, + NodeMetadata: &contexts.MetadataContext{}, + ExecutionState: &contexts.ExecutionStateContext{}, + Configuration: map[string]any{"repository": "hello", "pullNumber": "", "event": "APPROVE"}, + }) + require.ErrorContains(t, err, "pull number is required") + }) + + t.Run("pull number must be a number", func(t *testing.T) { + err := component.Execute(core.ExecutionContext{ + Integration: &contexts.IntegrationContext{}, + NodeMetadata: &contexts.MetadataContext{}, + ExecutionState: &contexts.ExecutionStateContext{}, + Configuration: map[string]any{"repository": "hello", "pullNumber": "abc", "event": "APPROVE"}, + }) + require.ErrorContains(t, err, "pull number is not a number") + }) + + t.Run("event must be valid", func(t *testing.T) { + err := component.Execute(core.ExecutionContext{ + Integration: &contexts.IntegrationContext{}, + NodeMetadata: &contexts.MetadataContext{}, + ExecutionState: &contexts.ExecutionStateContext{}, + Configuration: map[string]any{"repository": "hello", "pullNumber": "1", "event": "NOPE"}, + }) + require.ErrorContains(t, err, "invalid event") + }) + + t.Run("body is required for request changes", func(t *testing.T) { + err := component.Execute(core.ExecutionContext{ + Integration: &contexts.IntegrationContext{}, + NodeMetadata: &contexts.MetadataContext{}, + ExecutionState: &contexts.ExecutionStateContext{}, + Configuration: map[string]any{"repository": "hello", "pullNumber": "1", "event": "REQUEST_CHANGES", "body": ""}, + }) + require.ErrorContains(t, err, "body is required for REQUEST_CHANGES") + }) + + t.Run("body is required for comment", func(t *testing.T) { + err := component.Execute(core.ExecutionContext{ + Integration: &contexts.IntegrationContext{}, + NodeMetadata: &contexts.MetadataContext{}, + ExecutionState: &contexts.ExecutionStateContext{}, 
+ Configuration: map[string]any{"repository": "hello", "pullNumber": "1", "event": "COMMENT"}, + }) + require.ErrorContains(t, err, "body is required for COMMENT") + }) +} + +func Test__CreateReview__Setup(t *testing.T) { + helloRepo := Repository{ID: 123456, Name: "hello", URL: "https://github.com/testhq/hello"} + component := CreateReview{} + + t.Run("repository is required", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{} + err := component.Setup(core.SetupContext{ + Integration: integrationCtx, + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{"repository": ""}, + }) + + require.ErrorContains(t, err, "repository is required") + }) + + t.Run("repository is not accessible", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{ + Metadata: Metadata{ + Repositories: []Repository{helloRepo}, + }, + } + err := component.Setup(core.SetupContext{ + Integration: integrationCtx, + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{"repository": "world"}, + }) + + require.ErrorContains(t, err, "repository world is not accessible to app installation") + }) + + t.Run("metadata is set successfully", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{ + Metadata: Metadata{ + Repositories: []Repository{helloRepo}, + }, + } + + nodeMetadataCtx := contexts.MetadataContext{} + require.NoError(t, component.Setup(core.SetupContext{ + Integration: integrationCtx, + Metadata: &nodeMetadataCtx, + Configuration: map[string]any{"repository": "hello"}, + })) + + require.Equal(t, nodeMetadataCtx.Get(), NodeMetadata{Repository: &helloRepo}) + }) +} diff --git a/pkg/integrations/github/example.go b/pkg/integrations/github/example.go index 7d86c258cb..6a28626105 100644 --- a/pkg/integrations/github/example.go +++ b/pkg/integrations/github/example.go @@ -37,6 +37,9 @@ var exampleOutputDeleteReleaseBytes []byte //go:embed example_output_run_workflow.json var exampleOutputRunWorkflowBytes 
[]byte +//go:embed example_output_create_review.json +var exampleOutputCreateReviewBytes []byte + //go:embed example_data_on_issue_comment.json var exampleDataOnIssueCommentBytes []byte @@ -94,6 +97,9 @@ var exampleOutputDeleteRelease map[string]any var exampleOutputRunWorkflowOnce sync.Once var exampleOutputRunWorkflow map[string]any +var exampleOutputCreateReviewOnce sync.Once +var exampleOutputCreateReview map[string]any + var exampleDataOnIssueCommentOnce sync.Once var exampleDataOnIssueComment map[string]any @@ -165,6 +171,14 @@ func (c *RunWorkflow) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputRunWorkflowOnce, exampleOutputRunWorkflowBytes, &exampleOutputRunWorkflow) } +func (c *CreateReview) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON( + &exampleOutputCreateReviewOnce, + exampleOutputCreateReviewBytes, + &exampleOutputCreateReview, + ) +} + func (t *OnIssueComment) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleDataOnIssueCommentOnce, exampleDataOnIssueCommentBytes, &exampleDataOnIssueComment) } diff --git a/pkg/integrations/github/example_output_create_review.json b/pkg/integrations/github/example_output_create_review.json new file mode 100644 index 0000000000..4573658893 --- /dev/null +++ b/pkg/integrations/github/example_output_create_review.json @@ -0,0 +1,17 @@ +{ + "data": { + "id": 9001, + "body": "LGTM. 
Approving after successful CI.", + "state": "APPROVED", + "submitted_at": "2026-01-25T12:34:56Z", + "html_url": "https://github.com/acme/widgets/pull/42#pullrequestreview-9001", + "user": { + "login": "octocat", + "id": 1, + "html_url": "https://github.com/octocat" + } + }, + "timestamp": "2026-01-25T12:34:56.000000000Z", + "type": "github.pullRequestReview" +} + diff --git a/pkg/integrations/github/github.go b/pkg/integrations/github/github.go index 0f8e6ff1a7..a0d890104c 100644 --- a/pkg/integrations/github/github.go +++ b/pkg/integrations/github/github.go @@ -97,6 +97,7 @@ func (g *GitHub) Components() []core.Component { &CreateIssue{}, &CreateIssueComment{}, &UpdateIssue{}, + &CreateReview{}, &RunWorkflow{}, &PublishCommitStatus{}, &CreateRelease{}, diff --git a/web_src/src/pages/workflowv2/mappers/github/create_review.ts b/web_src/src/pages/workflowv2/mappers/github/create_review.ts new file mode 100644 index 0000000000..c28f1600fb --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/github/create_review.ts @@ -0,0 +1,47 @@ +import { ComponentBaseProps } from "@/ui/componentBase"; +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + OutputPayload, + SubtitleContext, +} from "../types"; +import { baseProps } from "./base"; +import { buildGithubExecutionSubtitle } from "./utils"; + +interface PullRequestReviewOutput { + id?: number; + state?: string; + body?: string; + html_url?: string; + submitted_at?: string; + user?: { + login?: string; + html_url?: string; + }; +} + +export const createReviewMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + return baseProps(context.nodes, context.node, context.componentDefinition, context.lastExecutions); + }, + + subtitle(context: SubtitleContext): string { + return buildGithubExecutionSubtitle(context.execution); + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { 
default?: OutputPayload[] } | undefined; + const details: Record = {}; + + if (!outputs || !outputs.default || outputs.default.length === 0) { + return details; + } + + const review = outputs.default[0].data as PullRequestReviewOutput; + details["Submitted At"] = review?.submitted_at ? new Date(review.submitted_at).toLocaleString() : "-"; + details["Review URL"] = review?.html_url || ""; + + return details; + }, +}; diff --git a/web_src/src/pages/workflowv2/mappers/github/index.ts b/web_src/src/pages/workflowv2/mappers/github/index.ts index 1a4136b528..6ddba1c879 100644 --- a/web_src/src/pages/workflowv2/mappers/github/index.ts +++ b/web_src/src/pages/workflowv2/mappers/github/index.ts @@ -16,6 +16,7 @@ import { createReleaseMapper } from "./create_release"; import { updateReleaseMapper } from "./update_release"; import { deleteReleaseMapper } from "./delete_release"; import { getReleaseMapper } from "./get_release"; +import { createReviewMapper } from "./create_review"; import { buildActionStateRegistry } from "../utils"; export const eventStateRegistry: Record = { @@ -24,6 +25,7 @@ export const eventStateRegistry: Record = { createIssueComment: buildActionStateRegistry("created"), getIssue: buildActionStateRegistry("retrieved"), updateIssue: buildActionStateRegistry("updated"), + createReview: buildActionStateRegistry("created"), publishCommitStatus: buildActionStateRegistry("published"), createRelease: buildActionStateRegistry("created"), updateRelease: buildActionStateRegistry("updated"), @@ -36,6 +38,7 @@ export const componentMappers: Record = { createIssueComment: createIssueCommentMapper, getIssue: baseIssueMapper, updateIssue: baseIssueMapper, + createReview: createReviewMapper, runWorkflow: runWorkflowMapper, publishCommitStatus: publishCommitStatusMapper, createRelease: createReleaseMapper, From 25ff7ecd54a511c5c794dc3d7c85bf8de0339c7e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Tue, 10 Feb 2026 18:17:43 +0100 Subject: 
[PATCH 044/160] feat: PagerDuty list log entries (#3012) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Emil Todorovski Signed-off-by: Igor Šarčević Co-authored-by: Emil Todorovski Signed-off-by: Muhammad Fuzail Zubari --- docs/components/PagerDuty.mdx | 88 ++++++ pkg/integrations/pagerduty/client.go | 35 +++ pkg/integrations/pagerduty/example.go | 10 + .../example_output_list_log_entries.json | 54 ++++ .../pagerduty/list_log_entries.go | 164 +++++++++++ .../pagerduty/list_log_entries_test.go | 272 ++++++++++++++++++ pkg/integrations/pagerduty/pagerduty.go | 1 + .../workflowv2/mappers/pagerduty/index.ts | 3 + .../mappers/pagerduty/list_log_entries.ts | 132 +++++++++ .../mappers/pagerduty/list_notes.ts | 31 +- .../workflowv2/mappers/pagerduty/types.ts | 23 ++ 11 files changed, 798 insertions(+), 15 deletions(-) create mode 100644 pkg/integrations/pagerduty/example_output_list_log_entries.json create mode 100644 pkg/integrations/pagerduty/list_log_entries.go create mode 100644 pkg/integrations/pagerduty/list_log_entries_test.go create mode 100644 web_src/src/pages/workflowv2/mappers/pagerduty/list_log_entries.ts diff --git a/docs/components/PagerDuty.mdx b/docs/components/PagerDuty.mdx index 7e352f496b..516b002614 100644 --- a/docs/components/PagerDuty.mdx +++ b/docs/components/PagerDuty.mdx @@ -20,6 +20,7 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + @@ -654,6 +655,93 @@ Returns a list of open incidents with: } ``` + + +## List Log Entries + +The List Log Entries component retrieves all log entries (audit trail) for a PagerDuty incident. 
+ +### Use Cases + +- **Audit trail**: Access complete incident history for compliance or review +- **Timeline reconstruction**: Build a detailed timeline of all incident activity +- **Incident analysis**: Analyze escalation patterns and response times +- **Forensics**: Review all actions taken during an incident + +### Configuration + +- **Incident ID**: The ID of the incident to list log entries for (e.g., A12BC34567...) +- **Limit**: Maximum number of log entries to return (default: 100) + +### Output + +Returns a list of log entries with: +- **id**: Log entry ID +- **type**: The type of log entry (e.g., trigger_log_entry, acknowledge_log_entry, annotate_log_entry) +- **summary**: A summary of what happened +- **created_at**: When the log entry was created +- **agent**: The agent (user or service) that caused the log entry +- **channel**: The channel through which the action was performed + +### Example Output + +```json +{ + "data": { + "log_entries": [ + { + "agent": { + "html_url": "https://acme.pagerduty.com/services/PLH1HKV", + "id": "PLH1HKV", + "summary": "API Service", + "type": "service_reference" + }, + "channel": { + "type": "api" + }, + "created_at": "2024-01-15T10:00:00Z", + "id": "Q02JTSNZWHSEKV", + "summary": "Triggered through the API", + "type": "trigger_log_entry" + }, + { + "agent": { + "html_url": "https://acme.pagerduty.com/users/PUSER01", + "id": "PUSER01", + "summary": "John Smith", + "type": "user_reference" + }, + "channel": { + "type": "web_ui" + }, + "created_at": "2024-01-15T10:15:00Z", + "id": "Q02JTSNZWHSEKW", + "summary": "Acknowledged by John Smith", + "type": "acknowledge_log_entry" + }, + { + "agent": { + "html_url": "https://acme.pagerduty.com/users/PUSER01", + "id": "PUSER01", + "summary": "John Smith", + "type": "user_reference" + }, + "channel": { + "type": "web_ui" + }, + "created_at": "2024-01-15T10:30:00Z", + "id": "Q02JTSNZWHSEKX", + "summary": "John Smith added a note", + "type": "annotate_log_entry" + } + ], + "total": 
3 + }, + "timestamp": "2024-01-15T11:00:00Z", + "type": "pagerduty.log_entries.list" +} +``` + ## List Notes diff --git a/pkg/integrations/pagerduty/client.go b/pkg/integrations/pagerduty/client.go index e6f68c5a14..abcefc9e49 100644 --- a/pkg/integrations/pagerduty/client.go +++ b/pkg/integrations/pagerduty/client.go @@ -742,3 +742,38 @@ func (c *Client) ListIncidentNotes(incidentID string) ([]Note, error) { return response.Notes, nil } + +// LogEntry represents a log entry for a PagerDuty incident +type LogEntry struct { + ID string `json:"id"` + Type string `json:"type"` + Summary string `json:"summary"` + CreatedAt string `json:"created_at"` + Agent *ServiceRef `json:"agent"` + Channel *LogChannel `json:"channel"` +} + +// LogChannel represents the channel through which a log entry was created +type LogChannel struct { + Type string `json:"type"` +} + +// ListIncidentLogEntries retrieves log entries for a given incident +func (c *Client) ListIncidentLogEntries(incidentID string, limit int) ([]LogEntry, error) { + apiURL := fmt.Sprintf("%s/incidents/%s/log_entries?limit=%d", c.BaseURL, incidentID, limit) + responseBody, err := c.execRequest(http.MethodGet, apiURL, nil) + if err != nil { + return nil, err + } + + var response struct { + LogEntries []LogEntry `json:"log_entries"` + } + + err = json.Unmarshal(responseBody, &response) + if err != nil { + return nil, fmt.Errorf("error parsing response: %v", err) + } + + return response.LogEntries, nil +} diff --git a/pkg/integrations/pagerduty/example.go b/pkg/integrations/pagerduty/example.go index 40c732f0b2..632ef83020 100644 --- a/pkg/integrations/pagerduty/example.go +++ b/pkg/integrations/pagerduty/example.go @@ -61,6 +61,12 @@ var exampleOutputListNotesBytes []byte var exampleOutputListNotesOnce sync.Once var exampleOutputListNotes map[string]any +//go:embed example_output_list_log_entries.json +var exampleOutputListLogEntriesBytes []byte + +var exampleOutputListLogEntriesOnce sync.Once +var 
exampleOutputListLogEntries map[string]any + func (c *CreateIncident) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateIncidentOnce, exampleOutputCreateIncidentBytes, &exampleOutputCreateIncident) } @@ -85,6 +91,10 @@ func (l *ListNotes) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputListNotesOnce, exampleOutputListNotesBytes, &exampleOutputListNotes) } +func (l *ListLogEntries) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputListLogEntriesOnce, exampleOutputListLogEntriesBytes, &exampleOutputListLogEntries) +} + func (t *OnIncident) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleDataOnIncidentOnce, exampleDataOnIncidentBytes, &exampleDataOnIncident) } diff --git a/pkg/integrations/pagerduty/example_output_list_log_entries.json b/pkg/integrations/pagerduty/example_output_list_log_entries.json new file mode 100644 index 0000000000..93542539b9 --- /dev/null +++ b/pkg/integrations/pagerduty/example_output_list_log_entries.json @@ -0,0 +1,54 @@ +{ + "type": "pagerduty.log_entries.list", + "data": { + "log_entries": [ + { + "id": "Q02JTSNZWHSEKV", + "type": "trigger_log_entry", + "summary": "Triggered through the API", + "created_at": "2024-01-15T10:00:00Z", + "agent": { + "id": "PLH1HKV", + "type": "service_reference", + "summary": "API Service", + "html_url": "https://acme.pagerduty.com/services/PLH1HKV" + }, + "channel": { + "type": "api" + } + }, + { + "id": "Q02JTSNZWHSEKW", + "type": "acknowledge_log_entry", + "summary": "Acknowledged by John Smith", + "created_at": "2024-01-15T10:15:00Z", + "agent": { + "id": "PUSER01", + "type": "user_reference", + "summary": "John Smith", + "html_url": "https://acme.pagerduty.com/users/PUSER01" + }, + "channel": { + "type": "web_ui" + } + }, + { + "id": "Q02JTSNZWHSEKX", + "type": "annotate_log_entry", + "summary": "John Smith added a note", + "created_at": "2024-01-15T10:30:00Z", + "agent": { 
+ "id": "PUSER01", + "type": "user_reference", + "summary": "John Smith", + "html_url": "https://acme.pagerduty.com/users/PUSER01" + }, + "channel": { + "type": "web_ui" + } + } + ], + "total": 3 + }, + "timestamp": "2024-01-15T11:00:00Z" +} diff --git a/pkg/integrations/pagerduty/list_log_entries.go b/pkg/integrations/pagerduty/list_log_entries.go new file mode 100644 index 0000000000..098b471116 --- /dev/null +++ b/pkg/integrations/pagerduty/list_log_entries.go @@ -0,0 +1,164 @@ +package pagerduty + +import ( + "errors" + "fmt" + "net/http" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type ListLogEntries struct{} + +type ListLogEntriesSpec struct { + IncidentID string `json:"incidentId"` + Limit int `json:"limit"` +} + +const defaultLogEntriesLimit = 100 + +func (l *ListLogEntries) Name() string { + return "pagerduty.listLogEntries" +} + +func (l *ListLogEntries) Label() string { + return "List Log Entries" +} + +func (l *ListLogEntries) Description() string { + return "List all log entries (audit trail) for a PagerDuty incident" +} + +func (l *ListLogEntries) Documentation() string { + return `The List Log Entries component retrieves all log entries (audit trail) for a PagerDuty incident. + +## Use Cases + +- **Audit trail**: Access complete incident history for compliance or review +- **Timeline reconstruction**: Build a detailed timeline of all incident activity +- **Incident analysis**: Analyze escalation patterns and response times +- **Forensics**: Review all actions taken during an incident + +## Configuration + +- **Incident ID**: The ID of the incident to list log entries for (e.g., A12BC34567...) 
+- **Limit**: Maximum number of log entries to return (default: 100) + +## Output + +Returns a list of log entries with: +- **id**: Log entry ID +- **type**: The type of log entry (e.g., trigger_log_entry, acknowledge_log_entry, annotate_log_entry) +- **summary**: A summary of what happened +- **created_at**: When the log entry was created +- **agent**: The agent (user or service) that caused the log entry +- **channel**: The channel through which the action was performed` +} + +func (l *ListLogEntries) Icon() string { + return "list" +} + +func (l *ListLogEntries) Color() string { + return "gray" +} + +func (l *ListLogEntries) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (l *ListLogEntries) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "incidentId", + Label: "Incident ID", + Type: configuration.FieldTypeString, + Required: true, + Description: "The ID of the incident to list log entries for (e.g., A12BC34567...)", + Placeholder: "e.g., A12BC34567...", + }, + { + Name: "limit", + Label: "Limit", + Type: configuration.FieldTypeNumber, + Required: false, + Description: "Maximum number of log entries to return (default: 100)", + Placeholder: "100", + }, + } +} + +func (l *ListLogEntries) Setup(ctx core.SetupContext) error { + spec := ListLogEntriesSpec{} + err := mapstructure.Decode(ctx.Configuration, &spec) + if err != nil { + return fmt.Errorf("error decoding configuration: %v", err) + } + + if spec.IncidentID == "" { + return errors.New("incidentId is required") + } + + return ctx.Metadata.Set(NodeMetadata{}) +} + +func (l *ListLogEntries) Execute(ctx core.ExecutionContext) error { + spec := ListLogEntriesSpec{} + err := mapstructure.Decode(ctx.Configuration, &spec) + if err != nil { + return fmt.Errorf("error decoding configuration: %v", err) + } + + limit := spec.Limit + if limit <= 0 { + limit = defaultLogEntriesLimit + } + + client, err := 
NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("error creating client: %v", err) + } + + logEntries, err := client.ListIncidentLogEntries(spec.IncidentID, limit) + if err != nil { + return fmt.Errorf("failed to list log entries: %v", err) + } + + responseData := map[string]any{ + "log_entries": logEntries, + "total": len(logEntries), + } + + return ctx.ExecutionState.Emit( + core.DefaultOutputChannel.Name, + "pagerduty.log_entries.list", + []any{responseData}, + ) +} + +func (l *ListLogEntries) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (l *ListLogEntries) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (l *ListLogEntries) Actions() []core.Action { + return []core.Action{} +} + +func (l *ListLogEntries) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (l *ListLogEntries) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (l *ListLogEntries) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/pagerduty/list_log_entries_test.go b/pkg/integrations/pagerduty/list_log_entries_test.go new file mode 100644 index 0000000000..152120aa48 --- /dev/null +++ b/pkg/integrations/pagerduty/list_log_entries_test.go @@ -0,0 +1,272 @@ +package pagerduty + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__ListLogEntries__Setup(t *testing.T) { + component := &ListLogEntries{} + + t.Run("valid configuration", func(t *testing.T) { + metadataCtx := &contexts.MetadataContext{} + + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "incidentId": "PT4KHLK", + }, + Metadata: metadataCtx, + }) + + require.NoError(t, err) + }) + + t.Run("valid 
configuration with limit", func(t *testing.T) { + metadataCtx := &contexts.MetadataContext{} + + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "incidentId": "PT4KHLK", + "limit": 50, + }, + Metadata: metadataCtx, + }) + + require.NoError(t, err) + }) + + t.Run("missing incidentId returns error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{}, + Metadata: &contexts.MetadataContext{}, + }) + + require.ErrorContains(t, err, "incidentId is required") + }) + + t.Run("empty incidentId returns error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "incidentId": "", + }, + Metadata: &contexts.MetadataContext{}, + }) + + require.ErrorContains(t, err, "incidentId is required") + }) +} + +func Test__ListLogEntries__Execute(t *testing.T) { + component := &ListLogEntries{} + + t.Run("successfully lists log entries", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + { + "log_entries": [ + { + "id": "Q1234567890", + "type": "trigger_log_entry", + "summary": "Triggered through the API", + "created_at": "2024-01-15T10:00:00Z", + "agent": { + "id": "PLH1HKV", + "type": "user_reference", + "summary": "John Smith", + "html_url": "https://acme.pagerduty.com/users/PLH1HKV" + }, + "channel": { + "type": "api" + } + }, + { + "id": "Q1234567891", + "type": "acknowledge_log_entry", + "summary": "Acknowledged by John Smith", + "created_at": "2024-01-15T10:15:00Z", + "agent": { + "id": "PLH1HKV", + "type": "user_reference", + "summary": "John Smith", + "html_url": "https://acme.pagerduty.com/users/PLH1HKV" + }, + "channel": { + "type": "web_ui" + } + } + ] + } + `)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypeAPIToken, + "apiToken": "test-token", + }, + } + + execCtx 
:= &contexts.ExecutionStateContext{} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "incidentId": "PT4KHLK", + }, + HTTP: httpContext, + Integration: integrationCtx, + ExecutionState: execCtx, + }) + + require.NoError(t, err) + assert.True(t, execCtx.Finished) + assert.True(t, execCtx.Passed) + assert.Equal(t, "pagerduty.log_entries.list", execCtx.Type) + assert.Equal(t, core.DefaultOutputChannel.Name, execCtx.Channel) + + // Verify the request was made correctly + require.Len(t, httpContext.Requests, 1) + assert.Equal(t, http.MethodGet, httpContext.Requests[0].Method) + assert.Contains(t, httpContext.Requests[0].URL.String(), "/incidents/PT4KHLK/log_entries") + assert.Contains(t, httpContext.Requests[0].URL.String(), "limit=100") + + // Verify response contains expected data + require.Len(t, execCtx.Payloads, 1) + wrappedPayload, ok := execCtx.Payloads[0].(map[string]any) + require.True(t, ok) + responseData, ok := wrappedPayload["data"].(map[string]any) + require.True(t, ok) + assert.Equal(t, 2, responseData["total"]) + }) + + t.Run("successfully lists log entries with custom limit", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + { + "log_entries": [ + { + "id": "Q1234567890", + "type": "trigger_log_entry", + "summary": "Triggered through the API", + "created_at": "2024-01-15T10:00:00Z" + } + ] + } + `)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypeAPIToken, + "apiToken": "test-token", + }, + } + + execCtx := &contexts.ExecutionStateContext{} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "incidentId": "PT4KHLK", + "limit": 50, + }, + HTTP: httpContext, + Integration: integrationCtx, + ExecutionState: execCtx, + }) + + require.NoError(t, err) + + // Verify the request used the custom limit + 
require.Len(t, httpContext.Requests, 1) + assert.Contains(t, httpContext.Requests[0].URL.String(), "limit=50") + }) + + t.Run("successfully lists empty log entries", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + { + "log_entries": [] + } + `)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypeAPIToken, + "apiToken": "test-token", + }, + } + + execCtx := &contexts.ExecutionStateContext{} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "incidentId": "PT4KHLK", + }, + HTTP: httpContext, + Integration: integrationCtx, + ExecutionState: execCtx, + }) + + require.NoError(t, err) + assert.True(t, execCtx.Finished) + assert.True(t, execCtx.Passed) + + // Verify response contains expected data + require.Len(t, execCtx.Payloads, 1) + wrappedPayload, ok := execCtx.Payloads[0].(map[string]any) + require.True(t, ok) + responseData, ok := wrappedPayload["data"].(map[string]any) + require.True(t, ok) + assert.Equal(t, 0, responseData["total"]) + }) + + t.Run("API error returns error", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusNotFound, + Body: io.NopCloser(strings.NewReader(`{"error": "Incident not found"}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypeAPIToken, + "apiToken": "test-token", + }, + } + + execCtx := &contexts.ExecutionStateContext{} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "incidentId": "INVALID", + }, + HTTP: httpContext, + Integration: integrationCtx, + ExecutionState: execCtx, + }) + + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to list log entries") + }) +} diff --git a/pkg/integrations/pagerduty/pagerduty.go 
b/pkg/integrations/pagerduty/pagerduty.go index 11861f6844..6e9c08a048 100644 --- a/pkg/integrations/pagerduty/pagerduty.go +++ b/pkg/integrations/pagerduty/pagerduty.go @@ -139,6 +139,7 @@ func (p *PagerDuty) Components() []core.Component { &AnnotateIncident{}, &ListIncidents{}, &ListNotes{}, + &ListLogEntries{}, &SnoozeIncident{}, } } diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts index 28ea68f434..8cacb7f46f 100644 --- a/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts @@ -7,6 +7,7 @@ import { updateIncidentMapper } from "./update_incident"; import { annotateIncidentMapper } from "./annotate_incident"; import { listIncidentsMapper, LIST_INCIDENTS_STATE_REGISTRY } from "./list_incidents"; import { listNotesMapper } from "./list_notes"; +import { listLogEntriesMapper } from "./list_log_entries"; import { snoozeIncidentMapper } from "./snooze_incident"; import { buildActionStateRegistry } from "../utils"; @@ -16,6 +17,7 @@ export const componentMappers: Record = { annotateIncident: annotateIncidentMapper, listIncidents: listIncidentsMapper, listNotes: listNotesMapper, + listLogEntries: listLogEntriesMapper, snoozeIncident: snoozeIncidentMapper, }; @@ -31,5 +33,6 @@ export const eventStateRegistry: Record = { annotateIncident: buildActionStateRegistry("annotated"), listIncidents: LIST_INCIDENTS_STATE_REGISTRY, listNotes: buildActionStateRegistry("listed"), + listLogEntries: buildActionStateRegistry("listed"), snoozeIncident: buildActionStateRegistry("snoozed"), }; diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/list_log_entries.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/list_log_entries.ts new file mode 100644 index 0000000000..2759983c57 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/list_log_entries.ts @@ -0,0 +1,132 @@ +import pdIcon from 
"@/assets/icons/integrations/pagerduty.svg"; +import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { MetadataItem } from "@/ui/metadataList"; +import { getBackgroundColorClass } from "@/utils/colors"; +import { formatTimeAgo } from "@/utils/date"; +import { getState, getStateMap, getTriggerRenderer } from ".."; +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + ExecutionInfo, + OutputPayload, + SubtitleContext, +} from "../types"; +import { ListLogEntriesResponse, LogEntry } from "./types"; + +/** + * Extracts the first payload from execution outputs. + */ +function getFirstPayload(execution: ExecutionInfo): OutputPayload | null { + const outputs = execution.outputs as { default?: OutputPayload[] } | undefined; + if (!outputs) return null; + + if (outputs.default && outputs.default.length > 0) { + return outputs.default[0]; + } + + return null; +} + +/** + * Extracts log entries from the execution payload. + */ +function getLogEntries(execution: ExecutionInfo): LogEntry[] { + const payload = getFirstPayload(execution); + if (!payload || !payload.data) return []; + + const responseData = payload.data as ListLogEntriesResponse | undefined; + if (!responseData || !responseData.log_entries) return []; + + return responseData.log_entries; +} + +export const listLogEntriesMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + const lastExecution = + context.lastExecutions && context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; + const componentName = context.componentDefinition?.name || "unknown"; + + return { + iconSrc: pdIcon, + collapsedBackground: getBackgroundColorClass(context.componentDefinition?.color), + collapsed: context.node?.isCollapsed ?? false, + title: + context.node?.name || + context.componentDefinition?.label || + context.componentDefinition?.name || + "Unnamed component", + eventSections: lastExecution ? 
baseEventSections(context.nodes, lastExecution, componentName) : undefined, + metadata: metadataList(context.node), + includeEmptyState: !lastExecution, + eventStateMap: getStateMap(componentName), + }; + }, + + subtitle(context: SubtitleContext): string { + const timeAgo = formatTimeAgo(new Date(context.execution.createdAt!)); + const logEntries = getLogEntries(context.execution); + + if (logEntries.length > 0) { + return `${logEntries.length} log entr${logEntries.length === 1 ? "y" : "ies"} · ${timeAgo}`; + } + + return `no log entries · ${timeAgo}`; + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const details: Record = {}; + + // Add "Checked at" timestamp + if (context.execution.createdAt) { + details["Checked at"] = new Date(context.execution.createdAt).toLocaleString(); + } + + const logEntries = getLogEntries(context.execution); + details["Log Entries"] = `${logEntries.length} log entr${logEntries.length === 1 ? "y" : "ies"} fetched`; + + return details; + }, +}; + +function metadataList(node: { configuration?: unknown }): MetadataItem[] { + const metadata: MetadataItem[] = []; + if (!node) return metadata; + const configuration = node.configuration as any; + + if (configuration.incidentId) { + metadata.push({ icon: "alert-triangle", label: `Incident: ${configuration.incidentId}` }); + } + + if (configuration.limit) { + metadata.push({ icon: "hash", label: `Limit: ${configuration.limit}` }); + } + + return metadata; +} + +function baseEventSections(nodes: { id: string }[], execution: ExecutionInfo, componentName: string): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer((rootTriggerNode as any)?.trigger?.name || ""); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent! 
}); + + const logEntries = getLogEntries(execution); + const timeAgo = formatTimeAgo(new Date(execution.createdAt!)); + + let eventSubtitle: string; + if (logEntries.length > 0) { + eventSubtitle = `${logEntries.length} log entr${logEntries.length === 1 ? "y" : "ies"} · ${timeAgo}`; + } else { + eventSubtitle = `no log entries · ${timeAgo}`; + } + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle, + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts index 76d0fd602f..7a361d9ec2 100644 --- a/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts @@ -1,18 +1,17 @@ +import pdIcon from "@/assets/icons/integrations/pagerduty.svg"; import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { MetadataItem } from "@/ui/metadataList"; import { getBackgroundColorClass } from "@/utils/colors"; +import { formatTimeAgo } from "@/utils/date"; import { getState, getStateMap, getTriggerRenderer } from ".."; import { ComponentBaseContext, ComponentBaseMapper, ExecutionDetailsContext, ExecutionInfo, - NodeInfo, OutputPayload, SubtitleContext, } from "../types"; -import { MetadataItem } from "@/ui/metadataList"; -import pdIcon from "@/assets/icons/integrations/pagerduty.svg"; -import { formatTimeAgo } from "@/utils/date"; import { ListNotesResponse, Note } from "./types"; /** @@ -44,17 +43,18 @@ function getNotes(execution: ExecutionInfo): Note[] { export const listNotesMapper: ComponentBaseMapper = { props(context: ComponentBaseContext): ComponentBaseProps { - const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; - const componentName = context.componentDefinition.name ?? 
"pagerduty"; + const lastExecution = + context.lastExecutions && context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; + const componentName = context.componentDefinition?.name || "unknown"; return { iconSrc: pdIcon, - collapsedBackground: getBackgroundColorClass(context.componentDefinition.color), - collapsed: context.node.isCollapsed, + collapsedBackground: getBackgroundColorClass(context.componentDefinition?.color), + collapsed: context.node?.isCollapsed ?? false, title: - context.node.name || - context.componentDefinition.label || - context.componentDefinition.name || + context.node?.name || + context.componentDefinition?.label || + context.componentDefinition?.name || "Unnamed component", eventSections: lastExecution ? baseEventSections(context.nodes, lastExecution, componentName) : undefined, metadata: metadataList(context.node), @@ -89,8 +89,9 @@ export const listNotesMapper: ComponentBaseMapper = { }, }; -function metadataList(node: NodeInfo): MetadataItem[] { +function metadataList(node: { configuration?: unknown }): MetadataItem[] { const metadata: MetadataItem[] = []; + if (!node) return metadata; const configuration = node.configuration as any; if (configuration.incidentId) { @@ -100,9 +101,9 @@ function metadataList(node: NodeInfo): MetadataItem[] { return metadata; } -function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { +function baseEventSections(nodes: { id: string }[], execution: ExecutionInfo, componentName: string): EventSection[] { const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); - const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName ?? ""); + const rootTriggerRenderer = getTriggerRenderer((rootTriggerNode as any)?.trigger?.name || ""); const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent! 
}); const notes = getNotes(execution); @@ -121,7 +122,7 @@ function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componen eventTitle: title, eventSubtitle, eventState: getState(componentName)(execution), - eventId: execution.rootEvent!.id!, + eventId: execution.rootEvent!.id, }, ]; } diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts index e0ff6c05db..7945b0ddb9 100644 --- a/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts @@ -67,3 +67,26 @@ export interface ListNotesResponse { notes: Note[]; total: number; } + +export interface LogEntry { + id?: string; + type?: string; + summary?: string; + created_at?: string; + agent?: ResourceRef; + channel?: LogChannel; +} + +export interface LogChannel { + type?: string; +} + +export interface ListLogEntriesConfiguration { + incidentId?: string; + limit?: number; +} + +export interface ListLogEntriesResponse { + log_entries: LogEntry[]; + total: number; +} From 15422b5ec9dc4005602b8c286257e54350b10a2b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pedro=20Le=C3=A3o?= <60622592+forestileao@users.noreply.github.com> Date: Tue, 10 Feb 2026 15:33:42 -0300 Subject: [PATCH 045/160] fix: allow integrations of the same type + allow editing integration name (#3019) closes: https://github.com/superplanehq/superplane/issues/3008 closes: https://github.com/superplanehq/superplane/issues/3009 [feat: make it possible to update integration name](https://github.com/superplanehq/superplane/pull/3019/commits/015a0f3b3653f23719d795d0ba685043cc3d4947) [fix: make it possible to add integration of the same type](https://github.com/superplanehq/superplane/pull/3019/commits/4fd70fe4cf5ba16b12ec46424597a384e89b8dec) image image image --------- Signed-off-by: Pedro F. 
Leao Signed-off-by: Muhammad Fuzail Zubari --- api/swagger/superplane.swagger.json | 3 + .../organizations/update_integration.go | 31 ++++- .../organizations/update_integration_test.go | 69 +++++++++- pkg/grpc/organization_service.go | 8 +- ...l_organizations_update_integration_body.go | 36 +++++ pkg/protos/organizations/organizations.pb.go | 13 +- protos/organizations.proto | 1 + web_src/src/api-client/types.gen.ts | 1 + web_src/src/hooks/useIntegrations.ts | 5 +- .../settings/IntegrationDetails.tsx | 49 ++++++- .../organization/settings/Integrations.tsx | 52 +++++-- web_src/src/ui/componentSidebar/index.tsx | 129 +++++++++++------- 12 files changed, 319 insertions(+), 78 deletions(-) diff --git a/api/swagger/superplane.swagger.json b/api/swagger/superplane.swagger.json index 5599bd9dd0..cc3be53aed 100644 --- a/api/swagger/superplane.swagger.json +++ b/api/swagger/superplane.swagger.json @@ -4783,6 +4783,9 @@ "properties": { "configuration": { "type": "object" + }, + "name": { + "type": "string" } } }, diff --git a/pkg/grpc/actions/organizations/update_integration.go b/pkg/grpc/actions/organizations/update_integration.go index b8437c0bb5..1b35faded1 100644 --- a/pkg/grpc/actions/organizations/update_integration.go +++ b/pkg/grpc/actions/organizations/update_integration.go @@ -2,6 +2,7 @@ package organizations import ( "context" + "errors" "fmt" "maps" @@ -18,9 +19,20 @@ import ( "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "gorm.io/datatypes" + "gorm.io/gorm" ) -func UpdateIntegration(ctx context.Context, registry *registry.Registry, oidcProvider oidc.Provider, baseURL string, webhooksBaseURL string, orgID string, integrationID string, configuration map[string]any) (*pb.UpdateIntegrationResponse, error) { +func UpdateIntegration( + ctx context.Context, + registry *registry.Registry, + oidcProvider oidc.Provider, + baseURL string, + webhooksBaseURL string, + orgID string, + integrationID string, + configuration map[string]any, + name string, +) 
(*pb.UpdateIntegrationResponse, error) { org, err := uuid.Parse(orgID) if err != nil { return nil, status.Errorf(codes.InvalidArgument, "invalid organization ID: %v", err) @@ -36,11 +48,28 @@ func UpdateIntegration(ctx context.Context, registry *registry.Registry, oidcPro return nil, status.Errorf(codes.NotFound, "integration not found: %v", err) } + if name != "" && name != instance.InstallationName { + existing, err := models.FindIntegrationByName(org, name) + if err == nil && existing.ID != instance.ID { + return nil, status.Errorf(codes.AlreadyExists, "an integration with the name %s already exists in this organization", name) + } + + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return nil, status.Error(codes.Internal, "failed to verify integration name uniqueness") + } + + instance.InstallationName = name + } + integration, err := registry.GetIntegration(instance.AppName) if err != nil { return nil, status.Errorf(codes.Internal, "integration %s not found", instance.AppName) } + if configuration == nil { + configuration = map[string]any{} + } + existingConfig := instance.Configuration.Data() configuration, err = encryptConfigurationIfNeeded(ctx, registry, integration, configuration, instance.ID, existingConfig) if err != nil { diff --git a/pkg/grpc/actions/organizations/update_integration_test.go b/pkg/grpc/actions/organizations/update_integration_test.go index 7461d97d33..1e410794d3 100644 --- a/pkg/grpc/actions/organizations/update_integration_test.go +++ b/pkg/grpc/actions/organizations/update_integration_test.go @@ -48,7 +48,7 @@ func Test__UpdateIntegration(t *testing.T) { // // Update the integration configuration // - updateResponse, err := UpdateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), integrationID, map[string]any{"key": "value2", "new_key": "new_value"}) + updateResponse, err := UpdateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), integrationID, 
map[string]any{"key": "value2", "new_key": "new_value"}, "") require.NoError(t, err) require.NotNil(t, updateResponse) require.NotNil(t, updateResponse.Integration) @@ -105,7 +105,7 @@ func Test__UpdateIntegration(t *testing.T) { // // Update the integration configuration (this should fail) // - updateResponse, err := UpdateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), integrationID, map[string]any{"key": "value2"}) + updateResponse, err := UpdateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), integrationID, map[string]any{"key": "value2"}, "") require.NoError(t, err) require.NotNil(t, updateResponse) @@ -128,7 +128,7 @@ func Test__UpdateIntegration(t *testing.T) { // // Try to update with an invalid integration ID // - _, err := UpdateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), "invalid-uuid", map[string]any{"key": "value"}) + _, err := UpdateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), "invalid-uuid", map[string]any{"key": "value"}, "") require.Error(t, err) s, ok := status.FromError(err) assert.True(t, ok) @@ -140,7 +140,7 @@ func Test__UpdateIntegration(t *testing.T) { // // Try to update a non-existent integration // - _, err := UpdateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), uuid.NewString(), map[string]any{"key": "value"}) + _, err := UpdateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), uuid.NewString(), map[string]any{"key": "value"}, "") require.Error(t, err) s, ok := status.FromError(err) assert.True(t, ok) @@ -173,7 +173,7 @@ func Test__UpdateIntegration(t *testing.T) { // // Update only one key // - _, err = UpdateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), integrationID, map[string]any{"key2": "updated_value2"}) + _, err = UpdateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), 
integrationID, map[string]any{"key2": "updated_value2"}, "") require.NoError(t, err) // @@ -186,4 +186,63 @@ func Test__UpdateIntegration(t *testing.T) { assert.Equal(t, "updated_value2", config["key2"], "key2 should be updated") assert.Equal(t, "value3", config["key3"], "key3 should be preserved") }) + + t.Run("update integration name -> integration updated", func(t *testing.T) { + r.Registry.Integrations["dummy"] = support.NewDummyIntegration(support.DummyIntegrationOptions{ + OnSync: func(ctx core.SyncContext) error { + ctx.Integration.Ready() + return nil + }, + }) + + integrationName := support.RandomName("integration") + appConfig, err := structpb.NewStruct(map[string]any{"key": "value1"}) + require.NoError(t, err) + + createResponse, err := CreateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), "dummy", integrationName, appConfig) + require.NoError(t, err) + integrationID := createResponse.Integration.Metadata.Id + + updatedName := support.RandomName("integration") + updateResponse, err := UpdateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), integrationID, nil, updatedName) + require.NoError(t, err) + require.NotNil(t, updateResponse) + require.NotNil(t, updateResponse.Integration) + + _, err = models.FindIntegrationByName(r.Organization.ID, integrationName) + require.Error(t, err) + + integration, err := models.FindIntegrationByName(r.Organization.ID, updatedName) + require.NoError(t, err) + assert.Equal(t, updatedName, integration.InstallationName) + assert.Equal(t, updatedName, updateResponse.Integration.Metadata.Name) + }) + + t.Run("update integration name to existing name -> already exists", func(t *testing.T) { + r.Registry.Integrations["dummy"] = support.NewDummyIntegration(support.DummyIntegrationOptions{ + OnSync: func(ctx core.SyncContext) error { + ctx.Integration.Ready() + return nil + }, + }) + + firstName := support.RandomName("integration") + secondName := 
support.RandomName("integration") + appConfig, err := structpb.NewStruct(map[string]any{"key": "value1"}) + require.NoError(t, err) + + _, err = CreateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), "dummy", firstName, appConfig) + require.NoError(t, err) + + secondIntegration, err := CreateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), "dummy", secondName, appConfig) + require.NoError(t, err) + + _, err = UpdateIntegration(ctx, r.Registry, nil, baseURL, baseURL, r.Organization.ID.String(), secondIntegration.Integration.Metadata.Id, nil, firstName) + require.Error(t, err) + + s, ok := status.FromError(err) + require.True(t, ok) + assert.Equal(t, codes.AlreadyExists, s.Code()) + assert.Contains(t, s.Message(), "already exists") + }) } diff --git a/pkg/grpc/organization_service.go b/pkg/grpc/organization_service.go index 24793f9cfa..679e423ab8 100644 --- a/pkg/grpc/organization_service.go +++ b/pkg/grpc/organization_service.go @@ -129,6 +129,11 @@ func (s *OrganizationService) CreateIntegration(ctx context.Context, req *pb.Cre func (s *OrganizationService) UpdateIntegration(ctx context.Context, req *pb.UpdateIntegrationRequest) (*pb.UpdateIntegrationResponse, error) { orgID := ctx.Value(authorization.DomainIdContextKey).(string) + configuration := map[string]any{} + if req.Configuration != nil { + configuration = req.Configuration.AsMap() + } + return organizations.UpdateIntegration( ctx, s.registry, @@ -137,7 +142,8 @@ func (s *OrganizationService) UpdateIntegration(ctx context.Context, req *pb.Upd s.webhooksBaseURL, orgID, req.IntegrationId, - req.Configuration.AsMap(), + configuration, + req.Name, ) } diff --git a/pkg/openapi_client/model_organizations_update_integration_body.go b/pkg/openapi_client/model_organizations_update_integration_body.go index 0cf4dfcde5..bf62162c8e 100644 --- a/pkg/openapi_client/model_organizations_update_integration_body.go +++ 
b/pkg/openapi_client/model_organizations_update_integration_body.go @@ -21,6 +21,7 @@ var _ MappedNullable = &OrganizationsUpdateIntegrationBody{} // OrganizationsUpdateIntegrationBody struct for OrganizationsUpdateIntegrationBody type OrganizationsUpdateIntegrationBody struct { Configuration map[string]interface{} `json:"configuration,omitempty"` + Name *string `json:"name,omitempty"` } // NewOrganizationsUpdateIntegrationBody instantiates a new OrganizationsUpdateIntegrationBody object @@ -72,6 +73,38 @@ func (o *OrganizationsUpdateIntegrationBody) SetConfiguration(v map[string]inter o.Configuration = v } +// GetName returns the Name field value if set, zero value otherwise. +func (o *OrganizationsUpdateIntegrationBody) GetName() string { + if o == nil || IsNil(o.Name) { + var ret string + return ret + } + return *o.Name +} + +// GetNameOk returns a tuple with the Name field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OrganizationsUpdateIntegrationBody) GetNameOk() (*string, bool) { + if o == nil || IsNil(o.Name) { + return nil, false + } + return o.Name, true +} + +// HasName returns a boolean if a field has been set. +func (o *OrganizationsUpdateIntegrationBody) HasName() bool { + if o != nil && !IsNil(o.Name) { + return true + } + + return false +} + +// SetName gets a reference to the given string and assigns it to the Name field. 
+func (o *OrganizationsUpdateIntegrationBody) SetName(v string) { + o.Name = &v +} + func (o OrganizationsUpdateIntegrationBody) MarshalJSON() ([]byte, error) { toSerialize, err := o.ToMap() if err != nil { @@ -85,6 +118,9 @@ func (o OrganizationsUpdateIntegrationBody) ToMap() (map[string]interface{}, err if !IsNil(o.Configuration) { toSerialize["configuration"] = o.Configuration } + if !IsNil(o.Name) { + toSerialize["name"] = o.Name + } return toSerialize, nil } diff --git a/pkg/protos/organizations/organizations.pb.go b/pkg/protos/organizations/organizations.pb.go index d8ea2bec63..bd809f8b69 100644 --- a/pkg/protos/organizations/organizations.pb.go +++ b/pkg/protos/organizations/organizations.pb.go @@ -1590,6 +1590,7 @@ type UpdateIntegrationRequest struct { Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` IntegrationId string `protobuf:"bytes,2,opt,name=integration_id,json=integrationId,proto3" json:"integration_id,omitempty"` Configuration *_struct.Struct `protobuf:"bytes,3,opt,name=configuration,proto3" json:"configuration,omitempty"` + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -1645,6 +1646,13 @@ func (x *UpdateIntegrationRequest) GetConfiguration() *_struct.Struct { return nil } +func (x *UpdateIntegrationRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + type UpdateIntegrationResponse struct { state protoimpl.MessageState `protogen:"open.v1"` Integration *Integration `protobuf:"bytes,1,opt,name=integration,proto3" json:"integration,omitempty"` @@ -2564,11 +2572,12 @@ const file_organizations_proto_rawDesc = "" + "\x16IntegrationResourceRef\x12\x12\n" + "\x04type\x18\x01 \x01(\tR\x04type\x12\x12\n" + "\x04name\x18\x02 \x01(\tR\x04name\x12\x0e\n" + - "\x02id\x18\x03 \x01(\tR\x02id\"\x90\x01\n" + + "\x02id\x18\x03 \x01(\tR\x02id\"\xa4\x01\n" + "\x18UpdateIntegrationRequest\x12\x0e\n" + "\x02id\x18\x01 
\x01(\tR\x02id\x12%\n" + "\x0eintegration_id\x18\x02 \x01(\tR\rintegrationId\x12=\n" + - "\rconfiguration\x18\x03 \x01(\v2\x17.google.protobuf.StructR\rconfiguration\"d\n" + + "\rconfiguration\x18\x03 \x01(\v2\x17.google.protobuf.StructR\rconfiguration\x12\x12\n" + + "\x04name\x18\x04 \x01(\tR\x04name\"d\n" + "\x19UpdateIntegrationResponse\x12G\n" + "\vintegration\x18\x01 \x01(\v2%.Superplane.Organizations.IntegrationR\vintegration\"Q\n" + "\x18DeleteIntegrationRequest\x12\x0e\n" + diff --git a/protos/organizations.proto b/protos/organizations.proto index 454cfc290f..d98708b9cc 100644 --- a/protos/organizations.proto +++ b/protos/organizations.proto @@ -375,6 +375,7 @@ message UpdateIntegrationRequest { string id = 1; string integration_id = 2; google.protobuf.Struct configuration = 3; + string name = 4; } message UpdateIntegrationResponse { diff --git a/web_src/src/api-client/types.gen.ts b/web_src/src/api-client/types.gen.ts index be271040a7..8a782d424f 100644 --- a/web_src/src/api-client/types.gen.ts +++ b/web_src/src/api-client/types.gen.ts @@ -752,6 +752,7 @@ export type OrganizationsUpdateIntegrationBody = { configuration?: { [key: string]: unknown; }; + name?: string; }; export type OrganizationsUpdateIntegrationResponse = { diff --git a/web_src/src/hooks/useIntegrations.ts b/web_src/src/hooks/useIntegrations.ts index 36f0fb3f28..a13b9c5029 100644 --- a/web_src/src/hooks/useIntegrations.ts +++ b/web_src/src/hooks/useIntegrations.ts @@ -153,12 +153,13 @@ export const useUpdateIntegration = (organizationId: string, integrationId: stri const queryClient = useQueryClient(); return useMutation({ - mutationFn: async (configuration: Record) => { + mutationFn: async (data: { name?: string; configuration?: Record }) => { return await organizationsUpdateIntegration( withOrganizationHeader({ path: { id: organizationId, integrationId }, body: { - configuration, + name: data.name, + configuration: data.configuration, }, }), ); diff --git 
a/web_src/src/pages/organization/settings/IntegrationDetails.tsx b/web_src/src/pages/organization/settings/IntegrationDetails.tsx index f6dcb0e53d..dc0a36ebca 100644 --- a/web_src/src/pages/organization/settings/IntegrationDetails.tsx +++ b/web_src/src/pages/organization/settings/IntegrationDetails.tsx @@ -8,9 +8,12 @@ import { useUpdateIntegration, } from "@/hooks/useIntegrations"; import { Button } from "@/components/ui/button"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; import { ConfigurationFieldRenderer } from "@/ui/configurationFieldRenderer"; import type { ConfigurationField } from "@/api-client"; import { showErrorToast } from "@/utils/toast"; +import { getApiErrorMessage } from "@/utils/errors"; import { getIntegrationTypeDisplayName } from "@/utils/integrationDisplayName"; import { IntegrationIcon } from "@/ui/componentSidebar/integrationIcons"; import { IntegrationInstructions } from "@/ui/IntegrationInstructions"; @@ -26,6 +29,7 @@ export function IntegrationDetails({ organizationId }: IntegrationDetailsProps) const { integrationId } = useParams<{ integrationId: string }>(); const { canAct, isLoading: permissionsLoading } = usePermissions(); const [configValues, setConfigValues] = useState>({}); + const [integrationName, setIntegrationName] = useState(""); const [showDeleteConfirm, setShowDeleteConfirm] = useState(false); const canUpdateIntegrations = canAct("integrations", "update"); const canDeleteIntegrations = canAct("integrations", "delete"); @@ -47,6 +51,10 @@ export function IntegrationDetails({ organizationId }: IntegrationDetailsProps) } }, [integration]); + useEffect(() => { + setIntegrationName(integration?.metadata?.name || integration?.spec?.integrationName || ""); + }, [integration?.metadata?.name, integration?.spec?.integrationName]); + // Group usedIn nodes by workflow const workflowGroups = useMemo(() => { if (!integration?.status?.usedIn) return []; @@ -74,10 +82,20 @@ export 
function IntegrationDetails({ organizationId }: IntegrationDetailsProps) const handleConfigSubmit = async (e: React.FormEvent) => { e.preventDefault(); if (!canUpdateIntegrations) return; + + const nextName = integrationName.trim(); + if (!nextName) { + showErrorToast("Integration name is required"); + return; + } + try { - await updateMutation.mutateAsync(configValues); + await updateMutation.mutateAsync({ + name: nextName, + configuration: configValues, + }); } catch (_error) { - showErrorToast("Failed to update configuration"); + showErrorToast("Failed to update integration"); } }; @@ -242,6 +260,21 @@ export function IntegrationDetails({ organizationId }: IntegrationDetailsProps) className="w-full" >
    +
    + +

    A unique name for this integration

    + setIntegrationName(e.target.value)} + placeholder="e.g., my-app-integration" + disabled={!canUpdateIntegrations} + /> +
    + {integrationDef.configuration.map((field: ConfigurationField) => ( - {updateMutation.isError && ( - Failed to update configuration + + Failed to update integration: {getApiErrorMessage(updateMutation.error)} + )}
    diff --git a/web_src/src/pages/organization/settings/Integrations.tsx b/web_src/src/pages/organization/settings/Integrations.tsx index 826c90911c..2fc9896829 100644 --- a/web_src/src/pages/organization/settings/Integrations.tsx +++ b/web_src/src/pages/organization/settings/Integrations.tsx @@ -39,13 +39,36 @@ export function Integrations({ organizationId }: IntegrationsProps) { const createIntegrationMutation = useCreateIntegration(organizationId); const isLoading = loadingAvailable || loadingInstalled; + const integrationNames = useMemo(() => { + return new Set( + organizationIntegrations.map((integration) => integration.metadata?.name?.trim()).filter(Boolean) as string[], + ); + }, [organizationIntegrations]); + const selectedInstructions = useMemo(() => { return selectedIntegration?.instructions?.trim(); }, [selectedIntegration?.instructions]); + + const getNextIntegrationName = (baseName?: string) => { + const normalizedBaseName = baseName?.trim() || "integration"; + if (!integrationNames.has(normalizedBaseName)) { + return normalizedBaseName; + } + + let suffix = 2; + let candidate = `${normalizedBaseName}-${suffix}`; + while (integrationNames.has(candidate)) { + suffix += 1; + candidate = `${normalizedBaseName}-${suffix}`; + } + + return candidate; + }; + const handleConnectClick = (integration: IntegrationsIntegrationDefinition) => { if (!canCreateIntegrations) return; setSelectedIntegration(integration); - setIntegrationName(integration.name || ""); + setIntegrationName(getNextIntegrationName(integration.name)); setConfiguration({}); setIsModalOpen(true); }; @@ -99,7 +122,11 @@ export function Integrations({ organizationId }: IntegrationsProps) {

    Connected

    {[...organizationIntegrations] - .sort((a, b) => (a.spec?.integrationName || "").localeCompare(b.spec?.integrationName || "")) + .sort((a, b) => + (a.metadata?.name || a.spec?.integrationName || "").localeCompare( + b.metadata?.name || b.spec?.integrationName || "", + ), + ) .map((integration) => { const integrationDefinition = availableIntegrations.find( (a) => a.name === integration.spec?.integrationName, @@ -108,6 +135,10 @@ export function Integrations({ organizationId }: IntegrationsProps) { integrationDefinition?.label || getIntegrationTypeDisplayName(undefined, integration.spec?.integrationName) || integration.spec?.integrationName; + const integrationDisplayName = + integration.metadata?.name || + getIntegrationTypeDisplayName(undefined, integration.spec?.integrationName) || + integration.spec?.integrationName; const integrationName = integrationDefinition?.name || integration.spec?.integrationName; const statusLabel = integration.status?.state ? integration.status.state.charAt(0).toUpperCase() + integration.status.state.slice(1) @@ -128,11 +159,11 @@ export function Integrations({ organizationId }: IntegrationsProps) {

    - {integrationLabel || - integration.metadata?.name || - getIntegrationTypeDisplayName(undefined, integration.spec?.integrationName) || - integration.spec?.integrationName} + {integrationDisplayName}

    + {integrationLabel && integrationDisplayName !== integrationLabel ? ( +

    Type: {integrationLabel}

    + ) : null} {integrationDefinition?.description ? (

    {integrationDefinition.description} @@ -182,19 +213,14 @@ export function Integrations({ organizationId }: IntegrationsProps) {

    Available

    - {availableIntegrations.filter( - (integration) => !organizationIntegrations.some((i) => i.spec?.integrationName === integration.name), - ).length === 0 ? ( + {availableIntegrations.length === 0 ? (
    -

    You've connected all integrations!

    +

    No integrations available.

    ) : (
    {[...availableIntegrations] - .filter( - (integration) => !organizationIntegrations.some((i) => i.spec?.integrationName === integration.name), - ) .sort((a, b) => (a.label || a.name || "").localeCompare(b.label || b.name || "")) .map((app) => { const appName = app.name; diff --git a/web_src/src/ui/componentSidebar/index.tsx b/web_src/src/ui/componentSidebar/index.tsx index 8480307e15..03a05b07cc 100644 --- a/web_src/src/ui/componentSidebar/index.tsx +++ b/web_src/src/ui/componentSidebar/index.tsx @@ -254,6 +254,7 @@ export const ComponentSidebar = ({ const [createIntegrationName, setCreateIntegrationName] = useState(""); const [createIntegrationConfig, setCreateIntegrationConfig] = useState>({}); const [configureIntegrationId, setConfigureIntegrationId] = useState(null); + const [configureIntegrationName, setConfigureIntegrationName] = useState(""); // Use autocompleteExampleObj directly - current node is already filtered out upstream const resolvedAutocompleteExampleObj = autocompleteExampleObj ?? 
null; @@ -327,21 +328,30 @@ export const ComponentSidebar = ({ const handleCloseConfigureIntegrationDialog = useCallback(() => { setConfigureIntegrationId(null); + setConfigureIntegrationName(""); setConfigureIntegrationConfig({}); updateIntegrationMutation.reset(); }, [updateIntegrationMutation]); const handleConfigureIntegrationSubmit = useCallback(async () => { if (!configureIntegrationId || !domainId) return; + + const nextName = configureIntegrationName.trim(); + if (!nextName) { + showErrorToast("Integration name is required"); + return; + } + try { - await updateIntegrationMutation.mutateAsync(configureIntegrationConfig); + await updateIntegrationMutation.mutateAsync({ name: nextName, configuration: configureIntegrationConfig }); handleCloseConfigureIntegrationDialog(); } catch (_error) { - showErrorToast("Failed to update configuration"); + showErrorToast("Failed to update integration"); } }, [ configureIntegrationId, domainId, + configureIntegrationName, configureIntegrationConfig, updateIntegrationMutation, handleCloseConfigureIntegrationDialog, @@ -378,6 +388,12 @@ export const ComponentSidebar = ({ } }, [configureIntegration?.spec?.configuration]); + useEffect(() => { + setConfigureIntegrationName( + configureIntegration?.metadata?.name || configureIntegration?.spec?.integrationName || "", + ); + }, [configureIntegration?.metadata?.name, configureIntegration?.spec?.integrationName]); + // Seed open ids from incoming props (without closing already open ones) useEffect(() => { const seeded = new Set(openEventIds); @@ -1003,16 +1019,30 @@ export const ComponentSidebar = ({ className="mb-6" /> )} - {configureIntegrationDefinition?.configuration && - configureIntegrationDefinition.configuration.length > 0 ? ( -
    { - e.preventDefault(); - void handleConfigureIntegrationSubmit(); - }} - className="space-y-4" - > - {configureIntegrationDefinition.configuration.map((field: ConfigurationField) => { + { + e.preventDefault(); + void handleConfigureIntegrationSubmit(); + }} + className="space-y-4" + > +
    + +

    A unique name for this integration

    + setConfigureIntegrationName(e.target.value)} + placeholder="e.g., my-app-integration" + /> +
    + + {configureIntegrationDefinition?.configuration && + configureIntegrationDefinition.configuration.length > 0 ? ( + configureIntegrationDefinition.configuration.map((field: ConfigurationField) => { if (!field.name) return null; return ( ); - })} - - - - - {updateIntegrationMutation.isError && ( -
    -

    - Failed to update configuration: {getApiErrorMessage(updateIntegrationMutation.error)} -

    -
    - )} - - ) : ( -

    No configuration fields available.

    - )} + }) + ) : ( +

    No configuration fields available.

    + )} + + + + + + {updateIntegrationMutation.isError && ( +
    +

    + Failed to update integration: {getApiErrorMessage(updateIntegrationMutation.error)} +

    +
    + )} + ) : null} From 13244451b6bf9ef9b1177529147490a329698640 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pedro=20Le=C3=A3o?= <60622592+forestileao@users.noreply.github.com> Date: Tue, 10 Feb 2026 16:19:37 -0300 Subject: [PATCH 046/160] fix: show integration error on the sidebar (#3020) image image Closes: https://github.com/superplanehq/superplane/issues/3010 Signed-off-by: Pedro F. Leao Signed-off-by: Muhammad Fuzail Zubari --- .../src/ui/componentSidebar/SettingsTab.tsx | 121 +++++++++++------- web_src/src/ui/componentSidebar/index.tsx | 8 +- 2 files changed, 79 insertions(+), 50 deletions(-) diff --git a/web_src/src/ui/componentSidebar/SettingsTab.tsx b/web_src/src/ui/componentSidebar/SettingsTab.tsx index 3ad9dece1a..965ed95f09 100644 --- a/web_src/src/ui/componentSidebar/SettingsTab.tsx +++ b/web_src/src/ui/componentSidebar/SettingsTab.tsx @@ -14,6 +14,7 @@ import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@ import { ConfigurationFieldRenderer } from "@/ui/configurationFieldRenderer"; import { isFieldRequired, isFieldVisible, parseDefaultValues, validateFieldForSubmission } from "@/utils/components"; import { useRealtimeValidation } from "@/hooks/useRealtimeValidation"; +import { SimpleTooltip } from "./SimpleTooltip"; interface SettingsTabProps { mode: "create" | "edit"; @@ -354,56 +355,78 @@ export function SettingsTab({
    {selectedIntegrationFull && ( -
    -
    - -
    -

    - {getIntegrationTypeDisplayName(undefined, selectedIntegrationFull.spec?.integrationName) || - "Integration"} -

    -
    -
    -
    - - {selectedIntegrationFull.status?.state - ? selectedIntegrationFull.status.state.charAt(0).toUpperCase() + - selectedIntegrationFull.status.state.slice(1) - : "Unknown"} - - {selectedIntegrationFull.metadata?.id && onOpenConfigureIntegrationDialog && ( - - )} -
    -
    +
    + +
    +

    + {getIntegrationTypeDisplayName( + undefined, + selectedIntegrationFull.spec?.integrationName, + ) || "Integration"} +

    +
    +
    +
    + + {selectedIntegrationFull.status?.state + ? selectedIntegrationFull.status.state.charAt(0).toUpperCase() + + selectedIntegrationFull.status.state.slice(1) + : "Unknown"} + + {selectedIntegrationFull.metadata?.id && onOpenConfigureIntegrationDialog && ( + + )} +
    +
    + ); + + if (hasIntegrationError) { + return ( + + {integrationStatusCard} + + ); + } + + return integrationStatusCard; + })()} + )} )} diff --git a/web_src/src/ui/componentSidebar/index.tsx b/web_src/src/ui/componentSidebar/index.tsx index 03a05b07cc..553c516c94 100644 --- a/web_src/src/ui/componentSidebar/index.tsx +++ b/web_src/src/ui/componentSidebar/index.tsx @@ -6,7 +6,7 @@ import { Input } from "@/components/ui/input"; import { Label } from "@/components/ui/label"; import { getIntegrationTypeDisplayName } from "@/utils/integrationDisplayName"; import { resolveIcon } from "@/lib/utils"; -import { Check, Copy, Loader2, X } from "lucide-react"; +import { Check, Copy, Loader2, TriangleAlert, X } from "lucide-react"; import React, { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { getHeaderIconSrc, IntegrationIcon } from "@/ui/componentSidebar/integrationIcons"; import { @@ -1012,6 +1012,12 @@ export const ComponentSidebar = ({
    + {configureIntegration.status?.state === "error" && configureIntegration.status?.stateDescription && ( +
    + +

    {configureIntegration.status.stateDescription}

    +
    + )} {configureIntegration?.status?.browserAction && ( Date: Tue, 10 Feb 2026 21:10:28 +0100 Subject: [PATCH 047/160] feat: Add rootly.createEvent component (#2979) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Description Closes #2821 This PR adds the Rootly **Create Event** action so SuperPlane workflows can post timeline notes/annotations to Rootly incidents. The action targets Rootly’s incident events endpoint and supports optional visibility (`internal`/`external`) while keeping defaults consistent with Rootly. ## Video Demo https://github.com/user-attachments/assets/578e7a93-97f9-4e66-b79b-6264601e1e5b ## **Backend Implementation** - Added `rootly.createEvent` action with `incidentId`, `event`, and optional `visibility` validation in setup. - Implemented Rootly client support for `POST /incidents/{id}/events` with JSON:API request/response structs for incident events. - Emits `rootly.incident.event` on success with `id`, `event`, `visibility`, `occurred_at`, and `created_at`. - Added example output fixture and unit tests covering success and error paths. ## **Frontend Implementation** - Added Rootly Create Event mapper to render execution details and metadata for incident ID and visibility. - Introduced `IncidentEvent` type and details formatter to display event content, visibility, and timestamps. - Registered the new mapper and event state registry for consistent execution state handling. - Reuses Rootly icon and color patterns to match existing integration styling. 
## Documentation - Auto-generated component documentation via relevant make command ## Checklist - [x] Tests (existing + new component) pass - [x] Code compiles without errors - [x] Signed-off commits - [x] Example output JSON (if required) - [x] Updated component documentation - [x] Attached video of component working --------- Signed-off-by: devroy10 Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Rootly.mdx | 44 +++++ pkg/integrations/rootly/client.go | 78 ++++++++ pkg/integrations/rootly/create_event.go | 174 ++++++++++++++++ pkg/integrations/rootly/create_event_test.go | 185 ++++++++++++++++++ pkg/integrations/rootly/example.go | 10 + .../rootly/example_output_create_event.json | 11 ++ pkg/integrations/rootly/rootly.go | 1 + .../pages/workflowv2/mappers/rootly/base.ts | 24 ++- .../workflowv2/mappers/rootly/create_event.ts | 84 ++++++++ .../pages/workflowv2/mappers/rootly/index.ts | 3 + .../pages/workflowv2/mappers/rootly/types.ts | 8 + 11 files changed, 621 insertions(+), 1 deletion(-) create mode 100644 pkg/integrations/rootly/create_event.go create mode 100644 pkg/integrations/rootly/create_event_test.go create mode 100644 pkg/integrations/rootly/example_output_create_event.json create mode 100644 web_src/src/pages/workflowv2/mappers/rootly/create_event.ts diff --git a/docs/components/Rootly.mdx b/docs/components/Rootly.mdx index ba2ce32e62..29c1d4ee7b 100644 --- a/docs/components/Rootly.mdx +++ b/docs/components/Rootly.mdx @@ -15,6 +15,7 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; ## Actions + @@ -68,6 +69,49 @@ This trigger automatically sets up a Rootly webhook endpoint when configured. Th } ``` + + +## Create Event + +The Create Event component adds a timeline event (note/annotation) to a Rootly incident. 
+ +### Use Cases + +- **Investigation notes**: Add detailed investigation notes to the incident timeline +- **Status updates**: Post automated status updates as workflows progress +- **Cross-system sync**: Sync comments from external tools into the incident timeline + +### Configuration + +- **Incident ID**: The Rootly incident UUID to add the event to (required, supports expressions) +- **Event**: The note/annotation text (required, supports expressions) +- **Visibility**: Internal or external visibility (optional, default per Rootly) + +### Output + +Returns the created incident event with: +- **id**: Event ID +- **event**: Event content +- **visibility**: Event visibility +- **occurred_at**: Event timestamp +- **created_at**: Creation timestamp + +### Example Output + +```json +{ + "data": { + "created_at": "2026-02-10T07:34:35.902-8:00", + "event": "Investigation update: database connections stabilized.", + "id": "a2d32bb7-0417-4d0d-8483-a583c3-7853", + "occurred_at": "2026-02-10T07:34:35.902-8:00", + "visibility": "internal" + }, + "timestamp": "2026-02-10T15:34:36.09877478Z", + "type": "rootly.incident.event" +} +``` + ## Create Incident diff --git a/pkg/integrations/rootly/client.go b/pkg/integrations/rootly/client.go index 3c41a19db1..04670a525b 100644 --- a/pkg/integrations/rootly/client.go +++ b/pkg/integrations/rootly/client.go @@ -227,6 +227,32 @@ type IncidentsResponse struct { Data []IncidentData `json:"data"` } +// IncidentEvent represents a Rootly incident event (timeline note). 
+type IncidentEvent struct { + ID string `json:"id"` + Event string `json:"event"` + Visibility string `json:"visibility"` + OccurredAt string `json:"occurred_at"` + CreatedAt string `json:"created_at"` +} + +type IncidentEventData struct { + ID string `json:"id"` + Type string `json:"type"` + Attributes IncidentEventAttributes `json:"attributes"` +} + +type IncidentEventAttributes struct { + Event string `json:"event"` + Visibility string `json:"visibility"` + OccurredAt string `json:"occurred_at"` + CreatedAt string `json:"created_at"` +} + +type IncidentEventResponse struct { + Data IncidentEventData `json:"data"` +} + // CreateIncidentRequest represents the request to create an incident type CreateIncidentRequest struct { Data CreateIncidentData `json:"data"` @@ -285,6 +311,58 @@ func (c *Client) CreateIncident(title, summary, severity string) (*Incident, err }, nil } +// CreateIncidentEventRequest represents the request to create an incident event. +type CreateIncidentEventRequest struct { + Data CreateIncidentEventData `json:"data"` +} + +type CreateIncidentEventData struct { + Type string `json:"type"` + Attributes CreateIncidentEventAttributes `json:"attributes"` +} + +type CreateIncidentEventAttributes struct { + Event string `json:"event"` + Visibility string `json:"visibility,omitempty"` +} + +func (c *Client) CreateIncidentEvent(incidentID, event, visibility string) (*IncidentEvent, error) { + request := CreateIncidentEventRequest{ + Data: CreateIncidentEventData{ + Type: "incident_events", + Attributes: CreateIncidentEventAttributes{ + Event: event, + Visibility: visibility, + }, + }, + } + + body, err := json.Marshal(request) + if err != nil { + return nil, fmt.Errorf("error marshaling request: %v", err) + } + + url := fmt.Sprintf("%s/incidents/%s/events", c.BaseURL, incidentID) + responseBody, err := c.execRequest(http.MethodPost, url, bytes.NewReader(body)) + if err != nil { + return nil, err + } + + var response IncidentEventResponse + err = 
json.Unmarshal(responseBody, &response) + if err != nil { + return nil, fmt.Errorf("error parsing response: %v", err) + } + + return &IncidentEvent{ + ID: response.Data.ID, + Event: response.Data.Attributes.Event, + Visibility: response.Data.Attributes.Visibility, + OccurredAt: response.Data.Attributes.OccurredAt, + CreatedAt: response.Data.Attributes.CreatedAt, + }, nil +} + func (c *Client) GetIncident(id string) (*Incident, error) { url := fmt.Sprintf("%s/incidents/%s", c.BaseURL, id) responseBody, err := c.execRequest(http.MethodGet, url, nil) diff --git a/pkg/integrations/rootly/create_event.go b/pkg/integrations/rootly/create_event.go new file mode 100644 index 0000000000..f99ac96d5d --- /dev/null +++ b/pkg/integrations/rootly/create_event.go @@ -0,0 +1,174 @@ +package rootly + +import ( + "errors" + "fmt" + "net/http" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type CreateEvent struct{} + +type CreateEventSpec struct { + IncidentID string `json:"incidentId"` + Event string `json:"event"` + Visibility string `json:"visibility"` +} + +func (c *CreateEvent) Name() string { + return "rootly.createEvent" +} + +func (c *CreateEvent) Label() string { + return "Create Event" +} + +func (c *CreateEvent) Description() string { + return "Add a timeline event to a Rootly incident" +} + +func (c *CreateEvent) Documentation() string { + return `The Create Event component adds a timeline event (note/annotation) to a Rootly incident. 
+ +## Use Cases + +- **Investigation notes**: Add detailed investigation notes to the incident timeline +- **Status updates**: Post automated status updates as workflows progress +- **Cross-system sync**: Sync comments from external tools into the incident timeline + +## Configuration + +- **Incident ID**: The Rootly incident UUID to add the event to (required, supports expressions) +- **Event**: The note/annotation text (required, supports expressions) +- **Visibility**: Internal or external visibility (optional, default per Rootly) + +## Output + +Returns the created incident event with: +- **id**: Event ID +- **event**: Event content +- **visibility**: Event visibility +- **occurred_at**: Event timestamp +- **created_at**: Creation timestamp` +} + +func (c *CreateEvent) Icon() string { + return "message-square" +} + +func (c *CreateEvent) Color() string { + return "gray" +} + +func (c *CreateEvent) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *CreateEvent) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "incidentId", + Label: "Incident ID", + Type: configuration.FieldTypeString, + Required: true, + Description: "The Rootly incident UUID to add the event to", + Placeholder: "e.g., abc123-def456", + }, + { + Name: "event", + Label: "Event", + Type: configuration.FieldTypeText, + Required: true, + Description: "The note/annotation text to add to the incident timeline", + }, + { + Name: "visibility", + Label: "Visibility", + Type: configuration.FieldTypeSelect, + Required: false, + TypeOptions: &configuration.TypeOptions{ + Select: &configuration.SelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "Internal", Value: "internal"}, + {Label: "External", Value: "external"}, + }, + }, + }, + Description: "Set event visibility (optional, defaults to Rootly settings)", + }, + } +} + +func (c *CreateEvent) Setup(ctx core.SetupContext) error 
{ + spec := CreateEventSpec{} + err := mapstructure.Decode(ctx.Configuration, &spec) + if err != nil { + return fmt.Errorf("error decoding configuration: %v", err) + } + + if spec.IncidentID == "" { + return errors.New("incidentId is required") + } + + if spec.Event == "" { + return errors.New("event is required") + } + + if spec.Visibility != "" && spec.Visibility != "internal" && spec.Visibility != "external" { + return errors.New("visibility must be internal or external") + } + + return nil +} + +func (c *CreateEvent) Execute(ctx core.ExecutionContext) error { + spec := CreateEventSpec{} + err := mapstructure.Decode(ctx.Configuration, &spec) + if err != nil { + return fmt.Errorf("error decoding configuration: %v", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("error creating client: %v", err) + } + + incidentEvent, err := client.CreateIncidentEvent(spec.IncidentID, spec.Event, spec.Visibility) + if err != nil { + return fmt.Errorf("failed to create incident event: %v", err) + } + + return ctx.ExecutionState.Emit( + core.DefaultOutputChannel.Name, + "rootly.incident.event", + []any{incidentEvent}, + ) +} + +func (c *CreateEvent) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *CreateEvent) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *CreateEvent) Actions() []core.Action { + return []core.Action{} +} + +func (c *CreateEvent) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *CreateEvent) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *CreateEvent) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/rootly/create_event_test.go b/pkg/integrations/rootly/create_event_test.go new file mode 100644 index 0000000000..2c577230fd --- /dev/null +++ b/pkg/integrations/rootly/create_event_test.go @@ -0,0 +1,185 @@ 
+package rootly + +import ( + "encoding/json" + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__CreateEvent__Setup(t *testing.T) { + component := &CreateEvent{} + + t.Run("invalid configuration -> decode error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: "invalid", + }) + + require.ErrorContains(t, err, "error decoding configuration") + }) + + t.Run("missing incidentId -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "event": "Update", + }, + }) + + require.ErrorContains(t, err, "incidentId is required") + }) + + t.Run("missing event -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "incidentId": "inc-123", + }, + }) + + require.ErrorContains(t, err, "event is required") + }) + + t.Run("invalid visibility -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "incidentId": "inc-123", + "event": "Update", + "visibility": "public", + }, + }) + + require.ErrorContains(t, err, "visibility must be internal or external") + }) + + t.Run("valid configuration -> success", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "incidentId": "inc-123", + "event": "Update", + "visibility": "internal", + }, + }) + + require.NoError(t, err) + }) +} + +func Test__CreateEvent__Execute(t *testing.T) { + component := &CreateEvent{} + + t.Run("successful event creation", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{ + "data": { + "id": "evt-123", + "type": "incident_events", + "attributes": { + "event": 
"Investigation update", + "visibility": "internal", + "occurred_at": "2026-01-19T12:15:00Z", + "created_at": "2026-01-19T12:15:00Z" + } + } + }`)), + }, + }, + } + + appCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "apiKey": "test-api-key", + }, + } + + executionState := &contexts.ExecutionStateContext{ + KVs: make(map[string]string), + } + + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "incidentId": "inc-123", + "event": "Investigation update", + "visibility": "internal", + }, + HTTP: httpContext, + Integration: appCtx, + ExecutionState: executionState, + }) + + require.NoError(t, err) + assert.True(t, executionState.Passed) + assert.Equal(t, "default", executionState.Channel) + assert.Equal(t, "rootly.incident.event", executionState.Type) + + require.Len(t, httpContext.Requests, 1) + req := httpContext.Requests[0] + assert.Equal(t, http.MethodPost, req.Method) + assert.Equal(t, "https://api.rootly.com/v1/incidents/inc-123/events", req.URL.String()) + assert.Equal(t, "application/vnd.api+json", req.Header.Get("Content-Type")) + assert.Equal(t, "application/vnd.api+json", req.Header.Get("Accept")) + assert.Equal(t, "Bearer test-api-key", req.Header.Get("Authorization")) + + body, err := io.ReadAll(req.Body) + require.NoError(t, err) + + var payload map[string]any + err = json.Unmarshal(body, &payload) + require.NoError(t, err) + + data := payload["data"].(map[string]any) + assert.Equal(t, "incident_events", data["type"]) + attributes := data["attributes"].(map[string]any) + assert.Equal(t, "Investigation update", attributes["event"]) + assert.Equal(t, "internal", attributes["visibility"]) + }) + + t.Run("API error -> returns error", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusBadRequest, + Body: io.NopCloser(strings.NewReader(`{"error":"invalid"}`)), + }, + }, + } + + appCtx := &contexts.IntegrationContext{ + Configuration: 
map[string]any{ + "apiKey": "test-api-key", + }, + } + + executionState := &contexts.ExecutionStateContext{ + KVs: make(map[string]string), + } + + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "incidentId": "inc-123", + "event": "Investigation update", + }, + HTTP: httpContext, + Integration: appCtx, + ExecutionState: executionState, + }) + + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to create incident event") + }) +} + +func Test__CreateEvent__OutputChannels(t *testing.T) { + component := &CreateEvent{} + channels := component.OutputChannels(nil) + + require.Len(t, channels, 1) + assert.Equal(t, "default", channels[0].Name) +} diff --git a/pkg/integrations/rootly/example.go b/pkg/integrations/rootly/example.go index 795ea9f784..21d8e4741b 100644 --- a/pkg/integrations/rootly/example.go +++ b/pkg/integrations/rootly/example.go @@ -13,6 +13,12 @@ var exampleOutputCreateIncidentBytes []byte var exampleOutputCreateIncidentOnce sync.Once var exampleOutputCreateIncident map[string]any +//go:embed example_output_create_event.json +var exampleOutputCreateEventBytes []byte + +var exampleOutputCreateEventOnce sync.Once +var exampleOutputCreateEvent map[string]any + //go:embed example_data_on_incident.json var exampleDataOnIncidentBytes []byte @@ -23,6 +29,10 @@ func (c *CreateIncident) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateIncidentOnce, exampleOutputCreateIncidentBytes, &exampleOutputCreateIncident) } +func (c *CreateEvent) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateEventOnce, exampleOutputCreateEventBytes, &exampleOutputCreateEvent) +} + func (t *OnIncident) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleDataOnIncidentOnce, exampleDataOnIncidentBytes, &exampleDataOnIncident) } diff --git a/pkg/integrations/rootly/example_output_create_event.json 
b/pkg/integrations/rootly/example_output_create_event.json new file mode 100644 index 0000000000..728b13b0fb --- /dev/null +++ b/pkg/integrations/rootly/example_output_create_event.json @@ -0,0 +1,11 @@ +{ + "data": { + "created_at": "2026-02-10T07:34:35.902-8:00", + "event": "Investigation update: database connections stabilized.", + "id": "a2d32bb7-0417-4d0d-8483-a583c3-7853", + "occurred_at": "2026-02-10T07:34:35.902-8:00", + "visibility": "internal" + }, + "timestamp": "2026-02-10T15:34:36.09877478Z", + "type": "rootly.incident.event" +} \ No newline at end of file diff --git a/pkg/integrations/rootly/rootly.go b/pkg/integrations/rootly/rootly.go index 1b58b5f646..ba96ead14e 100644 --- a/pkg/integrations/rootly/rootly.go +++ b/pkg/integrations/rootly/rootly.go @@ -62,6 +62,7 @@ func (r *Rootly) Configuration() []configuration.Field { func (r *Rootly) Components() []core.Component { return []core.Component{ &CreateIncident{}, + &CreateEvent{}, } } diff --git a/web_src/src/pages/workflowv2/mappers/rootly/base.ts b/web_src/src/pages/workflowv2/mappers/rootly/base.ts index aba73eb6c1..179fe1e4d5 100644 --- a/web_src/src/pages/workflowv2/mappers/rootly/base.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/base.ts @@ -1,4 +1,4 @@ -import { Incident } from "./types"; +import { Incident, IncidentEvent } from "./types"; export function getDetailsForIncident(incident: Incident): Record { const details: Record = {}; @@ -27,3 +27,25 @@ export function getDetailsForIncident(incident: Incident): Record { + const details: Record = {}; + + if (incidentEvent?.created_at) { + details["Created At"] = new Date(incidentEvent.created_at).toLocaleString(); + } + + if (incidentEvent?.id) { + details["Event ID"] = incidentEvent.id; + } + + if (incidentEvent?.event) { + details["Event"] = incidentEvent.event; + } + + if (incidentEvent?.visibility) { + details["Visibility"] = incidentEvent.visibility; + } + + return details; +} diff --git 
a/web_src/src/pages/workflowv2/mappers/rootly/create_event.ts b/web_src/src/pages/workflowv2/mappers/rootly/create_event.ts new file mode 100644 index 0000000000..279a2cee89 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/rootly/create_event.ts @@ -0,0 +1,84 @@ +import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { getBackgroundColorClass } from "@/utils/colors"; +import { getState, getStateMap, getTriggerRenderer } from ".."; +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + ExecutionInfo, + NodeInfo, + OutputPayload, + SubtitleContext, +} from "../types"; +import { MetadataItem } from "@/ui/metadataList"; +import rootlyIcon from "@/assets/icons/integrations/rootly.svg"; +import { IncidentEvent } from "./types"; +import { getDetailsForIncidentEvent } from "./base"; +import { formatTimeAgo } from "@/utils/date"; + +export const createEventMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; + const componentName = context.componentDefinition.name || "unknown"; + + return { + iconSrc: rootlyIcon, + collapsedBackground: getBackgroundColorClass(context.componentDefinition.color), + collapsed: context.node.isCollapsed, + title: + context.node.name || + context.componentDefinition.label || + context.componentDefinition.name || + "Unnamed component", + eventSections: lastExecution ? 
baseEventSections(context.nodes, lastExecution, componentName) : undefined, + metadata: metadataList(context.node), + includeEmptyState: !lastExecution, + eventStateMap: getStateMap(componentName), + }; + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { default: OutputPayload[] }; + if (!outputs?.default || outputs.default.length === 0) { + return {}; + } + const incidentEvent = outputs.default[0].data as IncidentEvent; + return getDetailsForIncidentEvent(incidentEvent); + }, + + subtitle(context: SubtitleContext): string { + if (!context.execution.createdAt) return ""; + return formatTimeAgo(new Date(context.execution.createdAt)); + }, +}; + +function metadataList(node: NodeInfo): MetadataItem[] { + const metadata: MetadataItem[] = []; + const configuration = node.configuration as { incidentId?: string; visibility?: string }; + + if (configuration?.incidentId) { + metadata.push({ icon: "hash", label: "Incident: " + configuration.incidentId }); + } + + if (configuration?.visibility) { + metadata.push({ icon: "eye", label: "Visibility: " + configuration.visibility }); + } + + return metadata; +} + +function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle: formatTimeAgo(new Date(execution.createdAt!)), + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/rootly/index.ts b/web_src/src/pages/workflowv2/mappers/rootly/index.ts index 17cd54090a..d4176f273d 100644 --- 
a/web_src/src/pages/workflowv2/mappers/rootly/index.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/index.ts @@ -1,10 +1,12 @@ import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../types"; import { onIncidentTriggerRenderer } from "./on_incident"; import { createIncidentMapper } from "./create_incident"; +import { createEventMapper } from "./create_event"; import { buildActionStateRegistry } from "../utils"; export const componentMappers: Record = { createIncident: createIncidentMapper, + createEvent: createEventMapper, }; export const triggerRenderers: Record = { @@ -13,4 +15,5 @@ export const triggerRenderers: Record = { export const eventStateRegistry: Record = { createIncident: buildActionStateRegistry("created"), + createEvent: buildActionStateRegistry("created"), }; diff --git a/web_src/src/pages/workflowv2/mappers/rootly/types.ts b/web_src/src/pages/workflowv2/mappers/rootly/types.ts index 9d837677f6..9b33908b22 100644 --- a/web_src/src/pages/workflowv2/mappers/rootly/types.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/types.ts @@ -25,3 +25,11 @@ export interface Incident { mitigated_at?: string; url?: string; } + +export interface IncidentEvent { + id?: string; + event?: string; + visibility?: string; + occurred_at?: string; + created_at?: string; +} From 42bd7a1fb957ad5b47073b2cbbf422e07aab6a51 Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Tue, 10 Feb 2026 18:34:57 -0300 Subject: [PATCH 048/160] feat: DockerHub integration (#2969) Add Docker Hub integration, including a `dockerhub.getImageTag` component and a `dockerhub.onImagePush` trigger, to enable SuperPlane users to manage and react to Docker Hub events. Webhook provisioning is manual, since DockerHub does not offer a reliable way to provision webhooks. 
--------- Signed-off-by: Lucas Pinheiro Co-authored-by: Cursor Agent Signed-off-by: Muhammad Fuzail Zubari --- docs/components/DockerHub.mdx | 126 +++++++++ pkg/core/trigger.go | 1 + pkg/integrations/dockerhub/client.go | 225 ++++++++++++++++ pkg/integrations/dockerhub/client_test.go | 71 +++++ pkg/integrations/dockerhub/dockerhub.go | 144 ++++++++++ pkg/integrations/dockerhub/example.go | 28 ++ .../dockerhub/example_data_on_image_push.json | 23 ++ .../example_output_get_image_tag.json | 28 ++ pkg/integrations/dockerhub/get_image_tag.go | 175 ++++++++++++ .../dockerhub/get_image_tag_test.go | 94 +++++++ pkg/integrations/dockerhub/list_resources.go | 50 ++++ pkg/integrations/dockerhub/on_image_push.go | 252 ++++++++++++++++++ .../dockerhub/on_image_push_test.go | 173 ++++++++++++ pkg/integrations/dockerhub/token.go | 159 +++++++++++ pkg/integrations/dockerhub/token_test.go | 47 ++++ pkg/public/server.go | 2 + pkg/server/server.go | 1 + .../mappers/dockerhub/get_image_tag.ts | 106 ++++++++ .../workflowv2/mappers/dockerhub/index.ts | 20 ++ .../mappers/dockerhub/on_image_push.tsx | 155 +++++++++++ .../workflowv2/mappers/dockerhub/types.ts | 40 +++ web_src/src/pages/workflowv2/mappers/index.ts | 10 + .../src/ui/BuildingBlocksSidebar/index.tsx | 3 + .../ui/componentSidebar/integrationIcons.tsx | 3 + web_src/src/utils/integrationDisplayName.ts | 1 + 25 files changed, 1937 insertions(+) create mode 100644 docs/components/DockerHub.mdx create mode 100644 pkg/integrations/dockerhub/client.go create mode 100644 pkg/integrations/dockerhub/client_test.go create mode 100644 pkg/integrations/dockerhub/dockerhub.go create mode 100644 pkg/integrations/dockerhub/example.go create mode 100644 pkg/integrations/dockerhub/example_data_on_image_push.json create mode 100644 pkg/integrations/dockerhub/example_output_get_image_tag.json create mode 100644 pkg/integrations/dockerhub/get_image_tag.go create mode 100644 pkg/integrations/dockerhub/get_image_tag_test.go create mode 100644 
pkg/integrations/dockerhub/list_resources.go create mode 100644 pkg/integrations/dockerhub/on_image_push.go create mode 100644 pkg/integrations/dockerhub/on_image_push_test.go create mode 100644 pkg/integrations/dockerhub/token.go create mode 100644 pkg/integrations/dockerhub/token_test.go create mode 100644 web_src/src/pages/workflowv2/mappers/dockerhub/get_image_tag.ts create mode 100644 web_src/src/pages/workflowv2/mappers/dockerhub/index.ts create mode 100644 web_src/src/pages/workflowv2/mappers/dockerhub/on_image_push.tsx create mode 100644 web_src/src/pages/workflowv2/mappers/dockerhub/types.ts diff --git a/docs/components/DockerHub.mdx b/docs/components/DockerHub.mdx new file mode 100644 index 0000000000..7640394aa1 --- /dev/null +++ b/docs/components/DockerHub.mdx @@ -0,0 +1,126 @@ +--- +title: "DockerHub" +--- + +Manage and react to DockerHub repositories and tags + +## Triggers + + + + + +import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + +## Actions + + + + + +## Instructions + +To generate a DockerHub access token: +- Go to **DockerHub** → **Account Settings** → **Personal Access Tokens** +- Generate a new token +- **Copy the token**, and enter your DockerHub username and the token below + + + +## On Image Push + +The On Image Push trigger starts a workflow execution when an image tag is pushed to DockerHub. + +### Use Cases + +- **Build pipelines**: Trigger builds and deployments on container pushes +- **Release workflows**: Promote artifacts when a new tag is published +- **Security automation**: Kick off scans or alerts for newly pushed images + +### Configuration + +- **Repository**: DockerHub repository name, in the format of `namespace/name` +- **Tags**: Optional filters for image tags (for example: `latest` or `^v[0-9]+`) + +### Webhook Setup + +This trigger generates a webhook URL in SuperPlane. Add that URL as a DockerHub webhook for the selected repository so DockerHub can deliver push events. 
+ +### Example Data + +```json +{ + "data": { + "callback_url": "https://hub.docker.com/u/superplane/demo/hook/abcd/", + "push_data": { + "pushed_at": 1736400000, + "pusher": "superplane-bot", + "tag": "v1.2.3" + }, + "repository": { + "description": "Demo image for SuperPlane workflows", + "is_private": false, + "name": "demo", + "namespace": "superplane", + "pull_count": 3456, + "repo_name": "superplane/demo", + "repo_url": "https://hub.docker.com/r/superplane/demo", + "star_count": 12, + "status": "Active" + } + }, + "timestamp": "2026-02-03T12:00:00Z", + "type": "dockerhub.image.push" +} +``` + + + +## Get Image Tag + +The Get Image Tag component retrieves metadata for a DockerHub image tag. + +### Use Cases + +- **Release automation**: Fetch tag metadata for deployments +- **Audit trails**: Resolve tag details for traceability +- **Insights**: Inspect image sizes, digests, and last pushed times + +### Configuration + +- **Repository**: DockerHub repository name, in the format of `namespace/name` +- **Tag**: Image tag to retrieve (for example: `latest` or `v1.2.3`) + +### Example Output + +```json +{ + "data": { + "full_size": 52837442, + "id": 123456, + "images": [ + { + "architecture": "amd64", + "digest": "sha256:fe12ab34cd56ef78ab90cd12ef34ab56cd78ef90ab12cd34ef56ab78cd90ef12", + "last_pulled": "2025-01-06T11:02:10.123456Z", + "last_pushed": "2025-01-05T21:06:53.506400Z", + "os": "linux", + "size": 52837442, + "status": "active" + } + ], + "last_updated": "2025-01-05T21:06:53.506400Z", + "last_updater": 1234, + "last_updater_username": "superplane-bot", + "name": "latest", + "repository": 98765, + "status": "active", + "tag_last_pulled": "2025-01-06T11:02:10.123456Z", + "tag_last_pushed": "2025-01-05T21:06:53.506400Z", + "v2": "true" + }, + "timestamp": "2026-02-03T12:00:00Z", + "type": "dockerhub.tag" +} +``` + diff --git a/pkg/core/trigger.go b/pkg/core/trigger.go index 523874b102..0e86340fa0 100644 --- a/pkg/core/trigger.go +++ b/pkg/core/trigger.go @@ 
-115,6 +115,7 @@ type WebhookRequestContext struct { WorkflowID string NodeID string Configuration any + Metadata MetadataContext Logger *log.Entry Webhook NodeWebhookContext Events EventContext diff --git a/pkg/integrations/dockerhub/client.go b/pkg/integrations/dockerhub/client.go new file mode 100644 index 0000000000..8c1f1a57a7 --- /dev/null +++ b/pkg/integrations/dockerhub/client.go @@ -0,0 +1,225 @@ +package dockerhub + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + + "github.com/superplanehq/superplane/pkg/core" +) + +const ( + defaultBaseURL = "https://hub.docker.com" + defaultPageSize = 100 +) + +type Client struct { + AccessToken string + BaseURL string + http core.HTTPContext +} + +func NewClient(httpClient core.HTTPContext, integration core.IntegrationContext) (*Client, error) { + if integration == nil { + return nil, fmt.Errorf("no integration context") + } + + accessToken, err := findSecret(integration, accessTokenSecretName) + if err != nil { + return nil, fmt.Errorf("access token not configured: %w", err) + } + + token := strings.TrimSpace(accessToken) + if token == "" { + return nil, fmt.Errorf("access token is required") + } + + return &Client{ + AccessToken: token, + BaseURL: defaultBaseURL, + http: httpClient, + }, nil +} + +func findSecret(ctx core.IntegrationContext, secretName string) (string, error) { + secrets, err := ctx.GetSecrets() + if err != nil { + return "", err + } + + for _, secret := range secrets { + if secret.Name == secretName { + return string(secret.Value), nil + } + } + + return "", fmt.Errorf("secret %s not found", secretName) +} + +func (c *Client) doRequest(method, URL string, body io.Reader) (*http.Response, []byte, error) { + finalURL := URL + if !strings.HasPrefix(URL, "http") { + finalURL = c.BaseURL + URL + } + + req, err := http.NewRequest(method, finalURL, body) + if err != nil { + return nil, nil, fmt.Errorf("failed to build request: %w", err) + } + + req.Header.Set("Content-Type", 
"application/json") + req.Header.Set("Authorization", "Bearer "+c.AccessToken) + + res, err := c.http.Do(req) + if err != nil { + return nil, nil, fmt.Errorf("request failed: %w", err) + } + defer res.Body.Close() + + responseBody, err := io.ReadAll(res.Body) + if err != nil { + return nil, nil, fmt.Errorf("failed to read response body: %w", err) + } + + if res.StatusCode < http.StatusOK || res.StatusCode >= http.StatusMultipleChoices { + return nil, nil, fmt.Errorf("request failed with %d: %s", res.StatusCode, string(responseBody)) + } + + return res, responseBody, nil +} + +type Repository struct { + Name string `json:"name" mapstructure:"name"` + Namespace string `json:"namespace" mapstructure:"namespace"` + Description string `json:"description" mapstructure:"description"` + IsPrivate bool `json:"is_private" mapstructure:"is_private"` + StarCount int `json:"star_count" mapstructure:"star_count"` + PullCount int `json:"pull_count" mapstructure:"pull_count"` + Status string `json:"status_description" mapstructure:"status_description"` +} + +type ListRepositoriesResponse struct { + Next string `json:"next"` + Results []Repository `json:"results"` +} + +func (c *Client) ValidateCredentials(namespace string) error { + namespace = strings.TrimSpace(namespace) + if namespace == "" { + return fmt.Errorf("namespace is required") + } + + _, err := c.ListRepositories(namespace) + return err +} + +func (c *Client) ListRepositories(namespace string) ([]Repository, error) { + namespace = strings.TrimSpace(namespace) + if namespace == "" { + return nil, fmt.Errorf("namespace is required") + } + + path := fmt.Sprintf("/v2/namespaces/%s/repositories?page_size=%d", namespace, defaultPageSize) + repositories := []Repository{} + + for path != "" { + _, responseBody, err := c.doRequest(http.MethodGet, path, nil) + if err != nil { + return nil, err + } + + var response ListRepositoriesResponse + if err := json.Unmarshal(responseBody, &response); err != nil { + return nil, 
fmt.Errorf("failed to parse repositories response: %w", err) + } + + repositories = append(repositories, response.Results...) + path = response.Next + } + + return repositories, nil +} + +func (c *Client) GetRepository(namespace, repository string) (*Repository, error) { + if namespace == "" || repository == "" { + return nil, fmt.Errorf("namespace and repository are required") + } + + path := fmt.Sprintf("/v2/namespaces/%s/repositories/%s", namespace, repository) + _, responseBody, err := c.doRequest(http.MethodGet, path, nil) + if err != nil { + return nil, err + } + + var repo Repository + if err := json.Unmarshal(responseBody, &repo); err != nil { + return nil, fmt.Errorf("failed to parse repository response: %w", err) + } + + return &repo, nil +} + +type Image struct { + Architecture string `json:"architecture"` + OS string `json:"os"` + Digest string `json:"digest"` + Size int64 `json:"size"` + Status string `json:"status"` + LastPulled string `json:"last_pulled"` + LastPushed string `json:"last_pushed"` +} + +type ImageSet []Image + +func (i *ImageSet) UnmarshalJSON(data []byte) error { + if len(data) == 0 || string(data) == "null" { + return nil + } + + if data[0] == '[' { + var images []Image + if err := json.Unmarshal(data, &images); err != nil { + return err + } + *i = images + return nil + } + + var image Image + if err := json.Unmarshal(data, &image); err != nil { + return err + } + *i = []Image{image} + return nil +} + +type Tag struct { + ID int64 `json:"id"` + Name string `json:"name"` + FullSize int64 `json:"full_size"` + LastUpdated string `json:"last_updated"` + LastUpdater int64 `json:"last_updater"` + LastUpdaterUsername string `json:"last_updater_username"` + Status string `json:"status"` + TagLastPulled string `json:"tag_last_pulled"` + TagLastPushed string `json:"tag_last_pushed"` + Repository int64 `json:"repository"` + Images ImageSet `json:"images"` +} + +func (c *Client) GetRepositoryTag(namespace, repository, tag string) (*Tag, error) { 
+ path := fmt.Sprintf("/v2/namespaces/%s/repositories/%s/tags/%s", namespace, repository, tag) + _, responseBody, err := c.doRequest(http.MethodGet, path, nil) + if err != nil { + return nil, err + } + + var result Tag + if err := json.Unmarshal(responseBody, &result); err != nil { + return nil, fmt.Errorf("failed to parse tag response: %w", err) + } + + return &result, nil +} diff --git a/pkg/integrations/dockerhub/client_test.go b/pkg/integrations/dockerhub/client_test.go new file mode 100644 index 0000000000..f7ce398094 --- /dev/null +++ b/pkg/integrations/dockerhub/client_test.go @@ -0,0 +1,71 @@ +package dockerhub + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__DockerHub__NewClient(t *testing.T) { + t.Run("missing access token secret -> error", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{} + + _, err := NewClient(&contexts.HTTPContext{}, integrationCtx) + require.Error(t, err) + assert.Contains(t, err.Error(), "access token not configured") + }) + + t.Run("valid configuration -> client created", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{ + Secrets: map[string]core.IntegrationSecret{ + accessTokenSecretName: {Name: accessTokenSecretName, Value: []byte("token")}, + }, + } + + client, err := NewClient(&contexts.HTTPContext{}, integrationCtx) + require.NoError(t, err) + assert.Equal(t, "token", client.AccessToken) + }) +} + +func Test__DockerHub__ListRepositories(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + { + "next": null, + "results": [ + {"name": "demo", "namespace": "superplane"} + ] + } + `)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Secrets: 
map[string]core.IntegrationSecret{ + accessTokenSecretName: {Name: accessTokenSecretName, Value: []byte("token")}, + }, + } + + client, err := NewClient(httpCtx, integrationCtx) + require.NoError(t, err) + + repos, err := client.ListRepositories("superplane") + require.NoError(t, err) + require.Len(t, repos, 1) + assert.Equal(t, "demo", repos[0].Name) + + require.Len(t, httpCtx.Requests, 1) + assert.Contains(t, httpCtx.Requests[0].URL.String(), "/v2/namespaces/superplane/repositories") + assert.Equal(t, "Bearer token", httpCtx.Requests[0].Header.Get("Authorization")) +} diff --git a/pkg/integrations/dockerhub/dockerhub.go b/pkg/integrations/dockerhub/dockerhub.go new file mode 100644 index 0000000000..8f4e3f7e56 --- /dev/null +++ b/pkg/integrations/dockerhub/dockerhub.go @@ -0,0 +1,144 @@ +package dockerhub + +import ( + "fmt" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/registry" +) + +func init() { + registry.RegisterIntegration("dockerhub", &DockerHub{}) +} + +type DockerHub struct{} + +type Configuration struct { + Username string `json:"username"` + AccessToken string `json:"accessToken"` +} + +func (d *DockerHub) Name() string { + return "dockerhub" +} + +func (d *DockerHub) Label() string { + return "DockerHub" +} + +func (d *DockerHub) Icon() string { + return "docker" +} + +func (d *DockerHub) Description() string { + return "Manage and react to DockerHub repositories and tags" +} + +func (d *DockerHub) Instructions() string { + return ` +To generate a DockerHub access token: +- Go to **DockerHub** → **Account Settings** → **Personal Access Tokens** +- Generate a new token +- **Copy the token**, and enter your DockerHub username and the token below +` +} + +func (d *DockerHub) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "username", + Label: "Username", + Type: 
configuration.FieldTypeString, + Required: true, + Description: "Username or organization name", + }, + { + Name: "accessToken", + Label: "Access Token", + Type: configuration.FieldTypeString, + Required: true, + Sensitive: true, + Description: "Personal access token", + }, + } +} + +func (d *DockerHub) Components() []core.Component { + return []core.Component{ + &GetImageTag{}, + } +} + +func (d *DockerHub) Triggers() []core.Trigger { + return []core.Trigger{ + &OnImagePush{}, + } +} + +func (d *DockerHub) Cleanup(ctx core.IntegrationCleanupContext) error { + return nil +} + +func (d *DockerHub) Sync(ctx core.SyncContext) error { + config := Configuration{} + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + refreshIn, err := refreshAccessToken(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("failed to refresh access token: %w", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("failed to create client: %w", err) + } + + err = client.ValidateCredentials(config.Username) + if err != nil { + return fmt.Errorf("failed to validate credentials: %w", err) + } + + err = ctx.Integration.ScheduleActionCall("refreshAccessToken", map[string]any{}, *refreshIn) + if err != nil { + return fmt.Errorf("failed to schedule token refresh: %w", err) + } + + ctx.Integration.Ready() + return nil +} + +func (d *DockerHub) HandleRequest(ctx core.HTTPRequestContext) { + // no-op - webhooks are handled by triggers +} + +func (d *DockerHub) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + return listDockerHubResources(resourceType, ctx) +} + +func (d *DockerHub) Actions() []core.Action { + return []core.Action{ + { + Name: "refreshAccessToken", + Description: "Refresh access token", + }, + } +} + +func (d *DockerHub) HandleAction(ctx core.IntegrationActionContext) error { + 
switch ctx.Name { + case "refreshAccessToken": + refreshIn, err := refreshAccessToken(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("failed to refresh access token: %w", err) + } + + return ctx.Integration.ScheduleActionCall("refreshAccessToken", map[string]any{}, *refreshIn) + + default: + return fmt.Errorf("unknown action: %s", ctx.Name) + } +} diff --git a/pkg/integrations/dockerhub/example.go b/pkg/integrations/dockerhub/example.go new file mode 100644 index 0000000000..e53c724ed1 --- /dev/null +++ b/pkg/integrations/dockerhub/example.go @@ -0,0 +1,28 @@ +package dockerhub + +import ( + _ "embed" + "sync" + + "github.com/superplanehq/superplane/pkg/utils" +) + +//go:embed example_output_get_image_tag.json +var exampleOutputGetImageTagBytes []byte + +//go:embed example_data_on_image_push.json +var exampleDataOnImagePushBytes []byte + +var exampleOutputGetImageTagOnce sync.Once +var exampleOutputGetImageTag map[string]any + +var exampleDataOnImagePushOnce sync.Once +var exampleDataOnImagePush map[string]any + +func getImageTagExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputGetImageTagOnce, exampleOutputGetImageTagBytes, &exampleOutputGetImageTag) +} + +func onImagePushExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnImagePushOnce, exampleDataOnImagePushBytes, &exampleDataOnImagePush) +} diff --git a/pkg/integrations/dockerhub/example_data_on_image_push.json b/pkg/integrations/dockerhub/example_data_on_image_push.json new file mode 100644 index 0000000000..94b521bc3f --- /dev/null +++ b/pkg/integrations/dockerhub/example_data_on_image_push.json @@ -0,0 +1,23 @@ +{ + "timestamp": "2026-02-03T12:00:00Z", + "type": "dockerhub.image.push", + "data": { + "callback_url": "https://hub.docker.com/u/superplane/demo/hook/abcd/", + "push_data": { + "tag": "v1.2.3", + "pushed_at": 1736400000, + "pusher": "superplane-bot" + }, + "repository": { + "repo_name": "superplane/demo", + "name": 
"demo", + "namespace": "superplane", + "repo_url": "https://hub.docker.com/r/superplane/demo", + "is_private": false, + "status": "Active", + "star_count": 12, + "pull_count": 3456, + "description": "Demo image for SuperPlane workflows" + } + } +} diff --git a/pkg/integrations/dockerhub/example_output_get_image_tag.json b/pkg/integrations/dockerhub/example_output_get_image_tag.json new file mode 100644 index 0000000000..5c0d435d3c --- /dev/null +++ b/pkg/integrations/dockerhub/example_output_get_image_tag.json @@ -0,0 +1,28 @@ +{ + "timestamp": "2026-02-03T12:00:00Z", + "type": "dockerhub.tag", + "data": { + "id": 123456, + "name": "latest", + "full_size": 52837442, + "last_updated": "2025-01-05T21:06:53.506400Z", + "last_updater": 1234, + "last_updater_username": "superplane-bot", + "status": "active", + "tag_last_pulled": "2025-01-06T11:02:10.123456Z", + "tag_last_pushed": "2025-01-05T21:06:53.506400Z", + "repository": 98765, + "v2": "true", + "images": [ + { + "architecture": "amd64", + "os": "linux", + "digest": "sha256:fe12ab34cd56ef78ab90cd12ef34ab56cd78ef90ab12cd34ef56ab78cd90ef12", + "size": 52837442, + "status": "active", + "last_pulled": "2025-01-06T11:02:10.123456Z", + "last_pushed": "2025-01-05T21:06:53.506400Z" + } + ] + } +} diff --git a/pkg/integrations/dockerhub/get_image_tag.go b/pkg/integrations/dockerhub/get_image_tag.go new file mode 100644 index 0000000000..8759522d2c --- /dev/null +++ b/pkg/integrations/dockerhub/get_image_tag.go @@ -0,0 +1,175 @@ +package dockerhub + +import ( + "fmt" + "net/http" + "strings" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type GetImageTag struct{} + +type GetImageTagConfiguration struct { + Namespace string `json:"namespace" mapstructure:"namespace"` + Repository string `json:"repository" mapstructure:"repository"` + Tag string `json:"tag" mapstructure:"tag"` +} + +func (c 
*GetImageTag) Name() string { + return "dockerhub.getImageTag" +} + +func (c *GetImageTag) Label() string { + return "Get Image Tag" +} + +func (c *GetImageTag) Description() string { + return "Get metadata for a DockerHub image tag" +} + +func (c *GetImageTag) Documentation() string { + return `The Get Image Tag component retrieves metadata for a DockerHub image tag. + +## Use Cases + +- **Release automation**: Fetch tag metadata for deployments +- **Audit trails**: Resolve tag details for traceability +- **Insights**: Inspect image sizes, digests, and last pushed times + +## Configuration + +- **Repository**: DockerHub repository name, in the format of ` + "`namespace/name`" + ` +- **Tag**: Image tag to retrieve (for example: ` + "`latest`" + ` or ` + "`v1.2.3`" + `) +` +} + +func (c *GetImageTag) Icon() string { + return "docker" +} + +func (c *GetImageTag) Color() string { + return "gray" +} + +func (c *GetImageTag) ExampleOutput() map[string]any { + return getImageTagExampleOutput() +} + +func (c *GetImageTag) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *GetImageTag) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "repository", + Label: "Repository", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "dockerhub.repository", + }, + }, + }, + { + Name: "tag", + Label: "Tag", + Type: configuration.FieldTypeString, + Required: true, + Placeholder: "latest", + }, + } +} + +func (c *GetImageTag) Setup(ctx core.SetupContext) error { + var config GetImageTagConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + repository := strings.TrimSpace(config.Repository) + if repository == "" { + return fmt.Errorf("repository is required") + 
} + + tag := strings.TrimSpace(config.Tag) + if tag == "" { + return fmt.Errorf("tag is required") + } + + return nil +} + +func (c *GetImageTag) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *GetImageTag) Execute(ctx core.ExecutionContext) error { + var config GetImageTagConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + repository := strings.TrimSpace(config.Repository) + if repository == "" { + return fmt.Errorf("repository is required") + } + + tag := strings.TrimSpace(config.Tag) + if tag == "" { + return fmt.Errorf("tag is required") + } + + parts := strings.Split(repository, "/") + if len(parts) != 2 { + return fmt.Errorf("repository must be in the format of namespace/name") + } + + namespace := strings.TrimSpace(parts[0]) + repositoryName := strings.TrimSpace(parts[1]) + + if namespace == "" || repositoryName == "" { + return fmt.Errorf("repository must be in the format of namespace/name") + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("failed to create client: %w", err) + } + + tagResponse, err := client.GetRepositoryTag(namespace, repositoryName, tag) + if err != nil { + return fmt.Errorf("failed to fetch image tag: %w", err) + } + + return ctx.ExecutionState.Emit( + core.DefaultOutputChannel.Name, + "dockerhub.tag", + []any{tagResponse}, + ) +} + +func (c *GetImageTag) Actions() []core.Action { + return []core.Action{} +} + +func (c *GetImageTag) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *GetImageTag) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *GetImageTag) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *GetImageTag) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git 
a/pkg/integrations/dockerhub/get_image_tag_test.go b/pkg/integrations/dockerhub/get_image_tag_test.go new file mode 100644 index 0000000000..0a7f513966 --- /dev/null +++ b/pkg/integrations/dockerhub/get_image_tag_test.go @@ -0,0 +1,94 @@ +package dockerhub + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__GetImageTag__Setup(t *testing.T) { + component := &GetImageTag{} + + t.Run("invalid configuration -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Integration: &contexts.IntegrationContext{}, + Metadata: &contexts.MetadataContext{}, + Configuration: "invalid", + }) + + require.ErrorContains(t, err, "failed to decode configuration") + }) + + t.Run("missing repository -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Integration: &contexts.IntegrationContext{}, + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{"tag": "latest"}, + }) + + require.ErrorContains(t, err, "repository is required") + }) + + t.Run("missing tag -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Integration: &contexts.IntegrationContext{}, + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{"repository": "superplane/demo"}, + }) + + require.ErrorContains(t, err, "tag is required") + }) + + t.Run("valid configuration -> stores metadata", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + HTTP: &contexts.HTTPContext{}, + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{ + "repository": "superplane/demo", + "tag": "latest", + }, + }) + + require.NoError(t, err) + }) +} + +func Test__GetImageTag__Execute(t *testing.T) { + component := &GetImageTag{} + + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + 
StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"id":1,"name":"latest"}`)), + }, + }, + } + + execState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + + err := component.Execute(core.ExecutionContext{ + Integration: &contexts.IntegrationContext{ + Secrets: map[string]core.IntegrationSecret{ + accessTokenSecretName: {Name: accessTokenSecretName, Value: []byte("token")}, + }, + }, + HTTP: httpCtx, + ExecutionState: execState, + Configuration: map[string]any{ + "repository": "superplane/demo", + "tag": "latest", + }, + }) + + require.NoError(t, err) + assert.Equal(t, core.DefaultOutputChannel.Name, execState.Channel) + assert.Equal(t, "dockerhub.tag", execState.Type) + require.Len(t, execState.Payloads, 1) +} diff --git a/pkg/integrations/dockerhub/list_resources.go b/pkg/integrations/dockerhub/list_resources.go new file mode 100644 index 0000000000..838bf66a70 --- /dev/null +++ b/pkg/integrations/dockerhub/list_resources.go @@ -0,0 +1,50 @@ +package dockerhub + +import ( + "fmt" + + "github.com/superplanehq/superplane/pkg/core" +) + +const ( + ResourceTypeRepository = "dockerhub.repository" +) + +func listDockerHubResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + switch resourceType { + case ResourceTypeRepository: + return listDockerHubRepositories(ctx) + + default: + return []core.IntegrationResource{}, nil + } +} + +func listDockerHubRepositories(ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + namespace, err := ctx.Integration.GetConfig("username") + if err != nil { + return nil, fmt.Errorf("integration username is required: %w", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, err + } + + repositories, err := client.ListRepositories(string(namespace)) + if err != nil { + return nil, fmt.Errorf("failed to list repositories: %w", err) + } + + resources := make([]core.IntegrationResource, 0, len(repositories)) 
+ for _, repository := range repositories { + name := repository.Namespace + "/" + repository.Name + resources = append(resources, core.IntegrationResource{ + Type: ResourceTypeRepository, + Name: name, + ID: name, + }) + } + + return resources, nil +} diff --git a/pkg/integrations/dockerhub/on_image_push.go b/pkg/integrations/dockerhub/on_image_push.go new file mode 100644 index 0000000000..8aee594c87 --- /dev/null +++ b/pkg/integrations/dockerhub/on_image_push.go @@ -0,0 +1,252 @@ +package dockerhub + +import ( + "encoding/json" + "fmt" + "net/http" + "strings" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type OnImagePush struct{} + +type OnImagePushConfiguration struct { + Repository string `json:"repository" mapstructure:"repository"` + Tags []configuration.Predicate `json:"tags" mapstructure:"tags"` +} + +type OnImagePushMetadata struct { + Repository *RepositoryMetadata `json:"repository" mapstructure:"repository"` + WebhookURL string `json:"webhookUrl" mapstructure:"webhookUrl"` +} + +type RepositoryMetadata struct { + Namespace string `json:"namespace" mapstructure:"namespace"` + Name string `json:"name" mapstructure:"name"` +} + +type ImagePushPayload struct { + CallbackURL string `json:"callback_url"` + PushData ImagePushData `json:"push_data"` + Repository ImagePushRepository `json:"repository"` +} + +type ImagePushData struct { + Tag string `json:"tag"` + PushedAt int64 `json:"pushed_at"` + Pusher string `json:"pusher"` +} + +type ImagePushRepository struct { + RepoName string `json:"repo_name"` + Name string `json:"name"` + Namespace string `json:"namespace"` + RepoURL string `json:"repo_url"` + IsPrivate bool `json:"is_private"` + Status string `json:"status"` + StarCount int `json:"star_count"` + PullCount int `json:"pull_count"` + Owner string `json:"owner"` + Repository string `json:"repository"` +} + +func (p *OnImagePush) Name() string { + 
return "dockerhub.onImagePush" +} + +func (p *OnImagePush) Label() string { + return "On Image Push" +} + +func (p *OnImagePush) Description() string { + return "Listen to DockerHub image push events" +} + +func (p *OnImagePush) Documentation() string { + return `The On Image Push trigger starts a workflow execution when an image tag is pushed to DockerHub. + +## Use Cases + +- **Build pipelines**: Trigger builds and deployments on container pushes +- **Release workflows**: Promote artifacts when a new tag is published +- **Security automation**: Kick off scans or alerts for newly pushed images + +## Configuration + +- **Repository**: DockerHub repository name, in the format of ` + "`namespace/name`" + ` +- **Tags**: Optional filters for image tags (for example: ` + "`latest`" + ` or ` + "`^v[0-9]+`" + `) + +## Webhook Setup + +This trigger generates a webhook URL in SuperPlane. Add that URL as a DockerHub webhook for the selected repository so DockerHub can deliver push events.` +} + +func (p *OnImagePush) Icon() string { + return "docker" +} + +func (p *OnImagePush) Color() string { + return "gray" +} + +func (p *OnImagePush) ExampleData() map[string]any { + return onImagePushExampleData() +} + +func (p *OnImagePush) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "repository", + Label: "Repository", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "dockerhub.repository", + UseNameAsValue: true, + Parameters: []configuration.ParameterRef{ + { + Name: "namespace", + ValueFrom: &configuration.ParameterValueFrom{ + Field: "namespace", + }, + }, + }, + }, + }, + }, + { + Name: "tags", + Label: "Tags", + Type: configuration.FieldTypeAnyPredicateList, + Required: false, + TypeOptions: &configuration.TypeOptions{ + AnyPredicateList: &configuration.AnyPredicateListTypeOptions{ + Operators: 
configuration.AllPredicateOperators, + }, + }, + }, + } +} + +func (p *OnImagePush) Setup(ctx core.TriggerContext) error { + metadata := OnImagePushMetadata{} + if err := mapstructure.Decode(ctx.Metadata.Get(), &metadata); err != nil { + return fmt.Errorf("failed to decode metadata: %w", err) + } + + config := OnImagePushConfiguration{} + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + repository := strings.TrimSpace(config.Repository) + if repository == "" { + return fmt.Errorf("repository is required") + } + + parts := strings.Split(repository, "/") + if len(parts) != 2 { + return fmt.Errorf("repository must be in the format of namespace/name") + } + + namespace := parts[0] + repositoryName := parts[1] + + if metadata.Repository != nil && + metadata.Repository.Name == repositoryName && + metadata.Repository.Namespace == namespace && + metadata.WebhookURL != "" { + return nil + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("failed to create client: %w", err) + } + + repoInfo, err := client.GetRepository(namespace, repositoryName) + if err != nil { + return fmt.Errorf("failed to validate repository %s in namespace %s: %w", repositoryName, namespace, err) + } + + webhookURL := metadata.WebhookURL + if webhookURL == "" { + webhookURL, err = ctx.Webhook.Setup() + if err != nil { + return fmt.Errorf("failed to setup webhook: %w", err) + } + } + + return ctx.Metadata.Set(OnImagePushMetadata{ + WebhookURL: webhookURL, + Repository: &RepositoryMetadata{ + Namespace: repoInfo.Namespace, + Name: repoInfo.Name, + }, + }) +} + +func (p *OnImagePush) Actions() []core.Action { + return []core.Action{} +} + +func (p *OnImagePush) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + return nil, nil +} + +func (p *OnImagePush) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + config := 
OnImagePushConfiguration{} + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode configuration: %w", err) + } + + metadata := OnImagePushMetadata{} + if err := mapstructure.Decode(ctx.Metadata.Get(), &metadata); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode metadata: %w", err) + } + + payload := ImagePushPayload{} + if err := json.Unmarshal(ctx.Body, &payload); err != nil { + return http.StatusBadRequest, fmt.Errorf("error parsing request body: %w", err) + } + + if metadata.Repository == nil { + return http.StatusOK, nil + } + + if metadata.Repository.Namespace != payload.Repository.Namespace { + ctx.Logger.Infof("Ignoring event for namespace %s", payload.Repository.Namespace) + return http.StatusOK, nil + } + + if metadata.Repository.Name != payload.Repository.Name { + ctx.Logger.Infof("Ignoring event for repository %s", payload.Repository.Name) + return http.StatusOK, nil + } + + if len(config.Tags) > 0 { + tag := strings.TrimSpace(payload.PushData.Tag) + if tag == "" { + return http.StatusOK, nil + } + + if !configuration.MatchesAnyPredicate(config.Tags, tag) { + ctx.Logger.Infof("Ignoring event with non-matching tag %s", tag) + return http.StatusOK, nil + } + } + + if err := ctx.Events.Emit("dockerhub.image.push", payload); err != nil { + return http.StatusInternalServerError, fmt.Errorf("error emitting event: %w", err) + } + + return http.StatusOK, nil +} + +func (p *OnImagePush) Cleanup(ctx core.TriggerContext) error { + return nil +} diff --git a/pkg/integrations/dockerhub/on_image_push_test.go b/pkg/integrations/dockerhub/on_image_push_test.go new file mode 100644 index 0000000000..0a61f94d9a --- /dev/null +++ b/pkg/integrations/dockerhub/on_image_push_test.go @@ -0,0 +1,173 @@ +package dockerhub + +import ( + "io" + "net/http" + "strings" + "testing" + + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__OnImagePush__Setup(t *testing.T) { + trigger := &OnImagePush{} + + t.Run("repository is required", func(t *testing.T) { + err := trigger.Setup(core.TriggerContext{ + Integration: &contexts.IntegrationContext{}, + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{"repository": ""}, + }) + + require.ErrorContains(t, err, "repository is required") + }) + + t.Run("valid configuration -> stores metadata and generates webhook URL", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"name":"demo","namespace":"superplane"}`)), + }, + }, + } + + metadata := &contexts.MetadataContext{} + integrationCtx := &contexts.IntegrationContext{ + Secrets: map[string]core.IntegrationSecret{ + accessTokenSecretName: {Name: accessTokenSecretName, Value: []byte("token")}, + }, + } + + err := trigger.Setup(core.TriggerContext{ + HTTP: httpCtx, + Integration: integrationCtx, + Metadata: metadata, + Webhook: &contexts.WebhookContext{}, + Configuration: map[string]any{ + "repository": "superplane/demo", + }, + }) + + require.NoError(t, err) + stored, ok := metadata.Metadata.(OnImagePushMetadata) + require.True(t, ok) + assert.Equal(t, "demo", stored.Repository.Name) + assert.NotEmpty(t, stored.WebhookURL) + }) +} + +func Test__OnImagePush__HandleWebhook(t *testing.T) { + trigger := &OnImagePush{} + + t.Run("invalid JSON -> 400", func(t *testing.T) { + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: []byte(`invalid`), + Events: &contexts.EventContext{}, + Configuration: map[string]any{"repository": "superplane/demo"}, + Metadata: &contexts.MetadataContext{ + Metadata: OnImagePushMetadata{ + Repository: &RepositoryMetadata{ + 
Namespace: "superplane", + Name: "demo", + }, + }, + }, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusBadRequest, code) + assert.ErrorContains(t, err, "error parsing request body") + }) + + t.Run("repository mismatch -> ignored", func(t *testing.T) { + body := []byte(`{"repository":{"name":"other", "namespace":"superplane"},"push_data":{"tag":"latest"}}`) + events := &contexts.EventContext{} + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Events: events, + Configuration: map[string]any{"repository": "superplane/demo"}, + Logger: log.NewEntry(log.New()), + Metadata: &contexts.MetadataContext{ + Metadata: OnImagePushMetadata{ + Repository: &RepositoryMetadata{ + Namespace: "superplane", + Name: "demo", + }, + }, + }, + }) + + assert.Equal(t, http.StatusOK, code) + require.NoError(t, err) + assert.Equal(t, 0, events.Count()) + }) + + t.Run("tag filter mismatch -> ignored", func(t *testing.T) { + body := []byte(`{"repository":{"name":"demo", "namespace":"superplane"},"push_data":{"tag":"latest"}}`) + events := &contexts.EventContext{} + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Logger: log.NewEntry(log.New()), + Events: events, + Metadata: &contexts.MetadataContext{ + Metadata: OnImagePushMetadata{ + Repository: &RepositoryMetadata{ + Namespace: "superplane", + Name: "demo", + }, + }, + }, + Configuration: map[string]any{ + "repository": "superplane/demo", + "tags": []map[string]any{ + { + "type": configuration.PredicateTypeEquals, + "value": "v1.*", + }, + }, + }, + }) + + assert.Equal(t, http.StatusOK, code) + require.NoError(t, err) + assert.Equal(t, 0, events.Count()) + }) + + t.Run("match -> event emitted", func(t *testing.T) { + body := []byte(`{"repository":{"name":"demo", "namespace":"superplane"},"push_data":{"tag":"v1.2.3"}}`) + events := &contexts.EventContext{} + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Events: events, + 
Logger: log.NewEntry(log.New()), + Metadata: &contexts.MetadataContext{ + Metadata: OnImagePushMetadata{ + Repository: &RepositoryMetadata{ + Namespace: "superplane", + Name: "demo", + }, + }, + }, + Configuration: map[string]any{ + "repository": "superplane/demo", + "tags": []map[string]any{ + { + "type": configuration.PredicateTypeMatches, + "value": "^v1.*", + }, + }, + }, + }) + + assert.Equal(t, http.StatusOK, code) + require.NoError(t, err) + require.Equal(t, 1, events.Count()) + assert.Equal(t, "dockerhub.image.push", events.Payloads[0].Type) + }) +} diff --git a/pkg/integrations/dockerhub/token.go b/pkg/integrations/dockerhub/token.go new file mode 100644 index 0000000000..ad4a760095 --- /dev/null +++ b/pkg/integrations/dockerhub/token.go @@ -0,0 +1,159 @@ +package dockerhub + +import ( + "bytes" + "encoding/base64" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + "time" + + "github.com/superplanehq/superplane/pkg/core" +) + +const ( + accessTokenSecretName = "accessTokenJwt" + authTokenEndpoint = "/v2/auth/token" + minRefreshInterval = time.Minute +) + +func refreshAccessToken(httpCtx core.HTTPContext, integration core.IntegrationContext) (*time.Duration, error) { + username, err := integration.GetConfig("username") + if err != nil { + return nil, fmt.Errorf("failed to get username: %w", err) + } + + accessToken, err := integration.GetConfig("accessToken") + if err != nil { + return nil, fmt.Errorf("failed to get access token: %w", err) + } + + token, refreshIn, err := createAccessToken(httpCtx, string(username), string(accessToken)) + if err != nil { + return nil, err + } + + if err := integration.SetSecret(accessTokenSecretName, []byte(token)); err != nil { + return nil, err + } + + return refreshIn, nil +} + +type AccessTokenRequest struct { + Identifier string `json:"identifier"` + Secret string `json:"secret"` +} + +type AccessTokenResponse struct { + AccessToken string `json:"access_token"` +} + +func createAccessToken(httpCtx 
core.HTTPContext, identifier, secret string) (string, *time.Duration, error) { + payload, err := json.Marshal(AccessTokenRequest{ + Identifier: identifier, + Secret: secret, + }) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal access token request: %w", err) + } + + req, err := http.NewRequest(http.MethodPost, defaultBaseURL+authTokenEndpoint, bytes.NewReader(payload)) + if err != nil { + return "", nil, fmt.Errorf("failed to create access token request: %w", err) + } + + req.Header.Set("Content-Type", "application/json") + + res, err := httpCtx.Do(req) + if err != nil { + return "", nil, fmt.Errorf("access token request failed: %w", err) + } + defer res.Body.Close() + + body, err := io.ReadAll(res.Body) + if err != nil { + return "", nil, fmt.Errorf("failed to read access token response: %w", err) + } + + if res.StatusCode < http.StatusOK || res.StatusCode >= http.StatusMultipleChoices { + return "", nil, fmt.Errorf("request failed with %d: %s", res.StatusCode, string(body)) + } + + var response AccessTokenResponse + if err := json.Unmarshal(body, &response); err != nil { + return "", nil, fmt.Errorf("failed to parse access token response: %w", err) + } + + if response.AccessToken == "" { + return "", nil, fmt.Errorf("access token response was empty") + } + + expiresAt, err := parseJWTExpiry(response.AccessToken) + if err != nil { + return "", nil, fmt.Errorf("failed to parse access token expiry: %w", err) + } + + // + // We schedule the refresh for 1min before the expiration time. 
+ // + interval := time.Until(expiresAt.Add(-time.Minute)) + if interval < minRefreshInterval { + interval = minRefreshInterval + } + + return response.AccessToken, &interval, nil +} + +func parseJWTExpiry(token string) (*time.Time, error) { + parts := strings.Split(token, ".") + if len(parts) < 2 { + return nil, fmt.Errorf("invalid JWT token") + } + + payloadBytes, err := base64.RawURLEncoding.DecodeString(parts[1]) + if err != nil { + return nil, fmt.Errorf("failed to decode JWT payload: %w", err) + } + + var payload map[string]any + if err := json.Unmarshal(payloadBytes, &payload); err != nil { + return nil, fmt.Errorf("failed to parse JWT payload: %w", err) + } + + expValue, ok := payload["exp"] + if !ok { + return nil, fmt.Errorf("JWT exp not found") + } + + expSeconds, err := parseJWTNumericClaim(expValue) + if err != nil { + return nil, fmt.Errorf("invalid JWT exp: %w", err) + } + + expiresAt := time.Unix(expSeconds, 0) + return &expiresAt, nil +} + +func parseJWTNumericClaim(value any) (int64, error) { + switch v := value.(type) { + case float64: + return int64(v), nil + case json.Number: + return v.Int64() + case int64: + return v, nil + case int: + return int64(v), nil + case string: + parsed, err := json.Number(v).Int64() + if err != nil { + return 0, err + } + return parsed, nil + default: + return 0, fmt.Errorf("unsupported claim type %T", value) + } +} diff --git a/pkg/integrations/dockerhub/token_test.go b/pkg/integrations/dockerhub/token_test.go new file mode 100644 index 0000000000..6623053a03 --- /dev/null +++ b/pkg/integrations/dockerhub/token_test.go @@ -0,0 +1,47 @@ +package dockerhub + +import ( + "encoding/base64" + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func Test__ParseJWTExpiry(t *testing.T) { + t.Run("parses exp claim", func(t *testing.T) { + exp := time.Now().Add(10 * time.Minute).Unix() + token := buildJWT(t, map[string]any{"exp": exp}) + + parsed, err := parseJWTExpiry(token) + 
require.NoError(t, err) + require.Equal(t, time.Unix(exp, 0), *parsed) + }) + + t.Run("missing exp -> error", func(t *testing.T) { + token := buildJWT(t, map[string]any{"sub": "user"}) + _, err := parseJWTExpiry(token) + require.Error(t, err) + }) +} + +func buildJWT(t *testing.T, payload map[string]any) string { + t.Helper() + + header := map[string]any{ + "alg": "HS256", + "typ": "JWT", + } + + headerBytes, err := json.Marshal(header) + require.NoError(t, err) + + payloadBytes, err := json.Marshal(payload) + require.NoError(t, err) + + headerSegment := base64.RawURLEncoding.EncodeToString(headerBytes) + payloadSegment := base64.RawURLEncoding.EncodeToString(payloadBytes) + + return headerSegment + "." + payloadSegment + ".signature" +} diff --git a/pkg/public/server.go b/pkg/public/server.go index 4c8eb55d61..4ddd80c222 100644 --- a/pkg/public/server.go +++ b/pkg/public/server.go @@ -807,6 +807,7 @@ func (s *Server) executeTriggerNode(ctx context.Context, body []byte, headers ht WorkflowID: node.WorkflowID.String(), NodeID: node.NodeID, Configuration: node.Configuration.Data(), + Metadata: contexts.NewNodeMetadataContext(tx, &node), Logger: logger, HTTP: s.registry.HTTPContext(), Webhook: contexts.NewNodeWebhookContext(ctx, tx, s.encryptor, &node, s.BaseURL+s.BasePath), @@ -841,6 +842,7 @@ func (s *Server) executeComponentNode(ctx context.Context, body []byte, headers WorkflowID: node.WorkflowID.String(), NodeID: node.NodeID, Configuration: node.Configuration.Data(), + Metadata: contexts.NewNodeMetadataContext(tx, &node), Logger: logger, HTTP: s.registry.HTTPContext(), Webhook: contexts.NewNodeWebhookContext(ctx, tx, s.encryptor, &node, s.BaseURL+s.BasePath), diff --git a/pkg/server/server.go b/pkg/server/server.go index b96ff29078..5809fcda48 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -39,6 +39,7 @@ import ( _ "github.com/superplanehq/superplane/pkg/integrations/datadog" _ "github.com/superplanehq/superplane/pkg/integrations/daytona" _ 
"github.com/superplanehq/superplane/pkg/integrations/discord" + _ "github.com/superplanehq/superplane/pkg/integrations/dockerhub" _ "github.com/superplanehq/superplane/pkg/integrations/grafana" _ "github.com/superplanehq/superplane/pkg/integrations/github" _ "github.com/superplanehq/superplane/pkg/integrations/gitlab" diff --git a/web_src/src/pages/workflowv2/mappers/dockerhub/get_image_tag.ts b/web_src/src/pages/workflowv2/mappers/dockerhub/get_image_tag.ts new file mode 100644 index 0000000000..b5a326b00e --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/dockerhub/get_image_tag.ts @@ -0,0 +1,106 @@ +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + ExecutionInfo, + NodeInfo, + OutputPayload, + SubtitleContext, +} from "../types"; +import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { getBackgroundColorClass, getColorClass } from "@/utils/colors"; +import { getState, getStateMap, getTriggerRenderer } from ".."; +import dockerIcon from "@/assets/icons/integrations/docker.svg"; +import { formatTimeAgo } from "@/utils/date"; +import { formatTimestampInUserTimezone } from "@/utils/timezone"; +import { MetadataItem } from "@/ui/metadataList"; +import { Tag } from "./types"; +import { formatBytes, stringOrDash } from "../utils"; + +interface GetImageTagConfiguration { + repository?: string; + tag?: string; +} + +export const getImageTagMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + const lastExecution = context.lastExecutions.length > 0 ? 
context.lastExecutions[0] : null; + const componentName = context.componentDefinition.name || "unknown"; + + return { + title: context.node.name || context.componentDefinition.label || "Unnamed component", + iconSrc: dockerIcon, + iconColor: getColorClass(context.componentDefinition.color), + collapsedBackground: getBackgroundColorClass(context.componentDefinition.color), + collapsed: context.node.isCollapsed, + eventSections: lastExecution ? getImageTagEventSections(context.nodes, lastExecution, componentName) : undefined, + includeEmptyState: !lastExecution, + metadata: getImageTagMetadataList(context.node), + eventStateMap: getStateMap(componentName), + }; + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const result = outputs?.default?.[0]?.data as Tag | undefined; + + if (!result) { + return {}; + } + + const images = result.images; + const image = Array.isArray(images) ? images[0] : images; + + return { + Tag: stringOrDash(result.name), + Status: stringOrDash(result.status), + "Image Size": formatBytes(image?.size), + "Full Size": formatBytes(result.full_size), + "Last Updated": result.last_updated ? formatTimestampInUserTimezone(result.last_updated) : "-", + "Last Pushed": result.tag_last_pushed ? formatTimestampInUserTimezone(result.tag_last_pushed) : "-", + "Last Pulled": result.tag_last_pulled ? 
formatTimestampInUserTimezone(result.tag_last_pulled) : "-", + "Last Updater": stringOrDash(result.last_updater_username), + "Image Digest": stringOrDash(image?.digest), + Architecture: stringOrDash(image?.architecture), + OS: stringOrDash(image?.os), + }; + }, + + subtitle(context: SubtitleContext): string { + if (!context.execution.createdAt) { + return ""; + } + return formatTimeAgo(new Date(context.execution.createdAt)); + }, +}; + +function getImageTagMetadataList(node: NodeInfo): MetadataItem[] { + const metadata: MetadataItem[] = []; + const configuration = node.configuration as GetImageTagConfiguration | undefined; + + if (configuration?.repository) { + metadata.push({ icon: "package", label: configuration.repository }); + } + + if (configuration?.tag) { + metadata.push({ icon: "tag", label: configuration.tag }); + } + + return metadata; +} + +function getImageTagEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle: formatTimeAgo(new Date(execution.createdAt!)), + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/dockerhub/index.ts b/web_src/src/pages/workflowv2/mappers/dockerhub/index.ts new file mode 100644 index 0000000000..8b20f946df --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/dockerhub/index.ts @@ -0,0 +1,20 @@ +import { ComponentBaseMapper, CustomFieldRenderer, EventStateRegistry, TriggerRenderer } from "../types"; +import { getImageTagMapper } from "./get_image_tag"; +import { onImagePushCustomFieldRenderer, onImagePushTriggerRenderer } from 
"./on_image_push"; +import { buildActionStateRegistry } from "../utils"; + +export const componentMappers: Record = { + getImageTag: getImageTagMapper, +}; + +export const triggerRenderers: Record = { + onImagePush: onImagePushTriggerRenderer, +}; + +export const customFieldRenderers: Record = { + onImagePush: onImagePushCustomFieldRenderer, +}; + +export const eventStateRegistry: Record = { + getImageTag: buildActionStateRegistry("retrieved"), +}; diff --git a/web_src/src/pages/workflowv2/mappers/dockerhub/on_image_push.tsx b/web_src/src/pages/workflowv2/mappers/dockerhub/on_image_push.tsx new file mode 100644 index 0000000000..27c5b34a08 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/dockerhub/on_image_push.tsx @@ -0,0 +1,155 @@ +import { getBackgroundColorClass } from "@/utils/colors"; +import { CustomFieldRenderer, NodeInfo, TriggerEventContext, TriggerRenderer, TriggerRendererContext } from "../types"; +import { TriggerProps } from "@/ui/trigger"; +import dockerIcon from "@/assets/icons/integrations/docker.svg"; +import { Repository, RepositoryMetadata } from "./types"; +import { formatTimeAgo } from "@/utils/date"; +import { formatTimestampInUserTimezone } from "@/utils/timezone"; +import { formatPredicate, Predicate, stringOrDash } from "../utils"; +import { MetadataItem } from "@/ui/metadataList"; + +export interface OnImagePushMetadata { + repository?: RepositoryMetadata; + webhookUrl?: string; +} + +export interface OnImagePushConfiguration { + repository?: string; + tags?: Predicate[]; +} + +interface PushData { + tag?: string; + pushed_at?: number; + pusher?: string; +} + +interface ImagePushEvent { + callback_url?: string; + push_data?: PushData; + repository?: Repository; +} + +/** + * Renderer for the "dockerhub.onImagePush" trigger + */ +export const onImagePushTriggerRenderer: TriggerRenderer = { + getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { + const eventData = context.event?.data as 
ImagePushEvent; + const repository = eventData?.repository?.repo_name; + const tag = eventData?.push_data?.tag; + + const title = repository ? `${repository}${tag ? `:${tag}` : ""}` : "Image push"; + const subtitle = context.event?.createdAt ? formatTimeAgo(new Date(context.event?.createdAt || "")) : ""; + + return { title, subtitle }; + }, + + getRootEventValues: (context: TriggerEventContext): Record => { + const eventData = context.event?.data as ImagePushEvent; + const repository = eventData?.repository; + const pushData = eventData?.push_data; + const pushedAt = pushData?.pushed_at ? new Date(pushData.pushed_at * 1000).toISOString() : undefined; + + const visibility = repository?.is_private === undefined ? "-" : repository.is_private ? "Private" : "Public"; + + return { + Repository: stringOrDash(repository?.repo_name), + Tag: stringOrDash(pushData?.tag), + Pusher: stringOrDash(pushData?.pusher), + "Pushed At": pushedAt ? formatTimestampInUserTimezone(pushedAt) : "-", + "Repository URL": stringOrDash(repository?.repo_url), + Visibility: visibility, + Stars: stringOrDash(repository?.star_count), + Pulls: stringOrDash(repository?.pull_count), + }; + }, + + getTriggerProps: (context: TriggerRendererContext) => { + const { node, definition, lastEvent } = context; + const metadata = node.metadata as OnImagePushMetadata | undefined; + const configuration = node.configuration as OnImagePushConfiguration | undefined; + const metadataItems: MetadataItem[] = []; + + if (metadata?.repository) { + metadataItems.push({ + icon: "package", + label: getRepositoryLabel(metadata), + }); + } + + if (configuration?.tags?.length) { + metadataItems.push({ + icon: "tag", + label: configuration.tags.map(formatPredicate).join(", "), + }); + } + + const props: TriggerProps = { + title: node.name || definition.label || "Unnamed trigger", + iconSrc: dockerIcon, + collapsedBackground: getBackgroundColorClass(definition.color), + metadata: metadataItems, + }; + + if (lastEvent) { + const { 
title, subtitle } = onImagePushTriggerRenderer.getTitleAndSubtitle({ event: lastEvent }); + props.lastEventData = { + title, + subtitle, + receivedAt: new Date(lastEvent.createdAt), + state: "triggered", + eventId: lastEvent.id, + }; + } + + return props; + }, +}; + +export const onImagePushCustomFieldRenderer: CustomFieldRenderer = { + render: (node: NodeInfo) => { + const metadata = node.metadata as OnImagePushMetadata | undefined; + const repositoryLabel = getRepositoryLabel(metadata); + const repositoryUrl = `https://hub.docker.com/repository/docker/${repositoryLabel}/webhooks`; + const webhookUrl = metadata?.webhookUrl || "[URL GENERATED ONCE THE CANVAS IS SAVED]"; + + return ( +
    +
    +
    + DockerHub Webhook Setup +
    +
      +
    1. + Go to the{" "} + + {repositoryLabel} + {" "} + webhooks page +
    2. +
    3. Add webhook
    4. +
    5. Set the webhook URL below and save
    6. +
    +
    + Webhook URL +
    +
    +                    {webhookUrl}
    +                  
    +
    +
    +

    DockerHub will send tag push events to SuperPlane once the webhook is configured.

    +
    +
    +
    +
    + ); + }, +}; + +function getRepositoryLabel(metadata?: OnImagePushMetadata): string | undefined { + return metadata?.repository?.namespace + ? `${metadata.repository.namespace}/${metadata.repository.name}` + : metadata?.repository?.name; +} diff --git a/web_src/src/pages/workflowv2/mappers/dockerhub/types.ts b/web_src/src/pages/workflowv2/mappers/dockerhub/types.ts new file mode 100644 index 0000000000..d039f2e171 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/dockerhub/types.ts @@ -0,0 +1,40 @@ +export interface Repository { + name?: string; + namespace?: string; + repo_name?: string; + repo_url?: string; + description?: string; + is_private?: boolean; + star_count?: number; + pull_count?: number; + status?: string; +} + +export interface RepositoryMetadata { + name?: string; + namespace?: string; +} + +export interface TagImage { + architecture?: string; + os?: string; + digest?: string; + size?: number; + status?: string; + last_pulled?: string; + last_pushed?: string; +} + +export interface Tag { + id?: number; + name?: string; + full_size?: number; + last_updated?: string; + last_updater?: number; + last_updater_username?: string; + status?: string; + tag_last_pulled?: string; + tag_last_pushed?: string; + repository?: number; + images?: TagImage[] | TagImage; +} diff --git a/web_src/src/pages/workflowv2/mappers/index.ts b/web_src/src/pages/workflowv2/mappers/index.ts index 67826690fe..1fd4340638 100644 --- a/web_src/src/pages/workflowv2/mappers/index.ts +++ b/web_src/src/pages/workflowv2/mappers/index.ts @@ -101,6 +101,12 @@ import { triggerRenderers as claudeTriggerRenderers, eventStateRegistry as claudeEventStateRegistry, } from "./claude/index"; +import { + componentMappers as dockerhubComponentMappers, + customFieldRenderers as dockerhubCustomFieldRenderers, + triggerRenderers as dockerhubTriggerRenderers, + eventStateRegistry as dockerhubEventStateRegistry, +} from "./dockerhub"; import { componentMappers as grafanaComponentMappers, 
triggerRenderers as grafanaTriggerRenderers, @@ -155,6 +161,7 @@ const appMappers: Record> = { discord: discordComponentMappers, openai: openaiComponentMappers, claude: claudeComponentMappers, + dockerhub: dockerhubComponentMappers, grafana: grafanaComponentMappers, }; @@ -176,6 +183,7 @@ const appTriggerRenderers: Record> = { discord: discordTriggerRenderers, openai: openaiTriggerRenderers, claude: claudeTriggerRenderers, + dockerhub: dockerhubTriggerRenderers, grafana: grafanaTriggerRenderers, }; @@ -197,6 +205,7 @@ const appEventStateRegistries: Record claude: claudeEventStateRegistry, aws: awsEventStateRegistry, gitlab: gitlabEventStateRegistry, + dockerhub: dockerhubEventStateRegistry, grafana: grafanaEventStateRegistry, }; @@ -223,6 +232,7 @@ const customFieldRenderers: Record = { const appCustomFieldRenderers: Record> = { github: githubCustomFieldRenderers, + dockerhub: dockerhubCustomFieldRenderers, }; /** diff --git a/web_src/src/ui/BuildingBlocksSidebar/index.tsx b/web_src/src/ui/BuildingBlocksSidebar/index.tsx index b34fae223c..8b85279533 100644 --- a/web_src/src/ui/BuildingBlocksSidebar/index.tsx +++ b/web_src/src/ui/BuildingBlocksSidebar/index.tsx @@ -33,6 +33,7 @@ import rootlyIcon from "@/assets/icons/integrations/rootly.svg"; import SemaphoreLogo from "@/assets/semaphore-logo-sign-black.svg"; import sendgridIcon from "@/assets/icons/integrations/sendgrid.svg"; import renderIcon from "@/assets/icons/integrations/render.svg"; +import dockerIcon from "@/assets/icons/integrations/docker.svg"; export interface BuildingBlock { name: string; @@ -409,6 +410,7 @@ function CategorySection({ slack: slackIcon, sendgrid: sendgridIcon, render: renderIcon, + dockerhub: dockerIcon, aws: { codeArtifact: awsIcon, lambda: awsLambdaIcon, @@ -481,6 +483,7 @@ function CategorySection({ slack: slackIcon, sendgrid: sendgridIcon, render: renderIcon, + dockerhub: dockerIcon, aws: { codeArtifact: awsCodeArtifactIcon, ecr: awsEcrIcon, diff --git 
a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index 4756c09f7d..205ba5a8eb 100644 --- a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -19,6 +19,7 @@ import smtpIcon from "@/assets/icons/integrations/smtp.svg"; import SemaphoreLogo from "@/assets/semaphore-logo-sign-black.svg"; import sendgridIcon from "@/assets/icons/integrations/sendgrid.svg"; import renderIcon from "@/assets/icons/integrations/render.svg"; +import dockerIcon from "@/assets/icons/integrations/docker.svg"; /** Integration type name (e.g. "github") → logo src. Used for Settings tab and header. */ export const INTEGRATION_APP_LOGO_MAP: Record = { @@ -41,6 +42,7 @@ export const INTEGRATION_APP_LOGO_MAP: Record = { smtp: smtpIcon, sendgrid: sendgridIcon, render: renderIcon, + dockerhub: dockerIcon, }; /** Block name first part (e.g. "github") or compound (e.g. aws.lambda) → logo src for header. 
*/ @@ -62,6 +64,7 @@ export const APP_LOGO_MAP: Record> = { slack: slackIcon, sendgrid: sendgridIcon, render: renderIcon, + dockerhub: dockerIcon, aws: { lambda: awsLambdaIcon, }, diff --git a/web_src/src/utils/integrationDisplayName.ts b/web_src/src/utils/integrationDisplayName.ts index 3166153dbb..64487766ea 100644 --- a/web_src/src/utils/integrationDisplayName.ts +++ b/web_src/src/utils/integrationDisplayName.ts @@ -19,6 +19,7 @@ const INTEGRATION_TYPE_DISPLAY_NAMES: Record = { aws: "AWS", smtp: "SMTP", sendgrid: "SendGrid", + dockerhub: "DockerHub", }; /** From ff48f6c55a7779c9ccac7f68b5811e03e888877e Mon Sep 17 00:00:00 2001 From: Muhammad Fuzail Zubari Date: Wed, 11 Feb 2026 12:53:33 +0500 Subject: [PATCH 049/160] fixes for the trigger webhook url + for datashource uid Signed-off-by: Muhammad Fuzail Zubari --- pkg/core/trigger.go | 1 + pkg/integrations/grafana/on_alert_firing.go | 29 ++++++++++++++++++- pkg/integrations/grafana/query_data_source.go | 4 +-- .../sendgrid/on_email_event_test.go | 4 +++ pkg/workers/contexts/node_webhook_context.go | 8 +++++ test/support/contexts/contexts.go | 5 ++++ web_src/src/ui/componentSidebar/index.tsx | 23 +++++++++++---- 7 files changed, 65 insertions(+), 9 deletions(-) diff --git a/pkg/core/trigger.go b/pkg/core/trigger.go index 0e86340fa0..b5c3fad91a 100644 --- a/pkg/core/trigger.go +++ b/pkg/core/trigger.go @@ -134,6 +134,7 @@ type WebhookRequestContext struct { type NodeWebhookContext interface { Setup() (string, error) + GetURL() (string, error) GetSecret() ([]byte, error) ResetSecret() ([]byte, []byte, error) GetBaseURL() string diff --git a/pkg/integrations/grafana/on_alert_firing.go b/pkg/integrations/grafana/on_alert_firing.go index d413b46cea..c30fb39726 100644 --- a/pkg/integrations/grafana/on_alert_firing.go +++ b/pkg/integrations/grafana/on_alert_firing.go @@ -56,7 +56,15 @@ func (t *OnAlertFiring) Configuration() []configuration.Field { } func (t *OnAlertFiring) Setup(ctx core.TriggerContext) error { - return 
ctx.Integration.RequestWebhook(struct{}{}) + if err := ctx.Integration.RequestWebhook(struct{}{}); err != nil { + return err + } + + if err := setWebhookURLMetadata(ctx); err != nil { + ctx.Logger.Warnf("grafana onAlertFiring: failed to store webhook url metadata: %v", err) + } + + return nil } func (t *OnAlertFiring) Actions() []core.Action { @@ -128,3 +136,22 @@ func extractString(value any) string { } return strings.TrimSpace(text) } + +func setWebhookURLMetadata(ctx core.TriggerContext) error { + webhookURL, err := ctx.Webhook.GetURL() + if err != nil { + return err + } + + metadata := map[string]any{} + if existing := ctx.Metadata.Get(); existing != nil { + if existingMap, ok := existing.(map[string]any); ok { + for key, value := range existingMap { + metadata[key] = value + } + } + } + + metadata["webhookUrl"] = webhookURL + return ctx.Metadata.Set(metadata) +} diff --git a/pkg/integrations/grafana/query_data_source.go b/pkg/integrations/grafana/query_data_source.go index c7f090674a..5a53a6714b 100644 --- a/pkg/integrations/grafana/query_data_source.go +++ b/pkg/integrations/grafana/query_data_source.go @@ -33,7 +33,7 @@ type grafanaQueryRequest struct { type grafanaQuery struct { RefID string `json:"refId"` - DatasourceUID string `json:"datasourceUid"` + Datasource any `json:"datasource,omitempty"` Expr string `json:"expr,omitempty"` Query string `json:"query,omitempty"` Format string `json:"format,omitempty"` @@ -160,7 +160,7 @@ func (q *QueryDataSource) Execute(ctx core.ExecutionContext) error { Queries: []grafanaQuery{ { RefID: "A", - DatasourceUID: strings.TrimSpace(spec.DataSourceUID), + Datasource: map[string]string{"uid": strings.TrimSpace(spec.DataSourceUID)}, Expr: strings.TrimSpace(spec.Query), Query: strings.TrimSpace(spec.Query), }, diff --git a/pkg/integrations/sendgrid/on_email_event_test.go b/pkg/integrations/sendgrid/on_email_event_test.go index 7e0282d9c2..a18f7aad48 100644 --- a/pkg/integrations/sendgrid/on_email_event_test.go +++ 
b/pkg/integrations/sendgrid/on_email_event_test.go @@ -171,6 +171,10 @@ func (t *testNodeWebhookContext) Setup() (string, error) { return "", nil } +func (t *testNodeWebhookContext) GetURL() (string, error) { + return "", nil +} + func (t *testNodeWebhookContext) GetSecret() ([]byte, error) { return t.secret, nil } diff --git a/pkg/workers/contexts/node_webhook_context.go b/pkg/workers/contexts/node_webhook_context.go index a0b1f78b69..a1c0e487be 100644 --- a/pkg/workers/contexts/node_webhook_context.go +++ b/pkg/workers/contexts/node_webhook_context.go @@ -66,6 +66,14 @@ func (c *NodeWebhookContext) ResetSecret() ([]byte, []byte, error) { return []byte(plainKey), encryptedKey, nil } +func (c *NodeWebhookContext) GetURL() (string, error) { + if c.node.WebhookID == nil { + return "", fmt.Errorf("node does not have a webhook") + } + + return fmt.Sprintf("%s/webhooks/%s", c.GetBaseURL(), c.node.WebhookID.String()), nil +} + func (c *NodeWebhookContext) Setup() (string, error) { webhook, err := c.findOrCreateWebhook() if err != nil { diff --git a/test/support/contexts/contexts.go b/test/support/contexts/contexts.go index 5fe47703d1..45d86eb63c 100644 --- a/test/support/contexts/contexts.go +++ b/test/support/contexts/contexts.go @@ -49,6 +49,11 @@ func (w *WebhookContext) Setup() (string, error) { return id.String(), nil } +func (w *WebhookContext) GetURL() (string, error) { + id := uuid.New() + return fmt.Sprintf("%s/webhooks/%s", w.GetBaseURL(), id.String()), nil +} + func (w *WebhookContext) GetBaseURL() string { return "http://localhost:3000/api/v1" } diff --git a/web_src/src/ui/componentSidebar/index.tsx b/web_src/src/ui/componentSidebar/index.tsx index 553c516c94..03394bdf02 100644 --- a/web_src/src/ui/componentSidebar/index.tsx +++ b/web_src/src/ui/componentSidebar/index.tsx @@ -279,14 +279,25 @@ export const ComponentSidebar = ({ ); const selectedIntegrationForDialog = isCreateIntegrationDialogOpen ? 
createIntegrationDefinition : undefined; const selectedInstructions = selectedIntegrationForDialog?.instructions?.trim(); + const nodeWebhookUrl = useMemo(() => { + if (!nodeId) return ""; + const node = workflowNodes.find((n) => n.id === nodeId); + const metadata = node?.metadata as Record | undefined; + if (!metadata) return ""; + const webhookUrl = typeof metadata.webhookUrl === "string" ? metadata.webhookUrl : ""; + const webhookUrlSnake = typeof metadata.webhook_url === "string" ? metadata.webhook_url : ""; + const legacyUrl = typeof metadata.url === "string" ? metadata.url : ""; + return webhookUrl || webhookUrlSnake || legacyUrl || ""; + }, [nodeId, workflowNodes]); const handleCopyNodeId = useCallback(async () => { - if (nodeId) { - await navigator.clipboard.writeText(nodeId); + const textToCopy = nodeWebhookUrl || nodeId; + if (textToCopy) { + await navigator.clipboard.writeText(textToCopy); setJustCopied(true); setTimeout(() => setJustCopied(false), 1000); } - }, [nodeId]); + }, [nodeId, nodeWebhookUrl]); const handleOpenCreateIntegrationDialog = useCallback(() => { setCreateIntegrationName(createIntegrationDefinition?.name ?? ""); @@ -666,13 +677,13 @@ export const ComponentSidebar = ({

    {nodeName}

    - {nodeId && !hideNodeId && ( + {(nodeWebhookUrl || nodeId) && !hideNodeId && (
    - {nodeId} + {nodeWebhookUrl || nodeId} From 7b6b09322661e85fb4da3e3eaf10a37b648e283d Mon Sep 17 00:00:00 2001 From: Aleksandar Mitrovic <61409859+AleksandarCole@users.noreply.github.com> Date: Wed, 11 Feb 2026 09:42:49 +0100 Subject: [PATCH 050/160] chore: Hide run button (#3028) Hides the run button from component node action menu. It was confusing for a lot of people - if we bring it back, functionality will need to be redesigned. Signed-off-by: Muhammad Fuzail Zubari --- test/e2e/canvas_page_test.go | 6 ++---- test/e2e/shared/canvas_steps.go | 8 ++++---- test/e2e/shared/component_steps.go | 8 ++++---- web_src/src/pages/workflowv2/mappers/start.tsx | 1 + web_src/src/ui/componentBase/index.tsx | 17 ----------------- 5 files changed, 11 insertions(+), 29 deletions(-) diff --git a/test/e2e/canvas_page_test.go b/test/e2e/canvas_page_test.go index de44ea62d3..9f8fe3f72d 100644 --- a/test/e2e/canvas_page_test.go +++ b/test/e2e/canvas_page_test.go @@ -258,13 +258,11 @@ func (s *CanvasPageSteps) givenACanvasWithManualTriggerAndWaitNodeAndQueuedItems s.canvas.Connect("Start", "Wait") s.canvas.Save() - nodeHeader := q.TestID("node", "start", "header") + startTemplateRun := q.Locator(`.react-flow__node:has([data-testid="node-start-header"]) [data-testid="start-template-run"]`) emitEvent := q.Locator("button:has-text('Emit Event')") for i := 0; i < itemsAmount; i++ { - s.session.HoverOver(nodeHeader) - s.session.Sleep(100) - s.session.Click(q.TestID("node-action-run")) + s.session.Click(startTemplateRun) s.session.Click(emitEvent) s.session.Sleep(100) } diff --git a/test/e2e/shared/canvas_steps.go b/test/e2e/shared/canvas_steps.go index dac6f0c0a2..66671781f5 100644 --- a/test/e2e/shared/canvas_steps.go +++ b/test/e2e/shared/canvas_steps.go @@ -2,6 +2,7 @@ package shared import ( "strconv" + "strings" "testing" "time" @@ -250,10 +251,9 @@ func (s *CanvasSteps) StartEditingNode(name string) { } func (s *CanvasSteps) RunManualTrigger(name string) { - nodeHeader := 
q.TestID("node", name, "header") - s.session.HoverOver(nodeHeader) - s.session.Sleep(100) - s.session.Click(q.TestID("node-action-run")) + // Use the Start node's template Run button (in the default payload template) instead of the removed header Run button + startTemplateRun := q.Locator(`.react-flow__node:has([data-testid="node-` + strings.ToLower(name) + `-header"]) [data-testid="start-template-run"]`) + s.session.Click(startTemplateRun) s.session.Click(q.TestID("emit-event-submit-button")) } diff --git a/test/e2e/shared/component_steps.go b/test/e2e/shared/component_steps.go index 6938cdb3f1..eba848f6da 100644 --- a/test/e2e/shared/component_steps.go +++ b/test/e2e/shared/component_steps.go @@ -2,6 +2,7 @@ package shared import ( "strconv" + "strings" "testing" "github.com/stretchr/testify/require" @@ -168,10 +169,9 @@ func (s *ComponentSteps) StartEditingNode(name string) { } func (s *ComponentSteps) RunManualTrigger(name string) { - nodeHeader := q.TestID("node", name, "header") - s.session.HoverOver(nodeHeader) - s.session.Sleep(100) - s.session.Click(q.TestID("node-action-run")) + // Use the Start node's template Run button (in the default payload template) instead of the removed header Run button + startTemplateRun := q.Locator(`.react-flow__node:has([data-testid="node-` + strings.ToLower(name) + `-header"]) [data-testid="start-template-run"]`) + s.session.Click(startTemplateRun) s.session.Click(q.TestID("emit-event-submit-button")) } diff --git a/web_src/src/pages/workflowv2/mappers/start.tsx b/web_src/src/pages/workflowv2/mappers/start.tsx index fdd83daebe..bf64ea0442 100644 --- a/web_src/src/pages/workflowv2/mappers/start.tsx +++ b/web_src/src/pages/workflowv2/mappers/start.tsx @@ -111,6 +111,7 @@ const startCustomFieldRenderer: CustomFieldRenderer = {
    - )} {onTogglePause && !hasError && (
    {integrationDef.configuration.map((field: ConfigurationField) => ( diff --git a/web_src/src/pages/organization/settings/Integrations.tsx b/web_src/src/pages/organization/settings/Integrations.tsx index 2fc9896829..3814a1320f 100644 --- a/web_src/src/pages/organization/settings/Integrations.tsx +++ b/web_src/src/pages/organization/settings/Integrations.tsx @@ -305,9 +305,6 @@ export function Integrations({ organizationId }: IntegrationsProps) { Integration Name * -

    - A unique name for this integration -

    +

    + A unique name for this integration +

    {/* Configuration Fields */} {selectedIntegration.configuration && selectedIntegration.configuration.length > 0 && ( -
    +
    {selectedIntegration.configuration.map((field) => { if (!field.name) return null; return ( diff --git a/web_src/src/ui/IntegrationInstructions.tsx b/web_src/src/ui/IntegrationInstructions.tsx index 1df6801046..0654f44c2a 100644 --- a/web_src/src/ui/IntegrationInstructions.tsx +++ b/web_src/src/ui/IntegrationInstructions.tsx @@ -3,7 +3,7 @@ import { ExternalLink } from "lucide-react"; import { Button } from "@/components/ui/button"; const INSTRUCTIONS_CLASSES = - "rounded-md border border-orange-950/15 bg-orange-100 p-4 text-sm text-gray-800 dark:border-blue-800 dark:bg-blue-950/30 dark:text-gray-200 [&_ol]:list-decimal [&_ol]:ml-5 [&_ol]:space-y-1 [&_ul]:list-disc [&_ul]:ml-5 [&_ul]:space-y-1"; + "rounded-md border border-orange-950/15 bg-orange-100 p-4 text-sm text-gray-800 dark:border-blue-800 dark:bg-blue-950/30 dark:text-gray-200 [&_a]:!underline [&_a]:underline-offset-2 [&_a]:decoration-2 [&_a]:decoration-current [&_ol]:list-decimal [&_ol]:ml-5 [&_ol]:space-y-1 [&_ul]:list-disc [&_ul]:ml-5 [&_ul]:space-y-1"; export interface IntegrationInstructionsProps { /** Markdown description (e.g. setup steps) */ @@ -39,7 +39,7 @@ export function IntegrationInstructions({ description, onContinue, className = " ol: ({ children }) =>
      {children}
    , li: ({ children }) =>
  • {children}
  • , a: ({ children, href }) => ( - + {children} ), diff --git a/web_src/src/ui/componentSidebar/SettingsTab.tsx b/web_src/src/ui/componentSidebar/SettingsTab.tsx index 965ed95f09..54d3738c87 100644 --- a/web_src/src/ui/componentSidebar/SettingsTab.tsx +++ b/web_src/src/ui/componentSidebar/SettingsTab.tsx @@ -390,10 +390,10 @@ export function SettingsTab({ {selectedIntegrationFull.status?.state diff --git a/web_src/src/ui/componentSidebar/index.tsx b/web_src/src/ui/componentSidebar/index.tsx index 03394bdf02..43de43eed6 100644 --- a/web_src/src/ui/componentSidebar/index.tsx +++ b/web_src/src/ui/componentSidebar/index.tsx @@ -6,7 +6,7 @@ import { Input } from "@/components/ui/input"; import { Label } from "@/components/ui/label"; import { getIntegrationTypeDisplayName } from "@/utils/integrationDisplayName"; import { resolveIcon } from "@/lib/utils"; -import { Check, Copy, Loader2, TriangleAlert, X } from "lucide-react"; +import { Check, Copy, Loader2, Settings, TriangleAlert, X } from "lucide-react"; import React, { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { getHeaderIconSrc, IntegrationIcon } from "@/ui/componentSidebar/integrationIcons"; import { @@ -34,6 +34,7 @@ import { TriggersTrigger, BlueprintsBlueprint, OrganizationsIntegration, + OrganizationsBrowserAction, ComponentsIntegrationRef, } from "@/api-client"; import { EventState, EventStateMap } from "../componentBase"; @@ -253,6 +254,9 @@ export const ComponentSidebar = ({ const [isCreateIntegrationDialogOpen, setIsCreateIntegrationDialogOpen] = useState(false); const [createIntegrationName, setCreateIntegrationName] = useState(""); const [createIntegrationConfig, setCreateIntegrationConfig] = useState>({}); + const [createIntegrationBrowserAction, setCreateIntegrationBrowserAction] = useState< + OrganizationsBrowserAction | undefined + >(undefined); const [configureIntegrationId, setConfigureIntegrationId] = useState(null); const [configureIntegrationName, 
setConfigureIntegrationName] = useState(""); // Use autocompleteExampleObj directly - current node is already filtered out upstream @@ -279,6 +283,17 @@ export const ComponentSidebar = ({ ); const selectedIntegrationForDialog = isCreateIntegrationDialogOpen ? createIntegrationDefinition : undefined; const selectedInstructions = selectedIntegrationForDialog?.instructions?.trim(); + const integrationHomeHref = useMemo(() => { + if (!domainId) return "#"; + const selectedIntegrationId = + integrationRef?.id || + integrations?.find((integration) => integration.spec?.integrationName === selectedIntegrationForDialog?.name) + ?.metadata?.id; + if (selectedIntegrationId) { + return `/${domainId}/settings/integrations/${selectedIntegrationId}`; + } + return `/${domainId}/settings/integrations`; + }, [domainId, integrationRef?.id, integrations, selectedIntegrationForDialog?.name]); const nodeWebhookUrl = useMemo(() => { if (!nodeId) return ""; const node = workflowNodes.find((n) => n.id === nodeId); @@ -302,6 +317,7 @@ export const ComponentSidebar = ({ const handleOpenCreateIntegrationDialog = useCallback(() => { setCreateIntegrationName(createIntegrationDefinition?.name ?? 
""); setCreateIntegrationConfig({}); + setCreateIntegrationBrowserAction(undefined); setIsCreateIntegrationDialogOpen(true); }, [createIntegrationDefinition?.name]); @@ -309,20 +325,32 @@ export const ComponentSidebar = ({ setIsCreateIntegrationDialogOpen(false); setCreateIntegrationName(""); setCreateIntegrationConfig({}); + setCreateIntegrationBrowserAction(undefined); createIntegrationMutation.reset(); }, [createIntegrationMutation]); const handleCreateIntegrationSubmit = useCallback(async () => { if (!selectedIntegrationForDialog?.name || !domainId) return; + const nextName = createIntegrationName.trim(); + if (!nextName) { + showErrorToast("Integration name is required"); + return; + } + try { - await createIntegrationMutation.mutateAsync({ + const result = await createIntegrationMutation.mutateAsync({ integrationName: selectedIntegrationForDialog.name, - name: createIntegrationName.trim(), + name: nextName, configuration: createIntegrationConfig, }); + const browserAction = result.data?.integration?.status?.browserAction; + if (browserAction) { + setCreateIntegrationBrowserAction(browserAction); + return; + } handleCloseCreateIntegrationDialog(); - } catch (_error) { - showErrorToast("Failed to create integration"); + } catch (error) { + showErrorToast(`Failed to create integration: ${getApiErrorMessage(error)}`); } }, [ selectedIntegrationForDialog?.name, @@ -333,6 +361,30 @@ export const ComponentSidebar = ({ handleCloseCreateIntegrationDialog, ]); + const handleCreateBrowserAction = useCallback(() => { + if (!createIntegrationBrowserAction) return; + const { url, method, formFields } = createIntegrationBrowserAction; + if (method?.toUpperCase() === "POST" && formFields) { + const form = document.createElement("form"); + form.method = "POST"; + form.action = url || ""; + form.target = "_blank"; + form.style.display = "none"; + Object.entries(formFields).forEach(([key, value]) => { + const input = document.createElement("input"); + input.type = "hidden"; + 
input.name = key; + input.value = String(value); + form.appendChild(input); + }); + document.body.appendChild(form); + form.submit(); + document.body.removeChild(form); + } else if (url) { + window.open(url, "_blank"); + } + }, [createIntegrationBrowserAction]); + const handleOpenConfigureIntegrationDialog = useCallback((integrationId: string) => { setConfigureIntegrationId(integrationId); }, []); @@ -915,14 +967,27 @@ export const ComponentSidebar = ({ iconSlug={selectedIntegrationForDialog.icon} className="h-6 w-6 text-gray-500 dark:text-gray-400" /> - - Configure{" "} - {getIntegrationTypeDisplayName(undefined, selectedIntegrationForDialog.name) || - selectedIntegrationForDialog.name} - +
    + + Configure{" "} + {getIntegrationTypeDisplayName(undefined, selectedIntegrationForDialog.name) || + selectedIntegrationForDialog.name} + + + + +
    - {selectedInstructions && ( - + {(createIntegrationBrowserAction?.description || selectedInstructions) && ( + )}
    @@ -931,17 +996,17 @@ export const ComponentSidebar = ({ Integration Name * -

    A unique name for this integration

    setCreateIntegrationName(e.target.value)} placeholder="e.g., my-app-integration" /> +

    A unique name for this integration

    {selectedIntegrationForDialog.configuration && selectedIntegrationForDialog.configuration.length > 0 && ( -
    +
    {selectedIntegrationForDialog.configuration.map((field: ConfigurationField) => { if (!field.name) return null; return ( @@ -963,28 +1028,41 @@ export const ComponentSidebar = ({ )}
    - - + {createIntegrationBrowserAction ? ( + <> + + + + ) : ( + <> + + + + )} {createIntegrationMutation.isError && (
    @@ -1016,11 +1094,24 @@ export const ComponentSidebar = ({ iconSlug={configureIntegrationDefinition?.icon} className="h-6 w-6 text-gray-500 dark:text-gray-400" /> - - Configure{" "} - {getIntegrationTypeDisplayName(undefined, configureIntegration.spec?.integrationName) || - configureIntegration.spec?.integrationName} - +
    + + Configure{" "} + {getIntegrationTypeDisplayName(undefined, configureIntegration.spec?.integrationName) || + configureIntegration.spec?.integrationName} + + + + +
    {configureIntegration.status?.state === "error" && configureIntegration.status?.stateDescription && ( @@ -1048,13 +1139,13 @@ export const ComponentSidebar = ({ Integration Name * -

    A unique name for this integration

    setConfigureIntegrationName(e.target.value)} placeholder="e.g., my-app-integration" /> +

    A unique name for this integration

    {configureIntegrationDefinition?.configuration && From 483c89d43e75029485d58d5eda38c4e70c07a897 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Thu, 12 Feb 2026 12:19:17 +0100 Subject: [PATCH 059/160] chore: WebhookContext is now available for action components in the Setup and Execute callback function (#3072) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Needed in places where we don't have access to setting up a webhook via an API. e.g. In Cursor. Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- pkg/core/component.go | 2 ++ pkg/grpc/actions/canvases/update_canvas.go | 5 +++-- pkg/server/server.go | 3 ++- pkg/workers/node_executor.go | 25 ++++++++++++---------- pkg/workers/node_executor_test.go | 12 +++++------ 5 files changed, 27 insertions(+), 20 deletions(-) diff --git a/pkg/core/component.go b/pkg/core/component.go index bc62d89b80..036e4704e2 100644 --- a/pkg/core/component.go +++ b/pkg/core/component.go @@ -153,6 +153,7 @@ type ExecutionContext struct { Integration IntegrationContext Notifications NotificationContext Secrets SecretsContext + Webhook NodeWebhookContext } /* @@ -178,6 +179,7 @@ type SetupContext struct { Requests RequestContext Auth AuthContext Integration IntegrationContext + Webhook NodeWebhookContext } /* diff --git a/pkg/grpc/actions/canvases/update_canvas.go b/pkg/grpc/actions/canvases/update_canvas.go index 46f57ba0da..64c03902d0 100644 --- a/pkg/grpc/actions/canvases/update_canvas.go +++ b/pkg/grpc/actions/canvases/update_canvas.go @@ -312,7 +312,7 @@ func setupNode(ctx context.Context, tx *gorm.DB, encryptor crypto.Encryptor, reg case models.NodeTypeTrigger: return setupTrigger(ctx, tx, encryptor, registry, node, webhookBaseURL) case models.NodeTypeComponent: - return setupComponent(tx, encryptor, registry, node) + return setupComponent(ctx, tx, encryptor, registry, node, webhookBaseURL) case models.NodeTypeWidget: // Widgets are not 
persisted and don't have any logic to execute and to setup. return nil @@ -363,7 +363,7 @@ func setupTrigger(ctx context.Context, tx *gorm.DB, encryptor crypto.Encryptor, return tx.Save(node).Error } -func setupComponent(tx *gorm.DB, encryptor crypto.Encryptor, registry *registry.Registry, node *models.CanvasNode) error { +func setupComponent(ctx context.Context, tx *gorm.DB, encryptor crypto.Encryptor, registry *registry.Registry, node *models.CanvasNode, webhookBaseURL string) error { ref := node.Ref.Data() component, err := registry.GetComponent(ref.Component.Name) if err != nil { @@ -376,6 +376,7 @@ func setupComponent(tx *gorm.DB, encryptor crypto.Encryptor, registry *registry. HTTP: registry.HTTPContext(), Metadata: contexts.NewNodeMetadataContext(tx, node), Requests: contexts.NewNodeRequestContext(tx, node), + Webhook: contexts.NewNodeWebhookContext(ctx, tx, encryptor, node, webhookBaseURL), } if node.AppInstallationID != nil { diff --git a/pkg/server/server.go b/pkg/server/server.go index 738fdd492a..c3c95d960a 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -81,7 +81,8 @@ func startWorkers(encryptor crypto.Encryptor, registry *registry.Registry, oidcP if os.Getenv("START_WORKFLOW_NODE_EXECUTOR") == "yes" || os.Getenv("START_NODE_EXECUTOR") == "yes" { log.Println("Starting Node Executor") - w := workers.NewNodeExecutor(encryptor, registry, baseURL) + webhookBaseURL := getWebhookBaseURL(baseURL) + w := workers.NewNodeExecutor(encryptor, registry, baseURL, webhookBaseURL) go w.Start(context.Background()) } diff --git a/pkg/workers/node_executor.go b/pkg/workers/node_executor.go index 6389d1dd9b..1edef80f06 100644 --- a/pkg/workers/node_executor.go +++ b/pkg/workers/node_executor.go @@ -27,20 +27,22 @@ import ( var ErrRecordLocked = errors.New("record locked") type NodeExecutor struct { - encryptor crypto.Encryptor - registry *registry.Registry - baseURL string - semaphore *semaphore.Weighted - logger *logrus.Entry + encryptor crypto.Encryptor 
+ registry *registry.Registry + baseURL string + webhookBaseURL string + semaphore *semaphore.Weighted + logger *logrus.Entry } -func NewNodeExecutor(encryptor crypto.Encryptor, registry *registry.Registry, baseURL string) *NodeExecutor { +func NewNodeExecutor(encryptor crypto.Encryptor, registry *registry.Registry, baseURL string, webhookBaseURL string) *NodeExecutor { return &NodeExecutor{ - encryptor: encryptor, - registry: registry, - baseURL: baseURL, - semaphore: semaphore.NewWeighted(25), - logger: logrus.WithFields(logrus.Fields{"worker": "NodeExecutor"}), + encryptor: encryptor, + registry: registry, + baseURL: baseURL, + webhookBaseURL: webhookBaseURL, + semaphore: semaphore.NewWeighted(25), + logger: logrus.WithFields(logrus.Fields{"worker": "NodeExecutor"}), } } @@ -290,6 +292,7 @@ func (w *NodeExecutor) executeComponentNode(tx *gorm.DB, execution *models.Canva Auth: contexts.NewAuthContext(tx, workflow.OrganizationID, nil, nil), Notifications: contexts.NewNotificationContext(tx, workflow.OrganizationID, execution.WorkflowID), Secrets: contexts.NewSecretsContext(tx, workflow.OrganizationID, w.encryptor), + Webhook: contexts.NewNodeWebhookContext(context.Background(), tx, w.encryptor, node, w.webhookBaseURL), } ctx.ExpressionEnv = func(expression string) (map[string]any, error) { builder := contexts.NewNodeConfigurationBuilder(tx, execution.WorkflowID). diff --git a/pkg/workers/node_executor_test.go b/pkg/workers/node_executor_test.go index c7882d0916..25393d2a9c 100644 --- a/pkg/workers/node_executor_test.go +++ b/pkg/workers/node_executor_test.go @@ -56,12 +56,12 @@ func Test__NodeExecutor_PreventsConcurrentProcessing(t *testing.T) { // Create two workers and have them try to process the execution concurrently. 
// go func() { - executor1 := NewNodeExecutor(r.Encryptor, r.Registry, "http://localhost") + executor1 := NewNodeExecutor(r.Encryptor, r.Registry, "http://localhost", "http://localhost") results <- executor1.LockAndProcessNodeExecution(execution.ID) }() go func() { - executor2 := NewNodeExecutor(r.Encryptor, r.Registry, "http://localhost") + executor2 := NewNodeExecutor(r.Encryptor, r.Registry, "http://localhost", "http://localhost") results <- executor2.LockAndProcessNodeExecution(execution.ID) }() @@ -146,7 +146,7 @@ func Test__NodeExecutor_BlueprintNodeExecution(t *testing.T) { // Process the execution and verify the blueprint node creates a child execution // and moves the parent execution to started state. // - executor := NewNodeExecutor(r.Encryptor, r.Registry, "http://localhost") + executor := NewNodeExecutor(r.Encryptor, r.Registry, "http://localhost", "http://localhost") err := executor.LockAndProcessNodeExecution(execution.ID) require.NoError(t, err) @@ -224,7 +224,7 @@ func Test__NodeExecutor_ComponentNodeWithoutStateChange(t *testing.T) { // Process the execution and verify the execution is started but NOT finished. // The approval component doesn't call Pass() in Execute(), so it should remain in started state. // - executor := NewNodeExecutor(r.Encryptor, r.Registry, "http://localhost") + executor := NewNodeExecutor(r.Encryptor, r.Registry, "http://localhost", "http://localhost") err = executor.LockAndProcessNodeExecution(execution.ID) require.NoError(t, err) @@ -291,7 +291,7 @@ func Test__NodeExecutor_ComponentNodeWithStateChange(t *testing.T) { // Process the execution and verify the execution is both started AND finished. // The noop component calls Pass() in Execute(), which should finish the execution. 
// - executor := NewNodeExecutor(r.Encryptor, r.Registry, "http://localhost") + executor := NewNodeExecutor(r.Encryptor, r.Registry, "http://localhost", "http://localhost") err := executor.LockAndProcessNodeExecution(execution.ID) require.NoError(t, err) @@ -372,7 +372,7 @@ func Test__NodeExecutor_BlueprintNodeExecutionFailsWhenConfigurationCannotBeBuil // LockAndProcessNodeExecution should not return an error, // since this isn't a runtime error, but a configuration error. // - executor := NewNodeExecutor(r.Encryptor, r.Registry, "http://localhost") + executor := NewNodeExecutor(r.Encryptor, r.Registry, "http://localhost", "http://localhost") err := executor.LockAndProcessNodeExecution(execution.ID) require.NoError(t, err) From 3fba07f6dec86482873483e777a889f90f01da3e Mon Sep 17 00:00:00 2001 From: harxhist Date: Thu, 12 Feb 2026 18:30:07 +0530 Subject: [PATCH 060/160] feat: Add Cursor Integration (#2991) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #2618 ## Description This PR introduces the **Cursor** integration to SuperPlane, allowing users to build workflows utilizing Cursor's AI-powered capabilities. It includes the base integration setup and two starter components: 1. **Launch Cloud Agent (Action):** Triggers a Cursor Cloud Agent on a specific repository/branch and tracks the execution state to completion. It links to the Cloud Agent and PR in the output. This agent has no limits(except credit limit). 2. **Get Daily Usage Data (Action):** Fetches daily team usage metrics from the Cursor Admin API for reporting and cost tracking. ### Implementation Notes - **Authentication:** Connects via Cursor BasicAuth (Admin API and Cloud Agents API). - **Code Logic:** The `Launch Cloud Agent` implementation handles significant logic to track the agent's lifecycle (polling status, handling completion, etc.). The code is structured to robustly handle this weight to ensure reliable execution tracking. 
## Video Demo [Watch the Loom Video](https://www.loom.com/share/2f6f3f98ab6b47ce88444a15f93afe45) ## Checklist - [x] My code follows the style guidelines of this project - [x] I have performed a self-review of my own code - [x] I have commented my code, particularly in hard-to-understand areas - [x] I have made corresponding changes to the documentation (`make gen.components.docs`) - [x] I have added tests that prove my fix is effective or that my feature works - [x] New and existing unit tests pass locally with my changes - [x] I have signed off my commits (`git commit -s`) --------- Signed-off-by: Harsh Signed-off-by: Igor Šarčević Co-authored-by: Igor Šarčević Co-authored-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Cursor.mdx | 117 ++++ pkg/integrations/cursor/client.go | 211 ++++++++ pkg/integrations/cursor/client_test.go | 260 +++++++++ pkg/integrations/cursor/cursor.go | 148 +++++ pkg/integrations/cursor/cursor_test.go | 236 ++++++++ .../example_output_get_daily_usage_data.json | 28 + .../cursor/example_output_launch_agent.json | 7 + .../cursor/get_daily_usage_data.go | 256 +++++++++ .../cursor/get_daily_usage_data_test.go | 257 +++++++++ pkg/integrations/cursor/launch_agent.go | 229 ++++++++ .../cursor/launch_agent_monitor.go | 224 ++++++++ .../cursor/launch_agent_monitor_test.go | 504 ++++++++++++++++++ pkg/integrations/cursor/launch_agent_test.go | 353 ++++++++++++ pkg/integrations/cursor/launch_agent_types.go | 190 +++++++ pkg/server/server.go | 1 + .../src/assets/icons/integrations/cursor.svg | 32 ++ .../pages/workflowv2/mappers/cursor/base.ts | 70 +++ .../mappers/cursor/get_daily_usage_data.ts | 103 ++++ .../pages/workflowv2/mappers/cursor/index.ts | 16 + .../workflowv2/mappers/cursor/launch_agent.ts | 104 ++++ web_src/src/pages/workflowv2/mappers/index.ts | 8 + .../src/ui/BuildingBlocksSidebar/index.tsx | 3 + .../ui/componentSidebar/integrationIcons.tsx | 3 + web_src/src/utils/integrationDisplayName.ts | 1 + 24 files 
changed, 3361 insertions(+) create mode 100644 docs/components/Cursor.mdx create mode 100644 pkg/integrations/cursor/client.go create mode 100644 pkg/integrations/cursor/client_test.go create mode 100644 pkg/integrations/cursor/cursor.go create mode 100644 pkg/integrations/cursor/cursor_test.go create mode 100644 pkg/integrations/cursor/example_output_get_daily_usage_data.json create mode 100644 pkg/integrations/cursor/example_output_launch_agent.json create mode 100644 pkg/integrations/cursor/get_daily_usage_data.go create mode 100644 pkg/integrations/cursor/get_daily_usage_data_test.go create mode 100644 pkg/integrations/cursor/launch_agent.go create mode 100644 pkg/integrations/cursor/launch_agent_monitor.go create mode 100644 pkg/integrations/cursor/launch_agent_monitor_test.go create mode 100644 pkg/integrations/cursor/launch_agent_test.go create mode 100644 pkg/integrations/cursor/launch_agent_types.go create mode 100644 web_src/src/assets/icons/integrations/cursor.svg create mode 100644 web_src/src/pages/workflowv2/mappers/cursor/base.ts create mode 100644 web_src/src/pages/workflowv2/mappers/cursor/get_daily_usage_data.ts create mode 100644 web_src/src/pages/workflowv2/mappers/cursor/index.ts create mode 100644 web_src/src/pages/workflowv2/mappers/cursor/launch_agent.ts diff --git a/docs/components/Cursor.mdx b/docs/components/Cursor.mdx new file mode 100644 index 0000000000..3c869807c7 --- /dev/null +++ b/docs/components/Cursor.mdx @@ -0,0 +1,117 @@ +--- +title: "Cursor" +--- + +Build workflows with Cursor AI Agents and track usage + +import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + +## Actions + + + + + + +## Instructions + +To get your API keys, visit the [Cursor Dashboard](https://cursor.com/dashboard). You may need separate keys for Agents and Admin features. + + + +## Get Daily Usage Data + +The Get Daily Usage Data component fetches team usage metrics from Cursor's Admin API. 
+ +### Use Cases + +- **Usage reporting**: Track team productivity and AI usage patterns +- **Cost tracking**: Monitor usage-based requests and subscription consumption +- **Analytics dashboards**: Build custom dashboards with Cursor usage data + +### How It Works + +1. Fetches usage data for the specified date range from Cursor's Admin API +2. Returns detailed metrics per user including lines added/deleted, requests, and model usage + +### Configuration + +- **Start Date**: Start of the date range (YYYY-MM-DD format, defaults to 7 days ago) +- **End Date**: End of the date range (YYYY-MM-DD format, defaults to today) + +### Output + +The output includes per-user daily metrics: +- Lines added/deleted (total and accepted) +- Tab completions shown/accepted +- Composer, chat, and agent requests +- Subscription vs usage-based request counts +- Most used model and file extensions + +### Notes + +- Requires a valid Cursor Admin API key configured in the integration +- Only returns data for active users + +### Example Output + +```json +{ + "data": [ + { + "acceptedLinesAdded": 1102, + "acceptedLinesDeleted": 645, + "agentRequests": 12, + "chatRequests": 128, + "composerRequests": 45, + "date": 1710720000000, + "email": "developer@company.com", + "isActive": true, + "mostUsedModel": "gpt-4", + "subscriptionIncludedReqs": 180, + "totalAccepts": 73, + "totalApplies": 87, + "totalLinesAdded": 1543, + "totalLinesDeleted": 892, + "totalRejects": 14, + "totalTabsAccepted": 289, + "totalTabsShown": 342, + "usageBasedReqs": 5 + } + ], + "period": { + "endDate": 1710892800000, + "startDate": 1710720000000 + } +} +``` + + + +## Launch Cloud Agent + +The Launch Cloud Agent component triggers a Cursor AI coding agent and waits for it to complete. 
+ +### Use Cases +- **Automated code generation**: Generate code from natural language prompts +- **PR fixes**: Automatically fix issues on existing pull requests +- **Code refactoring**: Refactor code based on instructions +- **Feature implementation**: Implement new features from specifications + +### How It Works +1. Launches a Cursor Cloud Agent with the specified prompt and configuration +2. Waits for the agent to complete (monitored via webhook and polling) +3. Emits output with the agent result (success or failure) + +### Example Output + +```json +{ + "agentId": "agent_12345", + "branchName": "cursor/agent-550e8400", + "prUrl": "https://github.com/org/repo/pull/42", + "status": "done", + "summary": "Refactored login logic." +} +``` + diff --git a/pkg/integrations/cursor/client.go b/pkg/integrations/cursor/client.go new file mode 100644 index 0000000000..53e3e017dc --- /dev/null +++ b/pkg/integrations/cursor/client.go @@ -0,0 +1,211 @@ +package cursor + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/superplanehq/superplane/pkg/core" +) + +const defaultBaseURL = "https://api.cursor.com" + +func NewClient(httpClient core.HTTPContext, ctx core.IntegrationContext) (*Client, error) { + if ctx == nil { + return nil, fmt.Errorf("no integration context") + } + + launchAgentKey, _ := ctx.GetConfig("launchAgentKey") + adminAPIKey, _ := ctx.GetConfig("adminKey") + + return &Client{ + LaunchAgentKey: string(launchAgentKey), + AdminKey: string(adminAPIKey), + BaseURL: defaultBaseURL, + http: httpClient, + }, nil +} + +type Client struct { + LaunchAgentKey string + AdminKey string + BaseURL string + http core.HTTPContext +} + +type cursorErrorResponse struct { + Error string `json:"error"` + Message string `json:"message"` +} + +type UsageRequest struct { + StartDate int64 `json:"startDate"` + EndDate int64 `json:"endDate"` +} + +type UsageResponse map[string]any + +type ModelsResponse struct { + Models []string `json:"models"` 
+} + +func (c *Client) ListModels() ([]string, error) { + if c.LaunchAgentKey == "" { + return nil, fmt.Errorf("Cloud Agent API key is not configured") + } + + responseBody, err := c.execRequest(http.MethodGet, c.BaseURL+"/v0/models", nil, c.LaunchAgentKey) + if err != nil { + return nil, err + } + + var response ModelsResponse + if err := json.Unmarshal(responseBody, &response); err != nil { + return nil, fmt.Errorf("failed to unmarshal models response: %w", err) + } + + return response.Models, nil +} + +func (c *Client) VerifyLaunchAgent() error { + _, err := c.execRequest(http.MethodGet, c.BaseURL+"/v0/agents?limit=1", nil, c.LaunchAgentKey) + return err +} + +func (c *Client) VerifyAdmin() error { + now := time.Now().UTC() + startOfDay := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, time.UTC) + + req := UsageRequest{ + StartDate: startOfDay.Unix() * 1000, + EndDate: now.Unix() * 1000, + } + + _, err := c.GetDailyUsage(req) + return err +} + +func (c *Client) GetDailyUsage(req UsageRequest) (*UsageResponse, error) { + if c.AdminKey == "" { + return nil, fmt.Errorf("Admin API key is not configured") + } + + reqBody, err := json.Marshal(req) + if err != nil { + return nil, fmt.Errorf("failed to marshal usage request: %v", err) + } + + responseBody, err := c.execRequest(http.MethodPost, c.BaseURL+"/teams/daily-usage-data", bytes.NewBuffer(reqBody), c.AdminKey) + if err != nil { + return nil, err + } + + var response UsageResponse + if err := json.Unmarshal(responseBody, &response); err != nil { + return nil, fmt.Errorf("failed to unmarshal usage response: %v", err) + } + + return &response, nil +} + +func (c *Client) LaunchAgent(req launchAgentRequest) (*LaunchAgentResponse, error) { + if c.LaunchAgentKey == "" { + return nil, fmt.Errorf("Cloud Agent API key is not configured") + } + + reqBody, err := json.Marshal(req) + if err != nil { + return nil, fmt.Errorf("failed to marshal agent request: %w", err) + } + + responseBody, err := 
c.execRequest(http.MethodPost, c.BaseURL+"/v0/agents", bytes.NewBuffer(reqBody), c.LaunchAgentKey) + if err != nil { + return nil, err + } + + var response LaunchAgentResponse + if err := json.Unmarshal(responseBody, &response); err != nil { + return nil, fmt.Errorf("failed to unmarshal agent response: %w", err) + } + + return &response, nil +} + +func (c *Client) GetAgentStatus(agentID string) (*LaunchAgentResponse, error) { + if c.LaunchAgentKey == "" { + return nil, fmt.Errorf("Cloud Agent API key is not configured") + } + + url := fmt.Sprintf("%s/v0/agents/%s", c.BaseURL, agentID) + responseBody, err := c.execRequest(http.MethodGet, url, nil, c.LaunchAgentKey) + if err != nil { + return nil, err + } + + var response LaunchAgentResponse + if err := json.Unmarshal(responseBody, &response); err != nil { + return nil, fmt.Errorf("failed to unmarshal agent status response: %w", err) + } + + return &response, nil +} + +func (c *Client) CancelAgent(agentID string) error { + if c.LaunchAgentKey == "" { + return fmt.Errorf("Cloud Agent API key is not configured") + } + + url := fmt.Sprintf("%s/v0/agents/%s/cancel", c.BaseURL, agentID) + _, err := c.execRequest(http.MethodPost, url, nil, c.LaunchAgentKey) + return err +} + +func (c *Client) execRequest(method, URL string, body io.Reader, apiKey string) ([]byte, error) { + req, err := http.NewRequest(method, URL, body) + if err != nil { + return nil, fmt.Errorf("failed to build request: %v", err) + } + + if body != nil { + req.Header.Set("Content-Type", "application/json") + } + req.Header.Set("Authorization", "Bearer "+apiKey) + + res, err := c.http.Do(req) + if err != nil { + return nil, fmt.Errorf("request failed: %v", err) + } + defer res.Body.Close() + + responseBody, err := io.ReadAll(res.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response body: %v", err) + } + if res.StatusCode < http.StatusOK || res.StatusCode >= http.StatusMultipleChoices { + var apiErr cursorErrorResponse + var errorMessage 
string + + if err := json.Unmarshal(responseBody, &apiErr); err == nil { + if apiErr.Message != "" { + errorMessage = apiErr.Message + } else if apiErr.Error != "" { + errorMessage = apiErr.Error + } else { + errorMessage = string(responseBody) + } + } else { + errorMessage = string(responseBody) + } + + if res.StatusCode == http.StatusUnauthorized { + return nil, fmt.Errorf("Cursor credentials are invalid or expired: %s", errorMessage) + } + + return nil, fmt.Errorf("request failed (%d): %s", res.StatusCode, errorMessage) + } + + return responseBody, nil +} diff --git a/pkg/integrations/cursor/client_test.go b/pkg/integrations/cursor/client_test.go new file mode 100644 index 0000000000..57b9a8d4f1 --- /dev/null +++ b/pkg/integrations/cursor/client_test.go @@ -0,0 +1,260 @@ +package cursor + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__Client__VerifyLaunchAgent(t *testing.T) { + t.Run("success", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"agents":[]}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "launchAgentKey": "test-key", + }, + } + + client, err := NewClient(httpContext, integrationCtx) + require.NoError(t, err) + + err = client.VerifyLaunchAgent() + require.NoError(t, err) + + require.Len(t, httpContext.Requests, 1) + assert.Equal(t, "Bearer test-key", httpContext.Requests[0].Header.Get("Authorization")) + }) + + t.Run("unauthorized", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusUnauthorized, + Body: io.NopCloser(strings.NewReader(`{"error":"invalid key"}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: 
map[string]any{ + "launchAgentKey": "invalid-key", + }, + } + + client, err := NewClient(httpContext, integrationCtx) + require.NoError(t, err) + + err = client.VerifyLaunchAgent() + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid or expired") + }) +} + +func ptrBool(b bool) *bool { return &b } + +func Test__Client__LaunchAgent(t *testing.T) { + t.Run("success", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{ + "id": "agent-123", + "status": "CREATING", + "source": {"repository": "https://github.com/org/repo", "ref": "main"}, + "target": {"branchName": "cursor/agent-abc123"} + }`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "launchAgentKey": "test-key", + }, + } + + client, err := NewClient(httpContext, integrationCtx) + require.NoError(t, err) + + req := launchAgentRequest{ + Prompt: launchAgentPrompt{Text: "Fix the bug"}, + Source: launchAgentSource{ + Repository: "https://github.com/org/repo", + Ref: "main", + }, + Target: launchAgentTarget{ + AutoCreatePr: ptrBool(true), + BranchName: "cursor/agent-abc123", + }, + } + + response, err := client.LaunchAgent(req) + require.NoError(t, err) + + assert.Equal(t, "agent-123", response.ID) + assert.Equal(t, "CREATING", response.Status) + assert.Equal(t, "cursor/agent-abc123", response.Target.BranchName) + }) + + t.Run("no cloud agent key", func(t *testing.T) { + httpContext := &contexts.HTTPContext{} + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{}, + } + + client, err := NewClient(httpContext, integrationCtx) + require.NoError(t, err) + + _, err = client.LaunchAgent(launchAgentRequest{}) + require.Error(t, err) + assert.Contains(t, err.Error(), "Cloud Agent API key is not configured") + }) +} + +func Test__Client__GetAgentStatus(t *testing.T) { + t.Run("success", func(t *testing.T) { + 
httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{ + "id": "agent-123", + "status": "FINISHED", + "summary": "Fixed the bug", + "target": {"prUrl": "https://github.com/org/repo/pull/42"} + }`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "launchAgentKey": "test-key", + }, + } + + client, err := NewClient(httpContext, integrationCtx) + require.NoError(t, err) + + response, err := client.GetAgentStatus("agent-123") + require.NoError(t, err) + + assert.Equal(t, "agent-123", response.ID) + assert.Equal(t, "FINISHED", response.Status) + assert.Equal(t, "Fixed the bug", response.Summary) + assert.Equal(t, "https://github.com/org/repo/pull/42", response.Target.PrURL) + + require.Len(t, httpContext.Requests, 1) + assert.Equal(t, "https://api.cursor.com/v0/agents/agent-123", httpContext.Requests[0].URL.String()) + }) +} + +func Test__Client__GetDailyUsage(t *testing.T) { + t.Run("success", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{ + "data": [ + { + "date": 1710720000000, + "isActive": true, + "totalLinesAdded": 1543, + "email": "dev@company.com" + } + ], + "period": { + "startDate": 1710720000000, + "endDate": 1710892800000 + } + }`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "adminKey": "test-admin-key", + }, + } + + client, err := NewClient(httpContext, integrationCtx) + require.NoError(t, err) + + req := UsageRequest{ + StartDate: 1710720000000, + EndDate: 1710892800000, + } + + response, err := client.GetDailyUsage(req) + require.NoError(t, err) + + data := (*response)["data"].([]any) + assert.Len(t, data, 1) + + require.Len(t, httpContext.Requests, 1) + assert.Equal(t, "https://api.cursor.com/teams/daily-usage-data", 
httpContext.Requests[0].URL.String()) + assert.Equal(t, "Bearer test-admin-key", httpContext.Requests[0].Header.Get("Authorization")) + }) + + t.Run("no admin key", func(t *testing.T) { + httpContext := &contexts.HTTPContext{} + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{}, + } + + client, err := NewClient(httpContext, integrationCtx) + require.NoError(t, err) + + _, err = client.GetDailyUsage(UsageRequest{}) + require.Error(t, err) + assert.Contains(t, err.Error(), "Admin API key is not configured") + }) +} + +func Test__Client__ListModels(t *testing.T) { + t.Run("success", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"models":["claude-3.5-sonnet","gpt-4o","o1-mini"]}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "launchAgentKey": "test-key", + }, + } + + client, err := NewClient(httpContext, integrationCtx) + require.NoError(t, err) + + models, err := client.ListModels() + require.NoError(t, err) + + assert.Len(t, models, 3) + assert.Contains(t, models, "claude-3.5-sonnet") + assert.Contains(t, models, "gpt-4o") + assert.Contains(t, models, "o1-mini") + }) +} diff --git a/pkg/integrations/cursor/cursor.go b/pkg/integrations/cursor/cursor.go new file mode 100644 index 0000000000..611ad102e7 --- /dev/null +++ b/pkg/integrations/cursor/cursor.go @@ -0,0 +1,148 @@ +package cursor + +import ( + "fmt" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/registry" +) + +func init() { + registry.RegisterIntegration("cursor", &Cursor{}) +} + +type Cursor struct{} + +type Configuration struct { + LaunchAgentKey string `json:"launchAgentKey"` + AdminKey string `json:"adminKey"` +} + +func (i *Cursor) Name() string { + return 
"cursor" +} + +func (i *Cursor) Label() string { + return "Cursor" +} + +func (i *Cursor) Icon() string { + return "cpu" +} + +func (i *Cursor) Description() string { + return "Build workflows with Cursor AI Agents and track usage" +} + +func (i *Cursor) Instructions() string { + return "To get your API keys, visit the [Cursor Dashboard](https://cursor.com/dashboard). You may need separate keys for Agents and Admin features." +} + +func (i *Cursor) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "launchAgentKey", + Label: "Cloud Agent API Key", + Type: configuration.FieldTypeString, + Sensitive: true, + Description: "Required for launching AI Agents.", + Required: false, + }, + { + Name: "adminKey", + Label: "Admin API Key", + Type: configuration.FieldTypeString, + Sensitive: true, + Description: "(For Teams) Required for fetching Usage Data.", + Required: false, + }, + } +} + +func (i *Cursor) Sync(ctx core.SyncContext) error { + config := Configuration{} + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %v", err) + } + + if config.LaunchAgentKey == "" && config.AdminKey == "" { + return fmt.Errorf("one of the keys is required") + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + if config.LaunchAgentKey != "" { + if err := client.VerifyLaunchAgent(); err != nil { + return fmt.Errorf("cloud agent key verification failed: %w", err) + } + } + + if config.AdminKey != "" { + if err := client.VerifyAdmin(); err != nil { + return fmt.Errorf("admin key verification failed: %w", err) + } + } + + ctx.Integration.Ready() + return nil +} + +func (i *Cursor) Components() []core.Component { + return []core.Component{ + &LaunchAgent{}, + &GetDailyUsageData{}, + } +} + +func (i *Cursor) Triggers() []core.Trigger { + return []core.Trigger{} +} + +func (i *Cursor) Cleanup(ctx core.IntegrationCleanupContext) error { + return 
nil +} + +func (i *Cursor) HandleRequest(ctx core.HTTPRequestContext) {} + +func (i *Cursor) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + if resourceType == "model" { + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, err + } + + models, err := client.ListModels() + if err != nil { + return nil, err + } + + resources := []core.IntegrationResource{ + {Type: "model", ID: "", Name: "Auto (Recommended)"}, + } + + for _, model := range models { + resources = append(resources, core.IntegrationResource{ + Type: "model", + ID: model, + Name: model, + }) + } + + return resources, nil + } + + return []core.IntegrationResource{}, nil +} + +func (i *Cursor) Actions() []core.Action { + return []core.Action{} +} + +func (i *Cursor) HandleAction(ctx core.IntegrationActionContext) error { + return nil +} diff --git a/pkg/integrations/cursor/cursor_test.go b/pkg/integrations/cursor/cursor_test.go new file mode 100644 index 0000000000..bcb7a4a5d4 --- /dev/null +++ b/pkg/integrations/cursor/cursor_test.go @@ -0,0 +1,236 @@ +package cursor + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__Cursor__Sync(t *testing.T) { + c := &Cursor{} + + t.Run("success with cloud agent key -> ready", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"agents":[]}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "launchAgentKey": "test-cloud-agent-key", + }, + } + + err := c.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + HTTP: httpContext, + Integration: integrationCtx, + }) + + require.NoError(t, err) + 
assert.Equal(t, "ready", integrationCtx.State) + require.Len(t, httpContext.Requests, 1) + assert.Equal(t, "https://api.cursor.com/v0/agents?limit=1", httpContext.Requests[0].URL.String()) + }) + + t.Run("success with admin key -> ready", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"data":[]}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "adminKey": "test-admin-key", + }, + } + + err := c.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + HTTP: httpContext, + Integration: integrationCtx, + }) + + require.NoError(t, err) + assert.Equal(t, "ready", integrationCtx.State) + require.Len(t, httpContext.Requests, 1) + assert.Equal(t, "https://api.cursor.com/teams/daily-usage-data", httpContext.Requests[0].URL.String()) + }) + + t.Run("success with both keys -> ready", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"agents":[]}`)), + }, + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"data":[]}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "launchAgentKey": "test-cloud-agent-key", + "adminKey": "test-admin-key", + }, + } + + err := c.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + HTTP: httpContext, + Integration: integrationCtx, + }) + + require.NoError(t, err) + assert.Equal(t, "ready", integrationCtx.State) + require.Len(t, httpContext.Requests, 2) + }) + + t.Run("no keys provided -> error", func(t *testing.T) { + httpContext := &contexts.HTTPContext{} + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{}, + } + + err := c.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + HTTP: httpContext, + 
Integration: integrationCtx, + }) + + require.Error(t, err) + assert.Contains(t, err.Error(), "one of the keys is required") + }) + + t.Run("invalid cloud agent key -> error", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusUnauthorized, + Body: io.NopCloser(strings.NewReader(`{"error":"invalid api key"}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "launchAgentKey": "invalid-key", + }, + } + + err := c.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + HTTP: httpContext, + Integration: integrationCtx, + }) + + require.Error(t, err) + assert.Contains(t, err.Error(), "cloud agent key verification failed") + }) + + t.Run("invalid admin key -> error", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusUnauthorized, + Body: io.NopCloser(strings.NewReader(`{"error":"invalid api key"}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "adminKey": "invalid-key", + }, + } + + err := c.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + HTTP: httpContext, + Integration: integrationCtx, + }) + + require.Error(t, err) + assert.Contains(t, err.Error(), "admin key verification failed") + }) +} + +func Test__Cursor__Components(t *testing.T) { + c := &Cursor{} + components := c.Components() + + assert.Len(t, components, 2) + + names := make([]string, len(components)) + for i, comp := range components { + names[i] = comp.Name() + } + + assert.Contains(t, names, "cursor.launchAgent") + assert.Contains(t, names, "cursor.getDailyUsageData") +} + +func Test__Cursor__ListResources(t *testing.T) { + c := &Cursor{} + + t.Run("list models", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: 
io.NopCloser(strings.NewReader(`{"models":["claude-3.5-sonnet","gpt-4o","o1-mini"]}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "launchAgentKey": "test-key", + }, + } + + resources, err := c.ListResources("model", core.ListResourcesContext{ + HTTP: httpContext, + Integration: integrationCtx, + }) + + require.NoError(t, err) + assert.Len(t, resources, 4) + assert.Equal(t, "", resources[0].ID) + assert.Equal(t, "Auto (Recommended)", resources[0].Name) + assert.Equal(t, "claude-3.5-sonnet", resources[1].ID) + assert.Equal(t, "gpt-4o", resources[2].ID) + assert.Equal(t, "o1-mini", resources[3].ID) + }) + + t.Run("unknown resource type returns empty", func(t *testing.T) { + httpContext := &contexts.HTTPContext{} + integrationCtx := &contexts.IntegrationContext{} + + resources, err := c.ListResources("unknown", core.ListResourcesContext{ + HTTP: httpContext, + Integration: integrationCtx, + }) + + require.NoError(t, err) + assert.Empty(t, resources) + }) +} diff --git a/pkg/integrations/cursor/example_output_get_daily_usage_data.json b/pkg/integrations/cursor/example_output_get_daily_usage_data.json new file mode 100644 index 0000000000..52b625eeca --- /dev/null +++ b/pkg/integrations/cursor/example_output_get_daily_usage_data.json @@ -0,0 +1,28 @@ +{ + "data": [ + { + "acceptedLinesAdded": 1102, + "acceptedLinesDeleted": 645, + "agentRequests": 12, + "chatRequests": 128, + "composerRequests": 45, + "date": 1710720000000, + "email": "developer@company.com", + "isActive": true, + "mostUsedModel": "gpt-4", + "subscriptionIncludedReqs": 180, + "totalAccepts": 73, + "totalApplies": 87, + "totalLinesAdded": 1543, + "totalLinesDeleted": 892, + "totalRejects": 14, + "totalTabsAccepted": 289, + "totalTabsShown": 342, + "usageBasedReqs": 5 + } + ], + "period": { + "endDate": 1710892800000, + "startDate": 1710720000000 + } +} \ No newline at end of file diff --git 
a/pkg/integrations/cursor/example_output_launch_agent.json b/pkg/integrations/cursor/example_output_launch_agent.json new file mode 100644 index 0000000000..ef06b512ff --- /dev/null +++ b/pkg/integrations/cursor/example_output_launch_agent.json @@ -0,0 +1,7 @@ +{ + "agentId": "agent_12345", + "branchName": "cursor/agent-550e8400", + "prUrl": "https://github.com/org/repo/pull/42", + "status": "done", + "summary": "Refactored login logic." + } \ No newline at end of file diff --git a/pkg/integrations/cursor/get_daily_usage_data.go b/pkg/integrations/cursor/get_daily_usage_data.go new file mode 100644 index 0000000000..61d20af8c6 --- /dev/null +++ b/pkg/integrations/cursor/get_daily_usage_data.go @@ -0,0 +1,256 @@ +package cursor + +import ( + "fmt" + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "time" +) + +const ( + GetDailyUsageDataPayloadType = "cursor.getDailyUsageData.result" +) + +type GetDailyUsageData struct{} + +type GetDailyUsageDataSpec struct { + StartDate string `json:"startDate" mapstructure:"startDate"` + EndDate string `json:"endDate" mapstructure:"endDate"` +} + +type GetDailyUsageDataOutput struct { + Data []map[string]any `json:"data"` + Period map[string]any `json:"period"` +} + +func (c *GetDailyUsageData) Name() string { + return "cursor.getDailyUsageData" +} + +func (c *GetDailyUsageData) Label() string { + return "Get Daily Usage Data" +} + +func (c *GetDailyUsageData) Description() string { + return "Fetches daily team usage metrics from Cursor's Admin API." +} + +func (c *GetDailyUsageData) Documentation() string { + return `The Get Daily Usage Data component fetches team usage metrics from Cursor's Admin API. 
+ +## Use Cases + +- **Usage reporting**: Track team productivity and AI usage patterns +- **Cost tracking**: Monitor usage-based requests and subscription consumption +- **Analytics dashboards**: Build custom dashboards with Cursor usage data + +## How It Works + +1. Fetches usage data for the specified date range from Cursor's Admin API +2. Returns detailed metrics per user including lines added/deleted, requests, and model usage + +## Configuration + +- **Start Date**: Start of the date range (YYYY-MM-DD format, defaults to 7 days ago) +- **End Date**: End of the date range (YYYY-MM-DD format, defaults to today) + +## Output + +The output includes per-user daily metrics: +- Lines added/deleted (total and accepted) +- Tab completions shown/accepted +- Composer, chat, and agent requests +- Subscription vs usage-based request counts +- Most used model and file extensions + +## Notes + +- Requires a valid Cursor Admin API key configured in the integration +- Only returns data for active users` +} + +func (c *GetDailyUsageData) Icon() string { + return "bar-chart" +} + +func (c *GetDailyUsageData) Color() string { + return "#10B981" +} + +func (c *GetDailyUsageData) ExampleOutput() map[string]any { + return map[string]any{ + "data": []map[string]any{ + { + "date": 1710720000000, + "isActive": true, + "totalLinesAdded": 1543, + "totalLinesDeleted": 892, + "acceptedLinesAdded": 1102, + "acceptedLinesDeleted": 645, + "totalApplies": 87, + "totalAccepts": 73, + "totalRejects": 14, + "totalTabsShown": 342, + "totalTabsAccepted": 289, + "composerRequests": 45, + "chatRequests": 128, + "agentRequests": 12, + "subscriptionIncludedReqs": 180, + "usageBasedReqs": 5, + "mostUsedModel": "gpt-4", + "email": "developer@company.com", + }, + }, + "period": map[string]any{ + "startDate": 1710720000000, + "endDate": 1710892800000, + }, + } +} + +func (c *GetDailyUsageData) OutputChannels(config any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + 
+func (c *GetDailyUsageData) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "startDate", + Label: "Start Date", + Type: configuration.FieldTypeString, + Description: "YYYY-MM-DD (Defaults to 7 days ago)", + Required: false, + }, + { + Name: "endDate", + Label: "End Date", + Type: configuration.FieldTypeString, + Description: "YYYY-MM-DD (Defaults to today)", + Required: false, + }, + } +} + +func (c *GetDailyUsageData) Setup(ctx core.SetupContext) error { + return nil +} + +func (c *GetDailyUsageData) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *GetDailyUsageData) Execute(ctx core.ExecutionContext) error { + spec := GetDailyUsageDataSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + now := time.Now().UTC() + + startOfToday := time.Date( + now.Year(), now.Month(), now.Day(), + 0, 0, 0, 0, time.UTC, + ) + + endOfDay := time.Date( + now.Year(), now.Month(), now.Day(), + 23, 59, 59, 0, time.UTC, + ) + + // Default: 7 days ago at 00:00:00 UTC + startOfWeek := startOfToday.AddDate(0, 0, -7) + + var startDate, endDate time.Time + var err error + + if spec.StartDate != "" { + startDate, err = time.Parse("2006-01-02", spec.StartDate) + if err != nil { + return fmt.Errorf("invalid start date format (expected YYYY-MM-DD): %w", err) + } + } else { + startDate = startOfWeek + } + + if spec.EndDate != "" { + endDate, err = time.Parse("2006-01-02", spec.EndDate) + if err != nil { + return fmt.Errorf("invalid end date format (expected YYYY-MM-DD): %w", err) + } + endDate = time.Date(endDate.Year(), endDate.Month(), endDate.Day(), 23, 59, 59, 0, time.UTC) + } else { + endDate = endOfDay + } + + if startDate.After(endDate) { + return fmt.Errorf("start date must be before end date") + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return 
fmt.Errorf("failed to create cursor client: %w", err) + } + + if client.AdminKey == "" { + return fmt.Errorf("admin API key is not configured in the integration") + } + + req := UsageRequest{ + StartDate: startDate.Unix() * 1000, + EndDate: endDate.Unix() * 1000, + } + + ctx.Logger.Infof("Fetching Cursor usage data from %s to %s", startDate.Format("2006-01-02"), endDate.Format("2006-01-02")) + + response, err := client.GetDailyUsage(req) + if err != nil { + return fmt.Errorf("failed to fetch usage data: %w", err) + } + + output := GetDailyUsageDataOutput{ + Data: []map[string]any{}, + Period: map[string]any{ + "startDate": req.StartDate, + "endDate": req.EndDate, + }, + } + + if data, ok := (*response)["data"].([]any); ok { + for _, item := range data { + if itemMap, ok := item.(map[string]any); ok { + output.Data = append(output.Data, itemMap) + } + } + } + + if period, ok := (*response)["period"].(map[string]any); ok { + output.Period = period + } + + ctx.Logger.Infof("Retrieved usage data for %d users", len(output.Data)) + + return ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, GetDailyUsageDataPayloadType, []any{output}) + +} + +func (c *GetDailyUsageData) Actions() []core.Action { + return []core.Action{} +} + +func (c *GetDailyUsageData) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *GetDailyUsageData) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return 200, nil +} + +func (c *GetDailyUsageData) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *GetDailyUsageData) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/cursor/get_daily_usage_data_test.go b/pkg/integrations/cursor/get_daily_usage_data_test.go new file mode 100644 index 0000000000..e048f80006 --- /dev/null +++ b/pkg/integrations/cursor/get_daily_usage_data_test.go @@ -0,0 +1,257 @@ +package cursor + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/google/uuid" + 
"github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__GetDailyUsageData__Execute(t *testing.T) { + c := &GetDailyUsageData{} + + t.Run("success with default dates", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{ + "data": [ + { + "date": 1710720000000, + "isActive": true, + "totalLinesAdded": 1543, + "email": "dev@company.com" + } + ], + "period": { + "startDate": 1710720000000, + "endDate": 1710892800000 + } + }`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "adminKey": "test-admin-key", + }, + } + + executionStateCtx := &contexts.ExecutionStateContext{} + + execCtx := core.ExecutionContext{ + ID: uuid.New(), + Configuration: map[string]any{}, + HTTP: httpContext, + Integration: integrationCtx, + ExecutionState: executionStateCtx, + Logger: logrus.NewEntry(logrus.New()), + } + + err := c.Execute(execCtx) + require.NoError(t, err) + + require.Len(t, httpContext.Requests, 1) + assert.Equal(t, "https://api.cursor.com/teams/daily-usage-data", httpContext.Requests[0].URL.String()) + + assert.Equal(t, core.DefaultOutputChannel.Name, executionStateCtx.Channel) + assert.Equal(t, GetDailyUsageDataPayloadType, executionStateCtx.Type) + }) + + t.Run("success with custom dates", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{ + "data": [], + "period": { + "startDate": 1710720000000, + "endDate": 1710892800000 + } + }`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "adminKey": "test-admin-key", + }, + } + + executionStateCtx := 
&contexts.ExecutionStateContext{} + + execCtx := core.ExecutionContext{ + ID: uuid.New(), + Configuration: map[string]any{ + "startDate": "2024-03-18", + "endDate": "2024-03-20", + }, + HTTP: httpContext, + Integration: integrationCtx, + ExecutionState: executionStateCtx, + Logger: logrus.NewEntry(logrus.New()), + } + + err := c.Execute(execCtx) + require.NoError(t, err) + }) + + t.Run("invalid start date format -> error", func(t *testing.T) { + httpContext := &contexts.HTTPContext{} + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "adminKey": "test-admin-key", + }, + } + + execCtx := core.ExecutionContext{ + ID: uuid.New(), + Configuration: map[string]any{ + "startDate": "invalid-date", + }, + HTTP: httpContext, + Integration: integrationCtx, + Logger: logrus.NewEntry(logrus.New()), + } + + err := c.Execute(execCtx) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid start date format") + }) + + t.Run("invalid end date format -> error", func(t *testing.T) { + httpContext := &contexts.HTTPContext{} + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "adminKey": "test-admin-key", + }, + } + + execCtx := core.ExecutionContext{ + ID: uuid.New(), + Configuration: map[string]any{ + "endDate": "invalid-date", + }, + HTTP: httpContext, + Integration: integrationCtx, + Logger: logrus.NewEntry(logrus.New()), + } + + err := c.Execute(execCtx) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid end date format") + }) + + t.Run("start date after end date -> error", func(t *testing.T) { + httpContext := &contexts.HTTPContext{} + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "adminKey": "test-admin-key", + }, + } + + execCtx := core.ExecutionContext{ + ID: uuid.New(), + Configuration: map[string]any{ + "startDate": "2024-03-25", + "endDate": "2024-03-20", + }, + HTTP: httpContext, + Integration: integrationCtx, + Logger: 
logrus.NewEntry(logrus.New()), + } + + err := c.Execute(execCtx) + require.Error(t, err) + assert.Contains(t, err.Error(), "start date must be before end date") + }) + + t.Run("missing admin key -> error", func(t *testing.T) { + httpContext := &contexts.HTTPContext{} + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{}, + } + + execCtx := core.ExecutionContext{ + ID: uuid.New(), + Configuration: map[string]any{}, + HTTP: httpContext, + Integration: integrationCtx, + Logger: logrus.NewEntry(logrus.New()), + } + + err := c.Execute(execCtx) + require.Error(t, err) + assert.Contains(t, err.Error(), "admin API key is not configured") + }) + + t.Run("API error -> error", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusInternalServerError, + Body: io.NopCloser(strings.NewReader(`{"error":"server error"}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "adminKey": "test-admin-key", + }, + } + + execCtx := core.ExecutionContext{ + ID: uuid.New(), + Configuration: map[string]any{}, + HTTP: httpContext, + Integration: integrationCtx, + Logger: logrus.NewEntry(logrus.New()), + } + + err := c.Execute(execCtx) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to fetch usage data") + }) +} + +func Test__GetDailyUsageData__OutputChannels(t *testing.T) { + c := &GetDailyUsageData{} + channels := c.OutputChannels(nil) + + assert.Len(t, channels, 1) + assert.Equal(t, core.DefaultOutputChannel, channels[0]) +} + +func Test__GetDailyUsageData__Configuration(t *testing.T) { + c := &GetDailyUsageData{} + fields := c.Configuration() + + assert.Len(t, fields, 2) + + names := make([]string, len(fields)) + for i, f := range fields { + names[i] = f.Name + } + + assert.Contains(t, names, "startDate") + assert.Contains(t, names, "endDate") + for _, f := range fields { + assert.False(t, f.Required) + } +} diff --git 
a/pkg/integrations/cursor/launch_agent.go b/pkg/integrations/cursor/launch_agent.go new file mode 100644 index 0000000000..922f605cad --- /dev/null +++ b/pkg/integrations/cursor/launch_agent.go @@ -0,0 +1,229 @@ +package cursor + +import ( + "fmt" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type LaunchAgent struct{} + +func (c *LaunchAgent) Name() string { return "cursor.launchAgent" } + +func (c *LaunchAgent) Label() string { return "Launch Cloud Agent" } + +func (c *LaunchAgent) Description() string { + return "Launches a Cursor Cloud Agent to perform coding tasks asynchronously." +} + +func (c *LaunchAgent) Documentation() string { + return `The Launch Cloud Agent component triggers a Cursor AI coding agent and waits for it to complete. + +## Use Cases +- **Automated code generation**: Generate code from natural language prompts +- **PR fixes**: Automatically fix issues on existing pull requests +- **Code refactoring**: Refactor code based on instructions +- **Feature implementation**: Implement new features from specifications + +## How It Works +1. Launches a Cursor Cloud Agent with the specified prompt and configuration +2. Waits for the agent to complete (monitored via webhook and polling) +3. 
Emits output with the agent result (success or failure)` +} + +func (c *LaunchAgent) Icon() string { return "cpu" } + +func (c *LaunchAgent) Color() string { return "#8B5CF6" } + +func (c *LaunchAgent) ExampleOutput() map[string]any { + return map[string]any{ + "status": LaunchAgentStatusDone, + "agentId": "agent_12345", + "summary": "Refactored login logic.", + "prUrl": "https://github.com/org/repo/pull/42", + "branchName": "cursor/agent-550e8400", + } +} + +func (c *LaunchAgent) OutputChannels(config any) []core.OutputChannel { + return []core.OutputChannel{{Name: LaunchAgentDefaultChannel, Label: "Default"}} +} + +func (c *LaunchAgent) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "prompt", Label: "Instructions", Type: configuration.FieldTypeText, Description: "What should the agent do?", Required: true, + }, + { + Name: "model", Label: "Model", Type: configuration.FieldTypeIntegrationResource, Required: false, + TypeOptions: &configuration.TypeOptions{Resource: &configuration.ResourceTypeOptions{Type: "model"}}, + }, + { + Name: "sourceMode", Label: "Source", Type: configuration.FieldTypeSelect, Required: true, Default: "repository", + TypeOptions: &configuration.TypeOptions{ + Select: &configuration.SelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "New Task (Repository + Branch)", Value: "repository"}, + {Label: "Fix Existing PR", Value: "pr"}, + }, + }, + }, + }, + { + Name: "repository", Label: "Repository URL", Type: configuration.FieldTypeString, Required: false, + VisibilityConditions: []configuration.VisibilityCondition{{Field: "sourceMode", Values: []string{"repository"}}}, + }, + { + Name: "branch", Label: "Base Branch", Type: configuration.FieldTypeString, Required: false, Default: LaunchAgentDefaultBranch, + VisibilityConditions: []configuration.VisibilityCondition{{Field: "sourceMode", Values: []string{"repository"}}}, + }, + { + Name: "prUrl", Label: "Existing PR URL", Type: 
configuration.FieldTypeString, Required: false, + VisibilityConditions: []configuration.VisibilityCondition{{Field: "sourceMode", Values: []string{"pr"}}}, + }, + { + Name: "autoCreatePr", Label: "Auto Create PR", Type: configuration.FieldTypeBool, Required: false, Default: true, + VisibilityConditions: []configuration.VisibilityCondition{{Field: "sourceMode", Values: []string{"repository"}}}, + }, + { + Name: "useCursorBot", Label: "Act as Cursor Bot", Type: configuration.FieldTypeBool, Required: false, Default: true, + }, + } +} + +func (c *LaunchAgent) Setup(ctx core.SetupContext) error { + spec := LaunchAgentSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if spec.Prompt == "" { + return fmt.Errorf("prompt is required") + } + + // Basic validation ensuring the mode matches the inputs + if spec.SourceMode == "repository" && spec.Repository == "" { + return fmt.Errorf("repository URL is required when using repository mode") + } else if spec.SourceMode == "pr" && spec.PrURL == "" { + return fmt.Errorf("PR URL is required when using PR mode") + } + + // Set up webhook so it's associated with the node and saved + _, err := ctx.Webhook.Setup() + return err +} + +func (c *LaunchAgent) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *LaunchAgent) Execute(ctx core.ExecutionContext) error { + spec := LaunchAgentSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + // 1. 
Prepare Configuration + if spec.Branch == "" { + spec.Branch = LaunchAgentDefaultBranch + } + if spec.SourceMode == "repository" && spec.Repository == "" { + return fmt.Errorf("repository URL is required") + } else if spec.SourceMode == "pr" && spec.PrURL == "" { + return fmt.Errorf("PR URL is required") + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("failed to create cursor client: %w", err) + } + if client.LaunchAgentKey == "" { + return fmt.Errorf("cloud agent API key is not configured") + } + + // Get webhook URL and secret (webhook should already be set up in Setup) + webhookURL, err := ctx.Webhook.Setup() + if err != nil { + return fmt.Errorf("failed to get webhook URL: %w", err) + } + + webhookSecret, err := ctx.Webhook.GetSecret() + if err != nil { + return fmt.Errorf("failed to get webhook secret: %w", err) + } + + branchName := fmt.Sprintf("%s%s", LaunchAgentBranchPrefix, ctx.ID.String()[:8]) + + // 3. Construct API Payload + source := launchAgentSource{} + target := launchAgentTarget{ + AutoCreatePr: ptrFromBool(spec.AutoCreatePr), + OpenAsCursorGithubApp: ptrFromBool(spec.UseCursorBot), + BranchName: branchName, + SkipReviewerRequest: ptrFromBool(LaunchAgentSkipReviewerRequest), + } + + if spec.SourceMode == "pr" { + source.PrURL = spec.PrURL + autoBranch := false + target.AutoBranch = &autoBranch + } else { + source.Repository = spec.Repository + source.Ref = spec.Branch + } + + payload := launchAgentRequest{ + Prompt: launchAgentPrompt{Text: spec.Prompt}, + Source: source, + Target: target, + Webhook: launchAgentWebhook{URL: webhookURL, Secret: string(webhookSecret)}, + } + if spec.Model != "" { + payload.Model = spec.Model + } + + // 4. Trigger External Job + result, err := client.LaunchAgent(payload) + if err != nil { + return fmt.Errorf("failed to launch cursor agent: %w", err) + } + + // 5. 
Initialize State + metadata := LaunchAgentExecutionMetadata{ + Agent: &AgentMetadata{ID: result.ID, Name: result.Name, Status: result.Status}, + Target: &TargetMetadata{BranchName: branchName}, + Source: &SourceMetadata{Repository: spec.Repository, Ref: spec.Branch}, + } + + // Populate additional target details if available + if result.Target != nil { + if result.Target.URL != "" { + metadata.Agent.URL = result.Target.URL + } + if result.Target.PrURL != "" { + metadata.Target.PrURL = result.Target.PrURL + } + if result.Target.BranchName != "" { + metadata.Target.BranchName = result.Target.BranchName + } + } + + if err := ctx.Metadata.Set(metadata); err != nil { + return fmt.Errorf("failed to set execution metadata: %w", err) + } + + // Set KV for Webhook correlation + if err := ctx.ExecutionState.SetKV("agent_id", result.ID); err != nil { + return fmt.Errorf("failed to set agent_id in KV: %w", err) + } + + ctx.Logger.Infof("Launched Cursor Agent %s. Waiting for completion...", result.ID) + + // 6. 
Start Monitoring (Fallback Polling) + return ctx.Requests.ScheduleActionCall("poll", map[string]any{"attempt": 1, "errors": 0}, LaunchAgentInitialPollInterval) +} + +func ptrFromBool(b bool) *bool { return &b } diff --git a/pkg/integrations/cursor/launch_agent_monitor.go b/pkg/integrations/cursor/launch_agent_monitor.go new file mode 100644 index 0000000000..71190c2822 --- /dev/null +++ b/pkg/integrations/cursor/launch_agent_monitor.go @@ -0,0 +1,224 @@ +package cursor + +import ( + "encoding/json" + "fmt" + "net/http" + "time" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/core" +) + +// HandleWebhook processes incoming updates from Cursor +func (c *LaunchAgent) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + signature := ctx.Headers.Get(LaunchAgentWebhookSignatureHeader) + if signature == "" { + return http.StatusUnauthorized, fmt.Errorf("missing signature header") + } + + secret, err := ctx.Webhook.GetSecret() + if err != nil { + return http.StatusInternalServerError, fmt.Errorf("error getting webhook secret: %w", err) + } + + if !verifyWebhookSignature(ctx.Body, signature, string(secret)) { + return http.StatusUnauthorized, fmt.Errorf("invalid webhook signature") + } + + // 2. Parse payload + var payload launchAgentWebhookPayload + if err := json.Unmarshal(ctx.Body, &payload); err != nil { + return http.StatusBadRequest, fmt.Errorf("invalid json body: %w", err) + } + + if payload.ID == "" { + return http.StatusBadRequest, fmt.Errorf("id missing from webhook payload") + } + + // 3. 
Correlate Webhook to Execution + executionCtx, err := ctx.FindExecutionByKV("agent_id", payload.ID) + if err != nil { + // Execution not found (likely old or deleted), ack to stop retries + return http.StatusOK, nil + } + + metadata := LaunchAgentExecutionMetadata{} + if err := mapstructure.Decode(executionCtx.Metadata.Get(), &metadata); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode metadata: %w", err) + } + + // 4. Idempotency Check + if metadata.Agent != nil && isTerminalStatus(metadata.Agent.Status) { + return http.StatusOK, nil + } + + // 5. Update State + executionCtx.Logger.Infof("Received webhook for Agent %s: %s", payload.ID, payload.Status) + if metadata.Agent == nil { + metadata.Agent = &AgentMetadata{} + } + metadata.Agent.ID = payload.ID + metadata.Agent.Status = payload.Status + metadata.Agent.Summary = payload.Summary + + if metadata.Target == nil { + metadata.Target = &TargetMetadata{} + } + if payload.PrURL != "" { + metadata.Target.PrURL = payload.PrURL + } + + if err := executionCtx.Metadata.Set(metadata); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to set metadata: %w", err) + } + + // 6. 
Complete Workflow if finished + if isTerminalStatus(payload.Status) { + branchName := "" + if metadata.Target != nil { + branchName = metadata.Target.BranchName + } + outputPayload := buildOutputPayload(payload.Status, payload.ID, payload.PrURL, payload.Summary, branchName) + if err := executionCtx.ExecutionState.Emit(LaunchAgentDefaultChannel, LaunchAgentPayloadType, []any{outputPayload}); err != nil { + return http.StatusInternalServerError, err + } + } + + return http.StatusOK, nil +} + +func (c *LaunchAgent) Actions() []core.Action { + return []core.Action{{Name: "poll", UserAccessible: false}} +} + +func (c *LaunchAgent) HandleAction(ctx core.ActionContext) error { + if ctx.Name == "poll" { + return c.poll(ctx) + } + return fmt.Errorf("unknown action: %s", ctx.Name) +} + +func (c *LaunchAgent) poll(ctx core.ActionContext) error { + if ctx.ExecutionState.IsFinished() { + return nil + } + + metadata := LaunchAgentExecutionMetadata{} + if err := mapstructure.Decode(ctx.Metadata.Get(), &metadata); err != nil { + return fmt.Errorf("failed to decode metadata: %w", err) + } + + if metadata.Agent == nil || metadata.Agent.ID == "" || isTerminalStatus(metadata.Agent.Status) { + return nil + } + + // Retrieve polling parameters + pollAttempt := 1 + pollErrors := 0 + if attempt, ok := ctx.Parameters["attempt"].(float64); ok { + pollAttempt = int(attempt) + } + if errors, ok := ctx.Parameters["errors"].(float64); ok { + pollErrors = int(errors) + } + + // Check Max Attempts + if pollAttempt > LaunchAgentMaxPollAttempts { + ctx.Logger.Errorf("Agent %s exceeded maximum poll attempts. 
Failing.", metadata.Agent.ID) + branchName := "" + if metadata.Target != nil { + branchName = metadata.Target.BranchName + } + outputPayload := buildOutputPayload("timeout", metadata.Agent.ID, "", "Polling timed out", branchName) + return ctx.ExecutionState.Emit(LaunchAgentDefaultChannel, LaunchAgentPayloadType, []any{outputPayload}) + } + + // Perform API Check + ctx.Logger.Infof("Polling Agent %s (attempt %d/%d)...", metadata.Agent.ID, pollAttempt, LaunchAgentMaxPollAttempts) + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return c.scheduleNextPoll(ctx, pollAttempt+1, pollErrors) + } + + agentStatus, err := client.GetAgentStatus(metadata.Agent.ID) + if err != nil { + pollErrors++ + if pollErrors >= LaunchAgentMaxPollErrors { + ctx.Logger.Errorf("Agent %s exceeded max poll errors. Failing.", metadata.Agent.ID) + branchName := "" + if metadata.Target != nil { + branchName = metadata.Target.BranchName + } + outputPayload := buildOutputPayload("error", metadata.Agent.ID, "", "Polling failed repeatedly", branchName) + return ctx.ExecutionState.Emit(LaunchAgentDefaultChannel, LaunchAgentPayloadType, []any{outputPayload}) + } + return c.scheduleNextPoll(ctx, pollAttempt+1, pollErrors) + } + + // Update Metadata + pollErrors = 0 + metadata.Agent.Status = agentStatus.Status + metadata.Agent.Summary = agentStatus.Summary + if agentStatus.Target != nil { + if metadata.Target == nil { + metadata.Target = &TargetMetadata{} + } + if agentStatus.Target.URL != "" { + metadata.Agent.URL = agentStatus.Target.URL + } + if agentStatus.Target.PrURL != "" { + metadata.Target.PrURL = agentStatus.Target.PrURL + } + if agentStatus.Target.BranchName != "" { + metadata.Target.BranchName = agentStatus.Target.BranchName + } + } + _ = ctx.Metadata.Set(metadata) // Best effort save + + // Check for Completion + if isTerminalStatus(agentStatus.Status) { + prURL := "" + branchName := "" + if metadata.Target != nil { + prURL = metadata.Target.PrURL + branchName = 
metadata.Target.BranchName + } + outputPayload := buildOutputPayload(agentStatus.Status, metadata.Agent.ID, prURL, agentStatus.Summary, branchName) + return ctx.ExecutionState.Emit(LaunchAgentDefaultChannel, LaunchAgentPayloadType, []any{outputPayload}) + } + + return c.scheduleNextPoll(ctx, pollAttempt+1, pollErrors) +} + +func (c *LaunchAgent) scheduleNextPoll(ctx core.ActionContext, nextAttempt, errors int) error { + interval := LaunchAgentInitialPollInterval * time.Duration(1< LaunchAgentMaxPollInterval { + interval = LaunchAgentMaxPollInterval + } + return ctx.Requests.ScheduleActionCall("poll", map[string]any{"attempt": nextAttempt, "errors": errors}, interval) +} + +func (c *LaunchAgent) Cancel(ctx core.ExecutionContext) error { + metadata := LaunchAgentExecutionMetadata{} + if err := mapstructure.Decode(ctx.Metadata.Get(), &metadata); err != nil { + return nil + } + if metadata.Agent == nil || metadata.Agent.ID == "" { + return nil + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil + } + + if err := client.CancelAgent(metadata.Agent.ID); err != nil { + ctx.Logger.Warnf("Failed to cancel Cursor Agent %s: %v", metadata.Agent.ID, err) + } else { + ctx.Logger.Infof("Cancelled Cursor Agent %s", metadata.Agent.ID) + } + return nil +} + +func (c *LaunchAgent) Cleanup(ctx core.SetupContext) error { return nil } diff --git a/pkg/integrations/cursor/launch_agent_monitor_test.go b/pkg/integrations/cursor/launch_agent_monitor_test.go new file mode 100644 index 0000000000..3be12b470e --- /dev/null +++ b/pkg/integrations/cursor/launch_agent_monitor_test.go @@ -0,0 +1,504 @@ +package cursor + +import ( + "crypto/hmac" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "errors" + "io" + "net/http" + "strings" + "testing" + "time" + + "github.com/google/uuid" + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + 
"github.com/superplanehq/superplane/test/support/contexts" +) + +func generateSignature(body []byte, secret string) string { + mac := hmac.New(sha256.New, []byte(secret)) + mac.Write(body) + return hex.EncodeToString(mac.Sum(nil)) +} + +func Test__LaunchAgent__HandleWebhook__SignatureVerification(t *testing.T) { + c := &LaunchAgent{} + + t.Run("missing signature header -> unauthorized", func(t *testing.T) { + secret := "test-secret" + payload := launchAgentWebhookPayload{ + ID: "agent-123", + Status: "FINISHED", + } + body, _ := json.Marshal(payload) + + webhookCtx := core.WebhookRequestContext{ + Body: body, + Headers: http.Header{}, // No signature header + Webhook: &contexts.WebhookContext{Secret: secret}, + } + + status, err := c.HandleWebhook(webhookCtx) + require.Error(t, err) + assert.Equal(t, http.StatusUnauthorized, status) + assert.Contains(t, err.Error(), "missing signature header") + }) + + t.Run("invalid signature -> unauthorized", func(t *testing.T) { + secret := "test-secret" + payload := launchAgentWebhookPayload{ + ID: "agent-123", + Status: "FINISHED", + } + body, _ := json.Marshal(payload) + + webhookCtx := core.WebhookRequestContext{ + Body: body, + Headers: http.Header{LaunchAgentWebhookSignatureHeader: []string{"invalid-signature"}}, + Webhook: &contexts.WebhookContext{Secret: secret}, + } + + status, err := c.HandleWebhook(webhookCtx) + require.Error(t, err) + assert.Equal(t, http.StatusUnauthorized, status) + assert.Contains(t, err.Error(), "invalid webhook signature") + }) + + t.Run("valid signature -> success", func(t *testing.T) { + secret := "test-secret" + payload := launchAgentWebhookPayload{ + ID: "agent-123", + Status: "FINISHED", + } + body, _ := json.Marshal(payload) + signature := generateSignature(body, secret) + + metadataCtx := &contexts.MetadataContext{ + Metadata: LaunchAgentExecutionMetadata{ + Agent: &AgentMetadata{ID: "agent-123", Status: "RUNNING"}, + Target: &TargetMetadata{BranchName: "cursor/agent-abc"}, + }, + } + 
executionStateCtx := &contexts.ExecutionStateContext{KVs: map[string]string{"agent_id": "agent-123"}} + + webhookCtx := core.WebhookRequestContext{ + Body: body, + Headers: http.Header{LaunchAgentWebhookSignatureHeader: []string{signature}}, + Webhook: &contexts.WebhookContext{Secret: secret}, + FindExecutionByKV: func(key, value string) (*core.ExecutionContext, error) { + return &core.ExecutionContext{ + Metadata: metadataCtx, + ExecutionState: executionStateCtx, + Logger: logrus.NewEntry(logrus.New()), + }, nil + }, + } + + status, err := c.HandleWebhook(webhookCtx) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, status) + }) +} + +func Test__LaunchAgent__HandleWebhook__IdempotencyCheck(t *testing.T) { + c := &LaunchAgent{} + + t.Run("already terminal status -> returns OK without processing", func(t *testing.T) { + secret := "test-secret" + payload := launchAgentWebhookPayload{ + ID: "agent-123", + Status: "FINISHED", + } + body, _ := json.Marshal(payload) + signature := generateSignature(body, secret) + + metadataCtx := &contexts.MetadataContext{ + Metadata: LaunchAgentExecutionMetadata{ + Agent: &AgentMetadata{ID: "agent-123", Status: "FINISHED"}, // Already terminal + }, + } + executionStateCtx := &contexts.ExecutionStateContext{KVs: map[string]string{"agent_id": "agent-123"}} + + webhookCtx := core.WebhookRequestContext{ + Body: body, + Headers: http.Header{LaunchAgentWebhookSignatureHeader: []string{signature}}, + Webhook: &contexts.WebhookContext{Secret: secret}, + FindExecutionByKV: func(key, value string) (*core.ExecutionContext, error) { + return &core.ExecutionContext{ + Metadata: metadataCtx, + ExecutionState: executionStateCtx, + Logger: logrus.NewEntry(logrus.New()), + }, nil + }, + } + + status, err := c.HandleWebhook(webhookCtx) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, status) + // Emit should NOT have been called + assert.Empty(t, executionStateCtx.Channel) + }) +} + +func 
Test__LaunchAgent__HandleWebhook__ExecutionNotFound(t *testing.T) { + c := &LaunchAgent{} + + t.Run("execution not found -> returns OK to stop retries", func(t *testing.T) { + secret := "test-secret" + payload := launchAgentWebhookPayload{ + ID: "agent-123", + Status: "FINISHED", + } + body, _ := json.Marshal(payload) + signature := generateSignature(body, secret) + + webhookCtx := core.WebhookRequestContext{ + Body: body, + Headers: http.Header{LaunchAgentWebhookSignatureHeader: []string{signature}}, + Webhook: &contexts.WebhookContext{Secret: secret}, + FindExecutionByKV: func(key, value string) (*core.ExecutionContext, error) { + return nil, errors.New("execution not found") + }, + } + + status, err := c.HandleWebhook(webhookCtx) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, status) + }) +} + +func Test__LaunchAgent__Actions(t *testing.T) { + c := &LaunchAgent{} + + t.Run("returns poll action", func(t *testing.T) { + actions := c.Actions() + require.Len(t, actions, 1) + assert.Equal(t, "poll", actions[0].Name) + assert.False(t, actions[0].UserAccessible) + }) +} + +func Test__LaunchAgent__HandleAction(t *testing.T) { + c := &LaunchAgent{} + + t.Run("unknown action -> error", func(t *testing.T) { + ctx := core.ActionContext{Name: "unknown"} + err := c.HandleAction(ctx) + require.Error(t, err) + assert.Contains(t, err.Error(), "unknown action") + }) +} + +func Test__LaunchAgent__Poll(t *testing.T) { + c := &LaunchAgent{} + + t.Run("execution already finished -> no-op", func(t *testing.T) { + executionStateCtx := &contexts.ExecutionStateContext{Finished: true} + + ctx := core.ActionContext{ + Name: "poll", + ExecutionState: executionStateCtx, + } + + err := c.HandleAction(ctx) + require.NoError(t, err) + }) + + t.Run("no agent metadata -> no-op", func(t *testing.T) { + metadataCtx := &contexts.MetadataContext{ + Metadata: LaunchAgentExecutionMetadata{Agent: nil}, + } + executionStateCtx := &contexts.ExecutionStateContext{Finished: false} + + ctx := 
core.ActionContext{ + Name: "poll", + Metadata: metadataCtx, + ExecutionState: executionStateCtx, + Parameters: map[string]any{}, + } + + err := c.HandleAction(ctx) + require.NoError(t, err) + }) + + t.Run("agent already terminal -> no-op", func(t *testing.T) { + metadataCtx := &contexts.MetadataContext{ + Metadata: LaunchAgentExecutionMetadata{ + Agent: &AgentMetadata{ID: "agent-123", Status: "FINISHED"}, + }, + } + executionStateCtx := &contexts.ExecutionStateContext{Finished: false} + + ctx := core.ActionContext{ + Name: "poll", + Metadata: metadataCtx, + ExecutionState: executionStateCtx, + Parameters: map[string]any{}, + } + + err := c.HandleAction(ctx) + require.NoError(t, err) + }) + + t.Run("max poll attempts exceeded -> emits timeout", func(t *testing.T) { + metadataCtx := &contexts.MetadataContext{ + Metadata: LaunchAgentExecutionMetadata{ + Agent: &AgentMetadata{ID: "agent-123", Status: "RUNNING"}, + Target: &TargetMetadata{BranchName: "cursor/agent-abc"}, + }, + } + executionStateCtx := &contexts.ExecutionStateContext{Finished: false, KVs: map[string]string{}} + + ctx := core.ActionContext{ + Name: "poll", + Metadata: metadataCtx, + ExecutionState: executionStateCtx, + Parameters: map[string]any{"attempt": float64(LaunchAgentMaxPollAttempts + 1)}, + Logger: logrus.NewEntry(logrus.New()), + } + + err := c.HandleAction(ctx) + require.NoError(t, err) + assert.True(t, executionStateCtx.Finished) + assert.Equal(t, LaunchAgentDefaultChannel, executionStateCtx.Channel) + }) + + t.Run("successful poll with terminal status -> emits completion", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{ + "id": "agent-123", + "status": "FINISHED", + "summary": "Task completed", + "target": {"prUrl": "https://github.com/org/repo/pull/42"} + }`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: 
map[string]any{"launchAgentKey": "test-key"}, + } + + metadataCtx := &contexts.MetadataContext{ + Metadata: LaunchAgentExecutionMetadata{ + Agent: &AgentMetadata{ID: "agent-123", Status: "RUNNING"}, + Target: &TargetMetadata{BranchName: "cursor/agent-abc"}, + }, + } + executionStateCtx := &contexts.ExecutionStateContext{Finished: false, KVs: map[string]string{}} + + ctx := core.ActionContext{ + Name: "poll", + HTTP: httpContext, + Integration: integrationCtx, + Metadata: metadataCtx, + ExecutionState: executionStateCtx, + Parameters: map[string]any{"attempt": float64(1)}, + Logger: logrus.NewEntry(logrus.New()), + } + + err := c.HandleAction(ctx) + require.NoError(t, err) + assert.True(t, executionStateCtx.Finished) + assert.Equal(t, LaunchAgentDefaultChannel, executionStateCtx.Channel) + assert.Equal(t, LaunchAgentPayloadType, executionStateCtx.Type) + }) + + t.Run("poll API error -> schedules next poll with error count", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusInternalServerError, + Body: io.NopCloser(strings.NewReader(`{"error": "server error"}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{"launchAgentKey": "test-key"}, + } + + metadataCtx := &contexts.MetadataContext{ + Metadata: LaunchAgentExecutionMetadata{ + Agent: &AgentMetadata{ID: "agent-123", Status: "RUNNING"}, + Target: &TargetMetadata{BranchName: "cursor/agent-abc"}, + }, + } + executionStateCtx := &contexts.ExecutionStateContext{Finished: false, KVs: map[string]string{}} + requestsCtx := &contexts.RequestContext{} + + ctx := core.ActionContext{ + Name: "poll", + HTTP: httpContext, + Integration: integrationCtx, + Metadata: metadataCtx, + ExecutionState: executionStateCtx, + Requests: requestsCtx, + Parameters: map[string]any{"attempt": float64(1), "errors": float64(0)}, + Logger: logrus.NewEntry(logrus.New()), + } + + err := c.HandleAction(ctx) + require.NoError(t, 
err) + assert.Equal(t, "poll", requestsCtx.Action) + assert.Equal(t, 2, requestsCtx.Params["attempt"]) + assert.Equal(t, 1, requestsCtx.Params["errors"]) + }) + + t.Run("max poll errors exceeded -> emits error", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusInternalServerError, + Body: io.NopCloser(strings.NewReader(`{"error": "server error"}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{"launchAgentKey": "test-key"}, + } + + metadataCtx := &contexts.MetadataContext{ + Metadata: LaunchAgentExecutionMetadata{ + Agent: &AgentMetadata{ID: "agent-123", Status: "RUNNING"}, + Target: &TargetMetadata{BranchName: "cursor/agent-abc"}, + }, + } + executionStateCtx := &contexts.ExecutionStateContext{Finished: false, KVs: map[string]string{}} + + ctx := core.ActionContext{ + Name: "poll", + HTTP: httpContext, + Integration: integrationCtx, + Metadata: metadataCtx, + ExecutionState: executionStateCtx, + Parameters: map[string]any{"attempt": float64(1), "errors": float64(LaunchAgentMaxPollErrors - 1)}, + Logger: logrus.NewEntry(logrus.New()), + } + + err := c.HandleAction(ctx) + require.NoError(t, err) + assert.True(t, executionStateCtx.Finished) + assert.Equal(t, LaunchAgentDefaultChannel, executionStateCtx.Channel) + }) +} + +func Test__LaunchAgent__ScheduleNextPoll(t *testing.T) { + c := &LaunchAgent{} + + t.Run("calculates exponential backoff", func(t *testing.T) { + requestsCtx := &contexts.RequestContext{} + + ctx := core.ActionContext{ + Requests: requestsCtx, + } + + err := c.scheduleNextPoll(ctx, 2, 0) + require.NoError(t, err) + assert.Equal(t, "poll", requestsCtx.Action) + assert.Equal(t, 2, requestsCtx.Params["attempt"]) + assert.Equal(t, 0, requestsCtx.Params["errors"]) + // First poll: 30s * 2^(2-1) = 60s + assert.Equal(t, 60*time.Second, requestsCtx.Duration) + }) + + t.Run("caps at max poll interval", func(t *testing.T) { + 
requestsCtx := &contexts.RequestContext{} + + ctx := core.ActionContext{ + Requests: requestsCtx, + } + + err := c.scheduleNextPoll(ctx, 20, 0) + require.NoError(t, err) + assert.LessOrEqual(t, requestsCtx.Duration, LaunchAgentMaxPollInterval) + }) +} + +func Test__LaunchAgent__Cancel(t *testing.T) { + c := &LaunchAgent{} + + t.Run("no agent metadata -> no-op", func(t *testing.T) { + metadataCtx := &contexts.MetadataContext{ + Metadata: LaunchAgentExecutionMetadata{Agent: nil}, + } + + ctx := core.ExecutionContext{ + ID: uuid.New(), + Metadata: metadataCtx, + } + + err := c.Cancel(ctx) + require.NoError(t, err) + }) + + t.Run("empty agent ID -> no-op", func(t *testing.T) { + metadataCtx := &contexts.MetadataContext{ + Metadata: LaunchAgentExecutionMetadata{ + Agent: &AgentMetadata{ID: ""}, + }, + } + + ctx := core.ExecutionContext{ + ID: uuid.New(), + Metadata: metadataCtx, + } + + err := c.Cancel(ctx) + require.NoError(t, err) + }) + + t.Run("successful cancel", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{"launchAgentKey": "test-key"}, + } + + metadataCtx := &contexts.MetadataContext{ + Metadata: LaunchAgentExecutionMetadata{ + Agent: &AgentMetadata{ID: "agent-123"}, + }, + } + + ctx := core.ExecutionContext{ + ID: uuid.New(), + HTTP: httpContext, + Integration: integrationCtx, + Metadata: metadataCtx, + Logger: logrus.NewEntry(logrus.New()), + } + + err := c.Cancel(ctx) + require.NoError(t, err) + require.Len(t, httpContext.Requests, 1) + assert.Contains(t, httpContext.Requests[0].URL.String(), "agent-123") + assert.Equal(t, http.MethodPost, httpContext.Requests[0].Method) + }) +} + +func Test__LaunchAgent__Cleanup(t *testing.T) { + c := &LaunchAgent{} + + t.Run("returns nil (no-op)", func(t *testing.T) { + ctx := core.SetupContext{} + err 
:= c.Cleanup(ctx) + require.NoError(t, err) + }) +} diff --git a/pkg/integrations/cursor/launch_agent_test.go b/pkg/integrations/cursor/launch_agent_test.go new file mode 100644 index 0000000000..af571a6210 --- /dev/null +++ b/pkg/integrations/cursor/launch_agent_test.go @@ -0,0 +1,353 @@ +package cursor + +import ( + "encoding/json" + "io" + "net/http" + "strings" + "testing" + + "github.com/google/uuid" + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__LaunchAgent__Setup(t *testing.T) { + c := &LaunchAgent{} + + t.Run("valid repository mode config", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{} + webhookCtx := &contexts.WebhookContext{} + setupCtx := core.SetupContext{ + Configuration: map[string]any{ + "prompt": "Fix the bug", + "sourceMode": "repository", + "repository": "https://github.com/org/repo", + "branch": "main", + }, + Integration: integrationCtx, + Webhook: webhookCtx, + } + + err := c.Setup(setupCtx) + require.NoError(t, err) + }) + + t.Run("valid PR mode config", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{} + webhookCtx := &contexts.WebhookContext{} + setupCtx := core.SetupContext{ + Configuration: map[string]any{ + "prompt": "Fix the PR", + "sourceMode": "pr", + "prUrl": "https://github.com/org/repo/pull/42", + }, + Integration: integrationCtx, + Webhook: webhookCtx, + } + + err := c.Setup(setupCtx) + require.NoError(t, err) + }) + + t.Run("missing prompt -> error", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{} + webhookCtx := &contexts.WebhookContext{} + setupCtx := core.SetupContext{ + Configuration: map[string]any{ + "sourceMode": "repository", + "repository": "https://github.com/org/repo", + }, + Integration: integrationCtx, + Webhook: webhookCtx, + } + + err := c.Setup(setupCtx) + 
require.Error(t, err) + assert.Contains(t, err.Error(), "prompt is required") + }) + + t.Run("repository mode without repository -> error", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{} + webhookCtx := &contexts.WebhookContext{} + setupCtx := core.SetupContext{ + Configuration: map[string]any{ + "prompt": "Fix the bug", + "sourceMode": "repository", + }, + Integration: integrationCtx, + Webhook: webhookCtx, + } + + err := c.Setup(setupCtx) + require.Error(t, err) + assert.Contains(t, err.Error(), "repository URL is required") + }) + + t.Run("PR mode without prUrl -> error", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{} + webhookCtx := &contexts.WebhookContext{} + setupCtx := core.SetupContext{ + Configuration: map[string]any{ + "prompt": "Fix the PR", + "sourceMode": "pr", + }, + Integration: integrationCtx, + Webhook: webhookCtx, + } + + err := c.Setup(setupCtx) + require.Error(t, err) + assert.Contains(t, err.Error(), "PR URL is required") + }) + + t.Run("repository mode with non-empty repository is accepted", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{} + webhookCtx := &contexts.WebhookContext{} + setupCtx := core.SetupContext{ + Configuration: map[string]any{ + "prompt": "Fix the bug", + "sourceMode": "repository", + "repository": "not-a-url", + }, + Integration: integrationCtx, + Webhook: webhookCtx, + } + + err := c.Setup(setupCtx) + require.NoError(t, err) + }) +} + +func Test__LaunchAgent__Execute(t *testing.T) { + c := &LaunchAgent{} + + t.Run("successful launch", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{ + "id": "agent-123", + "status": "CREATING", + "target": {"branchName": "cursor/agent-abc123"} + }`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + IntegrationID: uuid.New().String(), + Configuration: map[string]any{ + 
"launchAgentKey": "test-key", + }, + } + + executionID := uuid.New() + metadataCtx := &contexts.MetadataContext{} + executionStateCtx := &contexts.ExecutionStateContext{KVs: make(map[string]string)} + requestsCtx := &contexts.RequestContext{} + webhookCtx := &contexts.WebhookContext{Secret: "platform-managed-secret"} + + execCtx := core.ExecutionContext{ + ID: executionID, + Configuration: map[string]any{ + "prompt": "Fix the bug", + "sourceMode": "repository", + "repository": "https://github.com/org/repo", + "branch": "main", + "autoCreatePr": true, + "useCursorBot": true, + }, + HTTP: httpContext, + Integration: integrationCtx, + Metadata: metadataCtx, + ExecutionState: executionStateCtx, + Requests: requestsCtx, + Webhook: webhookCtx, + Logger: logrus.NewEntry(logrus.New()), + BaseURL: "https://superplane.example.com", + } + + err := c.Execute(execCtx) + require.NoError(t, err) + require.Len(t, httpContext.Requests, 1) + assert.Equal(t, "https://api.cursor.com/v0/agents", httpContext.Requests[0].URL.String()) + assert.NotNil(t, metadataCtx.Metadata) + assert.Equal(t, "agent-123", executionStateCtx.KVs["agent_id"]) + assert.Equal(t, "poll", requestsCtx.Action) + }) + + t.Run("missing cloud agent key -> error", func(t *testing.T) { + httpContext := &contexts.HTTPContext{} + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{}, + } + + execCtx := core.ExecutionContext{ + ID: uuid.New(), + Configuration: map[string]any{ + "prompt": "Fix the bug", + "sourceMode": "repository", + "repository": "https://github.com/org/repo", + }, + HTTP: httpContext, + Integration: integrationCtx, + } + + err := c.Execute(execCtx) + require.Error(t, err) + assert.Contains(t, err.Error(), "cloud agent API key is not configured") + }) +} + +func Test__LaunchAgent__HandleWebhook(t *testing.T) { + c := &LaunchAgent{} + + t.Run("successful completion webhook", func(t *testing.T) { + secret := "test-secret" + payload := launchAgentWebhookPayload{ + ID: 
"agent-123", + Status: "FINISHED", + PrURL: "https://github.com/org/repo/pull/42", + Summary: "Fixed the bug", + } + body, _ := json.Marshal(payload) + signature := generateSignature(body, secret) + metadataCtx := &contexts.MetadataContext{ + Metadata: LaunchAgentExecutionMetadata{ + Agent: &AgentMetadata{ + ID: "agent-123", + Status: "RUNNING", + }, + Target: &TargetMetadata{ + BranchName: "cursor/agent-abc123", + }, + }, + } + executionStateCtx := &contexts.ExecutionStateContext{ + KVs: map[string]string{ + "agent_id": "agent-123", + }, + } + + webhookCtx := core.WebhookRequestContext{ + Body: body, + Headers: http.Header{LaunchAgentWebhookSignatureHeader: []string{signature}}, + Webhook: &contexts.WebhookContext{Secret: secret}, + FindExecutionByKV: func(key, value string) (*core.ExecutionContext, error) { + return &core.ExecutionContext{ + Metadata: metadataCtx, + ExecutionState: executionStateCtx, + Logger: logrus.NewEntry(logrus.New()), + }, nil + }, + } + + status, err := c.HandleWebhook(webhookCtx) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, status) + + // Verify emit was called on default channel + assert.Equal(t, LaunchAgentDefaultChannel, executionStateCtx.Channel) + assert.Equal(t, LaunchAgentPayloadType, executionStateCtx.Type) + + // Verify metadata was updated + updatedMetadata := metadataCtx.Metadata.(LaunchAgentExecutionMetadata) + assert.Equal(t, "FINISHED", updatedMetadata.Agent.Status) + assert.Equal(t, "https://github.com/org/repo/pull/42", updatedMetadata.Target.PrURL) + }) + + t.Run("failed agent webhook", func(t *testing.T) { + secret := "test-secret" + payload := launchAgentWebhookPayload{ + ID: "agent-123", + Status: "failed", + Summary: "Agent encountered an error", + } + body, _ := json.Marshal(payload) + signature := generateSignature(body, secret) + + metadataCtx := &contexts.MetadataContext{ + Metadata: LaunchAgentExecutionMetadata{ + Agent: &AgentMetadata{ + ID: "agent-123", + Status: "RUNNING", + }, + }, + } + 
executionStateCtx := &contexts.ExecutionStateContext{ + KVs: map[string]string{ + "agent_id": "agent-123", + }, + } + + webhookCtx := core.WebhookRequestContext{ + Body: body, + Headers: http.Header{LaunchAgentWebhookSignatureHeader: []string{signature}}, + Webhook: &contexts.WebhookContext{Secret: secret}, + FindExecutionByKV: func(key, value string) (*core.ExecutionContext, error) { + return &core.ExecutionContext{ + Metadata: metadataCtx, + ExecutionState: executionStateCtx, + Logger: logrus.NewEntry(logrus.New()), + }, nil + }, + } + + status, err := c.HandleWebhook(webhookCtx) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, status) + + // Verify emit was called on default channel + assert.Equal(t, LaunchAgentDefaultChannel, executionStateCtx.Channel) + }) + + t.Run("missing agent ID -> bad request", func(t *testing.T) { + secret := "test-secret" + payload := launchAgentWebhookPayload{ + Status: "FINISHED", + } + body, _ := json.Marshal(payload) + signature := generateSignature(body, secret) + + webhookCtx := core.WebhookRequestContext{ + Body: body, + Headers: http.Header{LaunchAgentWebhookSignatureHeader: []string{signature}}, + Webhook: &contexts.WebhookContext{Secret: secret}, + } + + status, err := c.HandleWebhook(webhookCtx) + require.Error(t, err) + assert.Equal(t, http.StatusBadRequest, status) + assert.Contains(t, err.Error(), "id missing") + }) + + t.Run("invalid JSON -> bad request", func(t *testing.T) { + secret := "test-secret" + signature := generateSignature([]byte("not json"), secret) + + webhookCtx := core.WebhookRequestContext{ + Body: []byte("not json"), + Headers: http.Header{LaunchAgentWebhookSignatureHeader: []string{signature}}, + Webhook: &contexts.WebhookContext{Secret: secret}, + } + + status, err := c.HandleWebhook(webhookCtx) + require.Error(t, err) + assert.Equal(t, http.StatusBadRequest, status) + }) +} + +func Test__LaunchAgent__OutputChannels(t *testing.T) { + c := &LaunchAgent{} + channels := c.OutputChannels(nil) + + 
assert.Len(t, channels, 1) + assert.Equal(t, LaunchAgentDefaultChannel, channels[0].Name) + assert.Equal(t, "Default", channels[0].Label) +} diff --git a/pkg/integrations/cursor/launch_agent_types.go b/pkg/integrations/cursor/launch_agent_types.go new file mode 100644 index 0000000000..d615eb53be --- /dev/null +++ b/pkg/integrations/cursor/launch_agent_types.go @@ -0,0 +1,190 @@ +package cursor + +import ( + "crypto/hmac" + "crypto/sha256" + "encoding/hex" + "strings" + "time" +) + +// --- CONSTANTS --- + +const ( + LaunchAgentPayloadType = "cursor.launchAgent.finished" + LaunchAgentDefaultChannel = "default" + LaunchAgentStatusCreating = "CREATING" + LaunchAgentStatusRunning = "RUNNING" + LaunchAgentStatusFinished = "FINISHED" + LaunchAgentStatusDone = "done" + LaunchAgentStatusSucceeded = "succeeded" + LaunchAgentStatusFailed = "failed" + LaunchAgentStatusError = "error" + LaunchAgentDefaultBranch = "main" + LaunchAgentBranchPrefix = "cursor/agent-" + LaunchAgentSkipReviewerRequest = false + LaunchAgentInitialPollInterval = 30 * time.Second + LaunchAgentMaxPollInterval = 10 * time.Minute + LaunchAgentMaxPollAttempts = 100 + LaunchAgentMaxPollErrors = 5 + LaunchAgentWebhookSignatureHeader = "X-Webhook-Signature" +) + +// --- CONFIGURATION STRUCTS --- + +type LaunchAgentSpec struct { + Prompt string `json:"prompt" mapstructure:"prompt"` + Model string `json:"model" mapstructure:"model"` + SourceMode string `json:"sourceMode" mapstructure:"sourceMode"` + Repository string `json:"repository" mapstructure:"repository"` + Branch string `json:"branch" mapstructure:"branch"` + PrURL string `json:"prUrl" mapstructure:"prUrl"` + AutoCreatePr bool `json:"autoCreatePr" mapstructure:"autoCreatePr"` + UseCursorBot bool `json:"useCursorBot" mapstructure:"useCursorBot"` +} + +// --- STATE STRUCTS (DB PERSISTENCE) --- + +type LaunchAgentExecutionMetadata struct { + Agent *AgentMetadata `json:"agent,omitempty" mapstructure:"agent,omitempty"` + Target *TargetMetadata 
`json:"target,omitempty" mapstructure:"target,omitempty"` + Source *SourceMetadata `json:"source,omitempty" mapstructure:"source,omitempty"` +} + +type AgentMetadata struct { + ID string `json:"id" mapstructure:"id"` + Name string `json:"name,omitempty" mapstructure:"name,omitempty"` + Status string `json:"status" mapstructure:"status"` + URL string `json:"url,omitempty" mapstructure:"url,omitempty"` + Summary string `json:"summary,omitempty" mapstructure:"summary,omitempty"` +} + +type TargetMetadata struct { + BranchName string `json:"branchName,omitempty" mapstructure:"branchName,omitempty"` + PrURL string `json:"prUrl,omitempty" mapstructure:"prUrl,omitempty"` +} + +type SourceMetadata struct { + Repository string `json:"repository,omitempty" mapstructure:"repository,omitempty"` + Ref string `json:"ref,omitempty" mapstructure:"ref,omitempty"` +} + +// --- API DTOs (EXTERNAL CONTRACT) --- + +type launchAgentRequest struct { + Prompt launchAgentPrompt `json:"prompt"` + Model string `json:"model,omitempty"` + Source launchAgentSource `json:"source"` + Target launchAgentTarget `json:"target,omitempty"` + Webhook launchAgentWebhook `json:"webhook,omitempty"` +} + +type launchAgentPrompt struct { + Text string `json:"text"` +} + +type launchAgentSource struct { + Repository string `json:"repository,omitempty"` + Ref string `json:"ref,omitempty"` + PrURL string `json:"prUrl,omitempty"` +} + +type launchAgentTarget struct { + AutoCreatePr *bool `json:"autoCreatePr,omitempty"` + OpenAsCursorGithubApp *bool `json:"openAsCursorGithubApp,omitempty"` + BranchName string `json:"branchName,omitempty"` + AutoBranch *bool `json:"autoBranch,omitempty"` + SkipReviewerRequest *bool `json:"skipReviewerRequest,omitempty"` +} + +type launchAgentWebhook struct { + URL string `json:"url"` + Secret string `json:"secret,omitempty"` +} + +type LaunchAgentResponse struct { + ID string `json:"id"` + Name string `json:"name,omitempty"` + Status string `json:"status"` + Source 
*launchAgentSourceResponse `json:"source,omitempty"` + Target *launchAgentTargetResponse `json:"target,omitempty"` + Summary string `json:"summary,omitempty"` + CreatedAt string `json:"createdAt,omitempty"` +} + +type launchAgentSourceResponse struct { + Repository string `json:"repository,omitempty"` + Ref string `json:"ref,omitempty"` +} + +type launchAgentTargetResponse struct { + BranchName string `json:"branchName,omitempty"` + URL string `json:"url,omitempty"` + PrURL string `json:"prUrl,omitempty"` + AutoCreatePr *bool `json:"autoCreatePr,omitempty"` + OpenAsCursorGithubApp *bool `json:"openAsCursorGithubApp,omitempty"` + SkipReviewerRequest *bool `json:"skipReviewerRequest,omitempty"` +} + +type launchAgentWebhookPayload struct { + ID string `json:"id"` + Status string `json:"status"` + PrURL string `json:"prUrl,omitempty"` + Summary string `json:"summary,omitempty"` +} + +type LaunchAgentOutputPayload struct { + Status string `json:"status"` + AgentID string `json:"agentId"` + PrURL string `json:"prUrl,omitempty"` + Summary string `json:"summary,omitempty"` + BranchName string `json:"branchName,omitempty"` +} + +// --- HELPER FUNCTIONS --- + +func isSuccessStatus(status string) bool { + return status == LaunchAgentStatusFinished || + status == LaunchAgentStatusDone || + status == LaunchAgentStatusSucceeded +} + +func isFailureStatus(status string) bool { + return status == LaunchAgentStatusFailed || + status == LaunchAgentStatusError +} + +func isTerminalStatus(status string) bool { + return isSuccessStatus(status) || isFailureStatus(status) +} + +func buildOutputPayload(status, agentID, prURL, summary, branchName string) LaunchAgentOutputPayload { + return LaunchAgentOutputPayload{ + Status: status, + AgentID: agentID, + PrURL: prURL, + Summary: summary, + BranchName: branchName, + } +} + +func verifyWebhookSignature(body []byte, signature, secret string) bool { + if signature == "" || secret == "" { + return false + } + + // Cursor sends signature in 
format "sha256=" + // Strip the "sha256=" prefix if present + signature = strings.TrimPrefix(signature, "sha256=") + if signature == "" { + return false + } + + // Compute expected signature + mac := hmac.New(sha256.New, []byte(secret)) + mac.Write(body) + expectedSig := hex.EncodeToString(mac.Sum(nil)) + + // Compare signatures using constant-time comparison + return hmac.Equal([]byte(signature), []byte(expectedSig)) +} diff --git a/pkg/server/server.go b/pkg/server/server.go index c3c95d960a..b38e2a295c 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -36,6 +36,7 @@ import ( _ "github.com/superplanehq/superplane/pkg/integrations/circleci" _ "github.com/superplanehq/superplane/pkg/integrations/claude" _ "github.com/superplanehq/superplane/pkg/integrations/cloudflare" + _ "github.com/superplanehq/superplane/pkg/integrations/cursor" _ "github.com/superplanehq/superplane/pkg/integrations/dash0" _ "github.com/superplanehq/superplane/pkg/integrations/datadog" _ "github.com/superplanehq/superplane/pkg/integrations/daytona" diff --git a/web_src/src/assets/icons/integrations/cursor.svg b/web_src/src/assets/icons/integrations/cursor.svg new file mode 100644 index 0000000000..1f8a7c338a --- /dev/null +++ b/web_src/src/assets/icons/integrations/cursor.svg @@ -0,0 +1,32 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/web_src/src/pages/workflowv2/mappers/cursor/base.ts b/web_src/src/pages/workflowv2/mappers/cursor/base.ts new file mode 100644 index 0000000000..4334f33f1d --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/cursor/base.ts @@ -0,0 +1,70 @@ +import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { getState, getStateMap, getTriggerRenderer } from ".."; +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + ExecutionInfo, + NodeInfo, + OutputPayload, + SubtitleContext, +} from "../types"; +import cursorIcon from "@/assets/icons/integrations/cursor.svg"; +import { 
formatTimeAgo } from "@/utils/date"; + +export const baseMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; + const componentName = context.componentDefinition.name ?? "cursor"; + + return { + iconSrc: cursorIcon, + iconSlug: context.componentDefinition?.icon ?? "cpu", + collapsedBackground: "bg-white", + collapsed: context.node.isCollapsed, + title: context.node.name || context.componentDefinition?.label || context.componentDefinition?.name || "Cursor", + eventSections: lastExecution ? baseEventSections(context.nodes, lastExecution, componentName) : undefined, + includeEmptyState: !lastExecution, + eventStateMap: getStateMap(componentName), + }; + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const details: Record = {}; + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const payload = outputs?.default?.[0]; + + if (payload?.type) { + details["Event Type"] = payload.type; + } + + if (payload?.timestamp) { + details["Emitted At"] = new Date(payload.timestamp).toLocaleString(); + } + + return details; + }, + + subtitle(context: SubtitleContext): string { + const timestamp = context.execution.updatedAt || context.execution.createdAt; + return timestamp ? formatTimeAgo(new Date(timestamp)) : ""; + }, +}; + +function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); + const subtitleTimestamp = execution.updatedAt || execution.createdAt; + const eventSubtitle = subtitleTimestamp ? 
formatTimeAgo(new Date(subtitleTimestamp)) : ""; + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle, + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/cursor/get_daily_usage_data.ts b/web_src/src/pages/workflowv2/mappers/cursor/get_daily_usage_data.ts new file mode 100644 index 0000000000..8324357541 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/cursor/get_daily_usage_data.ts @@ -0,0 +1,103 @@ +import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { getState, getStateMap, getTriggerRenderer } from ".."; +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + ExecutionInfo, + NodeInfo, + OutputPayload, + SubtitleContext, +} from "../types"; +import cursorIcon from "@/assets/icons/integrations/cursor.svg"; +import { formatTimeAgo } from "@/utils/date"; + +type GetDailyUsageDataPayload = { + data?: unknown[]; + period?: { startDate?: number; endDate?: number }; +}; + +function formatPeriod(period: GetDailyUsageDataPayload["period"]): string | undefined { + if (!period) return undefined; + const start = period.startDate != null ? new Date(period.startDate) : null; + const end = period.endDate != null ? new Date(period.endDate) : null; + if (!start && !end) return undefined; + const startStr = start ? start.toLocaleDateString() : "?"; + const endStr = end ? end.toLocaleDateString() : "?"; + return `${startStr} – ${endStr}`; +} + +export const getDailyUsageDataMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; + const componentName = context.componentDefinition.name ?? "cursor"; + + return { + iconSrc: cursorIcon, + iconSlug: context.componentDefinition?.icon ?? 
"cpu", + collapsedBackground: "bg-white", + collapsed: context.node.isCollapsed, + title: + context.node.name || + context.componentDefinition?.label || + context.componentDefinition?.name || + "Get Daily Usage Data", + eventSections: lastExecution + ? getDailyUsageDataEventSections(context.nodes, lastExecution, componentName) + : undefined, + includeEmptyState: !lastExecution, + eventStateMap: getStateMap(componentName), + }; + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const details: Record = {}; + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const payload = outputs?.default?.[0]; + const data = payload?.data as GetDailyUsageDataPayload | undefined; + + if (data?.period) { + const periodStr = formatPeriod(data.period); + if (periodStr) { + details["Period"] = periodStr; + } + } + + if (Array.isArray(data?.data)) { + details["Users"] = String(data.data.length); + } + + if (payload?.timestamp) { + details["Fetched At"] = new Date(payload.timestamp).toLocaleString(); + } + + return details; + }, + + subtitle(context: SubtitleContext): string { + const timestamp = context.execution.updatedAt || context.execution.createdAt; + return timestamp ? formatTimeAgo(new Date(timestamp)) : ""; + }, +}; + +function getDailyUsageDataEventSections( + nodes: NodeInfo[], + execution: ExecutionInfo, + componentName: string, +): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); + const subtitleTimestamp = execution.updatedAt || execution.createdAt; + const eventSubtitle = subtitleTimestamp ? 
formatTimeAgo(new Date(subtitleTimestamp)) : ""; + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle, + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/cursor/index.ts b/web_src/src/pages/workflowv2/mappers/cursor/index.ts new file mode 100644 index 0000000000..d9ea382696 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/cursor/index.ts @@ -0,0 +1,16 @@ +import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../types"; +import { buildActionStateRegistry } from "../utils"; +import { launchAgentMapper } from "./launch_agent"; +import { getDailyUsageDataMapper } from "./get_daily_usage_data"; + +export const componentMappers: Record = { + launchAgent: launchAgentMapper, + getDailyUsageData: getDailyUsageDataMapper, +}; + +export const triggerRenderers: Record = {}; + +export const eventStateRegistry: Record = { + launchAgent: buildActionStateRegistry("completed"), + getDailyUsageData: buildActionStateRegistry("completed"), +}; diff --git a/web_src/src/pages/workflowv2/mappers/cursor/launch_agent.ts b/web_src/src/pages/workflowv2/mappers/cursor/launch_agent.ts new file mode 100644 index 0000000000..12f3cc082c --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/cursor/launch_agent.ts @@ -0,0 +1,104 @@ +import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { getState, getStateMap, getTriggerRenderer } from ".."; +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + ExecutionInfo, + NodeInfo, + OutputPayload, + SubtitleContext, +} from "../types"; +import cursorIcon from "@/assets/icons/integrations/cursor.svg"; +import { formatTimeAgo } from "@/utils/date"; + +type LaunchAgentMetadata = { + agent?: { id?: string; url?: string; status?: string; summary?: string }; + target?: { prUrl?: string; branchName?: string }; +}; + +type 
LaunchAgentPayloadData = { + status?: string; + agentId?: string; + prUrl?: string; + summary?: string; + branchName?: string; +}; + +function addDetail(details: Record, key: string, value: string | undefined) { + if (value) { + details[key] = value; + } +} + +export const launchAgentMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; + const componentName = context.componentDefinition.name ?? "cursor"; + + return { + iconSrc: cursorIcon, + iconSlug: context.componentDefinition?.icon ?? "cpu", + collapsedBackground: "bg-white", + collapsed: context.node.isCollapsed, + title: + context.node.name || + context.componentDefinition?.label || + context.componentDefinition?.name || + "Launch Cloud Agent", + eventSections: lastExecution ? launchAgentEventSections(context.nodes, lastExecution, componentName) : undefined, + includeEmptyState: !lastExecution, + eventStateMap: getStateMap(componentName), + }; + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const details: Record = {}; + const metadata = context.execution.metadata as LaunchAgentMetadata | undefined; + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const payload = outputs?.default?.[0]; + const data = payload?.data as LaunchAgentPayloadData | undefined; + + // Cloud Agent link (from metadata; API may set agent.url) + addDetail(details, "Cloud Agent", metadata?.agent?.url); + + // PR link (if created) + addDetail(details, "Pull Request", metadata?.target?.prUrl ?? data?.prUrl); + + // Branch name + addDetail(details, "Branch", metadata?.target?.branchName ?? data?.branchName); + + // Status and summary for context + addDetail(details, "Status", data?.status ?? metadata?.agent?.status); + addDetail(details, "Summary", data?.summary ?? 
metadata?.agent?.summary); + + if (payload?.timestamp) { + details["Emitted At"] = new Date(payload.timestamp).toLocaleString(); + } + + return details; + }, + + subtitle(context: SubtitleContext): string { + const timestamp = context.execution.updatedAt || context.execution.createdAt; + return timestamp ? formatTimeAgo(new Date(timestamp)) : ""; + }, +}; + +function launchAgentEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); + const subtitleTimestamp = execution.updatedAt || execution.createdAt; + const eventSubtitle = subtitleTimestamp ? formatTimeAgo(new Date(subtitleTimestamp)) : ""; + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle, + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/index.ts b/web_src/src/pages/workflowv2/mappers/index.ts index a3867f53bd..41673f72b5 100644 --- a/web_src/src/pages/workflowv2/mappers/index.ts +++ b/web_src/src/pages/workflowv2/mappers/index.ts @@ -106,6 +106,11 @@ import { triggerRenderers as claudeTriggerRenderers, eventStateRegistry as claudeEventStateRegistry, } from "./claude/index"; +import { + componentMappers as cursorComponentMappers, + triggerRenderers as cursorTriggerRenderers, + eventStateRegistry as cursorEventStateRegistry, +} from "./cursor/index"; import { componentMappers as dockerhubComponentMappers, customFieldRenderers as dockerhubCustomFieldRenderers, @@ -167,6 +172,7 @@ const appMappers: Record> = { openai: openaiComponentMappers, circleci: circleCIComponentMappers, claude: claudeComponentMappers, + cursor: cursorComponentMappers, dockerhub: 
dockerhubComponentMappers, grafana: grafanaComponentMappers, }; @@ -190,6 +196,7 @@ const appTriggerRenderers: Record> = { openai: openaiTriggerRenderers, circleci: circleCITriggerRenderers, claude: claudeTriggerRenderers, + cursor: cursorTriggerRenderers, dockerhub: dockerhubTriggerRenderers, grafana: grafanaTriggerRenderers, }; @@ -212,6 +219,7 @@ const appEventStateRegistries: Record circleci: circleCIEventStateRegistry, claude: claudeEventStateRegistry, aws: awsEventStateRegistry, + cursor: cursorEventStateRegistry, gitlab: gitlabEventStateRegistry, dockerhub: dockerhubEventStateRegistry, grafana: grafanaEventStateRegistry, diff --git a/web_src/src/ui/BuildingBlocksSidebar/index.tsx b/web_src/src/ui/BuildingBlocksSidebar/index.tsx index c7832bcfb7..408b5b030d 100644 --- a/web_src/src/ui/BuildingBlocksSidebar/index.tsx +++ b/web_src/src/ui/BuildingBlocksSidebar/index.tsx @@ -24,6 +24,7 @@ import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; import jiraIcon from "@/assets/icons/integrations/jira.svg"; import openAiIcon from "@/assets/icons/integrations/openai.svg"; import claudeIcon from "@/assets/icons/integrations/claude.svg"; +import cursorIcon from "@/assets/icons/integrations/cursor.svg"; import pagerDutyIcon from "@/assets/icons/integrations/pagerduty.svg"; import slackIcon from "@/assets/icons/integrations/slack.svg"; import awsIcon from "@/assets/icons/integrations/aws.svg"; @@ -406,6 +407,7 @@ function CategorySection({ openai: openAiIcon, "open-ai": openAiIcon, claude: claudeIcon, + cursor: cursorIcon, pagerduty: pagerDutyIcon, rootly: rootlyIcon, semaphore: SemaphoreLogo, @@ -480,6 +482,7 @@ function CategorySection({ openai: openAiIcon, "open-ai": openAiIcon, claude: claudeIcon, + cursor: cursorIcon, pagerduty: pagerDutyIcon, rootly: rootlyIcon, semaphore: SemaphoreLogo, diff --git a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index 4a122cc8fd..8b1e1f30bc 100644 --- 
a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -13,6 +13,7 @@ import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; import jiraIcon from "@/assets/icons/integrations/jira.svg"; import openAiIcon from "@/assets/icons/integrations/openai.svg"; import claudeIcon from "@/assets/icons/integrations/claude.svg"; +import cursorIcon from "@/assets/icons/integrations/cursor.svg"; import pagerDutyIcon from "@/assets/icons/integrations/pagerduty.svg"; import rootlyIcon from "@/assets/icons/integrations/rootly.svg"; import slackIcon from "@/assets/icons/integrations/slack.svg"; @@ -37,6 +38,7 @@ export const INTEGRATION_APP_LOGO_MAP: Record = { openai: openAiIcon, "open-ai": openAiIcon, claude: claudeIcon, + cursor: cursorIcon, pagerduty: pagerDutyIcon, rootly: rootlyIcon, semaphore: SemaphoreLogo, @@ -61,6 +63,7 @@ export const APP_LOGO_MAP: Record> = { openai: openAiIcon, "open-ai": openAiIcon, claude: claudeIcon, + cursor: cursorIcon, pagerduty: pagerDutyIcon, rootly: rootlyIcon, semaphore: SemaphoreLogo, diff --git a/web_src/src/utils/integrationDisplayName.ts b/web_src/src/utils/integrationDisplayName.ts index 64487766ea..1e8f0fb34c 100644 --- a/web_src/src/utils/integrationDisplayName.ts +++ b/web_src/src/utils/integrationDisplayName.ts @@ -7,6 +7,7 @@ const INTEGRATION_TYPE_DISPLAY_NAMES: Record = { gitlab: "GitLab", openai: "OpenAI", claude: "Claude", + cursor: "Cursor", pagerduty: "PagerDuty", slack: "Slack", discord: "Discord", From 7035a34102078f7a9f9f3bbf44f9a9b5f82c8399 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Thu, 12 Feb 2026 14:29:16 +0100 Subject: [PATCH 061/160] feat: Add new cursor.getLastMessage component (#3075) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Cursor.mdx | 51 +++++ pkg/integrations/cursor/client.go 
| 30 +++ pkg/integrations/cursor/cursor.go | 1 + pkg/integrations/cursor/cursor_test.go | 3 +- pkg/integrations/cursor/get_last_message.go | 187 ++++++++++++++++++ .../mappers/cursor/get_last_message.ts | 114 +++++++++++ .../pages/workflowv2/mappers/cursor/index.ts | 3 + 7 files changed, 388 insertions(+), 1 deletion(-) create mode 100644 pkg/integrations/cursor/get_last_message.go create mode 100644 web_src/src/pages/workflowv2/mappers/cursor/get_last_message.ts diff --git a/docs/components/Cursor.mdx b/docs/components/Cursor.mdx index 3c869807c7..37f746ed29 100644 --- a/docs/components/Cursor.mdx +++ b/docs/components/Cursor.mdx @@ -10,6 +10,7 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + @@ -86,6 +87,56 @@ The output includes per-user daily metrics: } ``` + + +## Get Last Message + +The Get Last Message component retrieves the last message from a Cursor Cloud Agent's conversation history. + +### Use Cases + +- **Message tracking**: Get the latest response or prompt from an agent conversation +- **Workflow automation**: Use the last message as input for downstream components +- **Status monitoring**: Check what the agent last communicated + +### How It Works + +1. Fetches the conversation history for the specified agent ID +2. Extracts the last message from the conversation +3. 
Returns the message details including ID, type (user_message or assistant_message), and text + +### Configuration + +- **Agent ID**: The unique identifier for the cloud agent (e.g., bc_abc123) + +### Output + +The output includes: +- **Agent ID**: The identifier of the agent +- **Message**: The last message object containing: + - **ID**: Unique message identifier + - **Type**: Either "user_message" or "assistant_message" + - **Text**: The message content + +### Notes + +- Requires a valid Cursor Cloud Agent API key configured in the integration +- If the agent has been deleted, the conversation cannot be accessed +- Returns nil if the conversation has no messages + +### Example Output + +```json +{ + "agentId": "bc_abc123", + "message": { + "id": "msg_005", + "text": "I've added a troubleshooting section to the README.", + "type": "assistant_message" + } +} +``` + ## Launch Cloud Agent diff --git a/pkg/integrations/cursor/client.go b/pkg/integrations/cursor/client.go index 53e3e017dc..4842cbb907 100644 --- a/pkg/integrations/cursor/client.go +++ b/pkg/integrations/cursor/client.go @@ -52,6 +52,17 @@ type ModelsResponse struct { Models []string `json:"models"` } +type ConversationMessage struct { + ID string `json:"id"` + Type string `json:"type"` + Text string `json:"text"` +} + +type ConversationResponse struct { + ID string `json:"id"` + Messages []ConversationMessage `json:"messages"` +} + func (c *Client) ListModels() ([]string, error) { if c.LaunchAgentKey == "" { return nil, fmt.Errorf("Cloud Agent API key is not configured") @@ -163,6 +174,25 @@ func (c *Client) CancelAgent(agentID string) error { return err } +func (c *Client) GetAgentConversation(agentID string) (*ConversationResponse, error) { + if c.LaunchAgentKey == "" { + return nil, fmt.Errorf("Cloud Agent API key is not configured") + } + + url := fmt.Sprintf("%s/v0/agents/%s/conversation", c.BaseURL, agentID) + responseBody, err := c.execRequest(http.MethodGet, url, nil, c.LaunchAgentKey) + if err 
!= nil { + return nil, err + } + + var response ConversationResponse + if err := json.Unmarshal(responseBody, &response); err != nil { + return nil, fmt.Errorf("failed to unmarshal conversation response: %w", err) + } + + return &response, nil +} + func (c *Client) execRequest(method, URL string, body io.Reader, apiKey string) ([]byte, error) { req, err := http.NewRequest(method, URL, body) if err != nil { diff --git a/pkg/integrations/cursor/cursor.go b/pkg/integrations/cursor/cursor.go index 611ad102e7..c24e3743b5 100644 --- a/pkg/integrations/cursor/cursor.go +++ b/pkg/integrations/cursor/cursor.go @@ -96,6 +96,7 @@ func (i *Cursor) Components() []core.Component { return []core.Component{ &LaunchAgent{}, &GetDailyUsageData{}, + &GetLastMessage{}, } } diff --git a/pkg/integrations/cursor/cursor_test.go b/pkg/integrations/cursor/cursor_test.go index bcb7a4a5d4..21e038f4eb 100644 --- a/pkg/integrations/cursor/cursor_test.go +++ b/pkg/integrations/cursor/cursor_test.go @@ -177,7 +177,7 @@ func Test__Cursor__Components(t *testing.T) { c := &Cursor{} components := c.Components() - assert.Len(t, components, 2) + assert.Len(t, components, 3) names := make([]string, len(components)) for i, comp := range components { @@ -186,6 +186,7 @@ func Test__Cursor__Components(t *testing.T) { assert.Contains(t, names, "cursor.launchAgent") assert.Contains(t, names, "cursor.getDailyUsageData") + assert.Contains(t, names, "cursor.getLastMessage") } func Test__Cursor__ListResources(t *testing.T) { diff --git a/pkg/integrations/cursor/get_last_message.go b/pkg/integrations/cursor/get_last_message.go new file mode 100644 index 0000000000..fd2c7d1ec3 --- /dev/null +++ b/pkg/integrations/cursor/get_last_message.go @@ -0,0 +1,187 @@ +package cursor + +import ( + "fmt" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +const ( + GetLastMessagePayloadType = 
"cursor.getLastMessage.result" +) + +type GetLastMessage struct{} + +type GetLastMessageSpec struct { + AgentID string `json:"agentId" mapstructure:"agentId"` +} + +type GetLastMessageOutput struct { + AgentID string `json:"agentId"` + Message *ConversationMessage `json:"message"` +} + +func (c *GetLastMessage) Name() string { + return "cursor.getLastMessage" +} + +func (c *GetLastMessage) Label() string { + return "Get Last Message" +} + +func (c *GetLastMessage) Description() string { + return "Retrieves the last message from a Cursor Cloud Agent conversation." +} + +func (c *GetLastMessage) Documentation() string { + return `The Get Last Message component retrieves the last message from a Cursor Cloud Agent's conversation history. + +## Use Cases + +- **Message tracking**: Get the latest response or prompt from an agent conversation +- **Workflow automation**: Use the last message as input for downstream components +- **Status monitoring**: Check what the agent last communicated + +## How It Works + +1. Fetches the conversation history for the specified agent ID +2. Extracts the last message from the conversation +3. 
Returns the message details including ID, type (user_message or assistant_message), and text + +## Configuration + +- **Agent ID**: The unique identifier for the cloud agent (e.g., bc_abc123) + +## Output + +The output includes: +- **Agent ID**: The identifier of the agent +- **Message**: The last message object containing: + - **ID**: Unique message identifier + - **Type**: Either "user_message" or "assistant_message" + - **Text**: The message content + +## Notes + +- Requires a valid Cursor Cloud Agent API key configured in the integration +- If the agent has been deleted, the conversation cannot be accessed +- Returns nil if the conversation has no messages` +} + +func (c *GetLastMessage) Icon() string { + return "message-square" +} + +func (c *GetLastMessage) Color() string { + return "#3B82F6" +} + +func (c *GetLastMessage) ExampleOutput() map[string]any { + return map[string]any{ + "agentId": "bc_abc123", + "message": map[string]any{ + "id": "msg_005", + "type": "assistant_message", + "text": "I've added a troubleshooting section to the README.", + }, + } +} + +func (c *GetLastMessage) OutputChannels(config any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *GetLastMessage) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "agentId", + Label: "Agent ID", + Type: configuration.FieldTypeString, + Required: true, + Placeholder: `{{ $["cursor.launchAgent"].data.agentId }}`, + }, + } +} + +func (c *GetLastMessage) Setup(ctx core.SetupContext) error { + spec := GetLastMessageSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if spec.AgentID == "" { + return fmt.Errorf("agent ID is required") + } + + return nil +} + +func (c *GetLastMessage) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *GetLastMessage) Execute(ctx 
core.ExecutionContext) error { + spec := GetLastMessageSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if spec.AgentID == "" { + return fmt.Errorf("agent ID is required") + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("failed to create cursor client: %w", err) + } + + if client.LaunchAgentKey == "" { + return fmt.Errorf("cloud agent API key is not configured in the integration") + } + + ctx.Logger.Infof("Fetching conversation for agent %s", spec.AgentID) + + conversation, err := client.GetAgentConversation(spec.AgentID) + if err != nil { + return fmt.Errorf("failed to fetch conversation: %w", err) + } + + output := GetLastMessageOutput{ + AgentID: spec.AgentID, + Message: nil, + } + + // Extract the last message from the messages array + if conversation.Messages != nil && len(conversation.Messages) > 0 { + lastMessage := conversation.Messages[len(conversation.Messages)-1] + output.Message = &lastMessage + ctx.Logger.Infof("Retrieved last message: %s (type: %s)", lastMessage.ID, lastMessage.Type) + } else { + ctx.Logger.Infof("No messages found in conversation") + } + + return ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, GetLastMessagePayloadType, []any{output}) +} + +func (c *GetLastMessage) Actions() []core.Action { + return []core.Action{} +} + +func (c *GetLastMessage) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *GetLastMessage) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return 200, nil +} + +func (c *GetLastMessage) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *GetLastMessage) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/web_src/src/pages/workflowv2/mappers/cursor/get_last_message.ts b/web_src/src/pages/workflowv2/mappers/cursor/get_last_message.ts new file mode 100644 index 0000000000..4c1dbe9079 --- 
/dev/null +++ b/web_src/src/pages/workflowv2/mappers/cursor/get_last_message.ts @@ -0,0 +1,114 @@ +import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { getState, getStateMap, getTriggerRenderer } from ".."; +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + ExecutionInfo, + NodeInfo, + OutputPayload, + SubtitleContext, +} from "../types"; +import cursorIcon from "@/assets/icons/integrations/cursor.svg"; +import { formatTimeAgo } from "@/utils/date"; + +type GetLastMessagePayload = { + agentId?: string; + message?: { + id?: string; + type?: string; + text?: string; + }; +}; + +function formatMessageType(type: string | undefined): string | undefined { + if (!type) return undefined; + if (type === "user_message") return "User"; + if (type === "assistant_message") return "Assistant"; + return type; +} + +export const getLastMessageMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; + const componentName = context.componentDefinition.name ?? "cursor"; + + return { + iconSrc: cursorIcon, + iconSlug: context.componentDefinition?.icon ?? "cpu", + collapsedBackground: "bg-white", + collapsed: context.node.isCollapsed, + title: + context.node.name || + context.componentDefinition?.label || + context.componentDefinition?.name || + "Get Last Message", + eventSections: lastExecution + ? 
getLastMessageEventSections(context.nodes, lastExecution, componentName) + : undefined, + includeEmptyState: !lastExecution, + eventStateMap: getStateMap(componentName), + }; + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const details: Record = {}; + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const payload = outputs?.default?.[0]; + const data = payload?.data as GetLastMessagePayload | undefined; + + if (data?.agentId) { + details["Agent ID"] = data.agentId; + } + + if (data?.message?.id) { + details["Message ID"] = data.message.id; + } + + if (data?.message?.type) { + const messageType = formatMessageType(data.message.type); + if (messageType) { + details["Message Type"] = messageType; + } + } + + if (data?.message?.text) { + const text = data.message.text; + const truncated = text.length > 100 ? text.substring(0, 100) + "..." : text; + details["Message Text"] = truncated; + } + + if (payload?.timestamp) { + details["Fetched At"] = new Date(payload.timestamp).toLocaleString(); + } + + return details; + }, + + subtitle(context: SubtitleContext): string { + const timestamp = context.execution.updatedAt || context.execution.createdAt; + return timestamp ? formatTimeAgo(new Date(timestamp)) : ""; + }, +}; + +function getLastMessageEventSections( + nodes: NodeInfo[], + execution: ExecutionInfo, + componentName: string, +): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); + const subtitleTimestamp = execution.updatedAt || execution.createdAt; + const eventSubtitle = subtitleTimestamp ? 
formatTimeAgo(new Date(subtitleTimestamp)) : ""; + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle, + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/cursor/index.ts b/web_src/src/pages/workflowv2/mappers/cursor/index.ts index d9ea382696..455e80de74 100644 --- a/web_src/src/pages/workflowv2/mappers/cursor/index.ts +++ b/web_src/src/pages/workflowv2/mappers/cursor/index.ts @@ -2,10 +2,12 @@ import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../typ import { buildActionStateRegistry } from "../utils"; import { launchAgentMapper } from "./launch_agent"; import { getDailyUsageDataMapper } from "./get_daily_usage_data"; +import { getLastMessageMapper } from "./get_last_message"; export const componentMappers: Record = { launchAgent: launchAgentMapper, getDailyUsageData: getDailyUsageDataMapper, + getLastMessage: getLastMessageMapper, }; export const triggerRenderers: Record = {}; @@ -13,4 +15,5 @@ export const triggerRenderers: Record = {}; export const eventStateRegistry: Record = { launchAgent: buildActionStateRegistry("completed"), getDailyUsageData: buildActionStateRegistry("completed"), + getLastMessage: buildActionStateRegistry("completed"), }; From 55c58d9f331d5e1eae6491966c75f17e2568cd2b Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Thu, 12 Feb 2026 12:54:21 -0300 Subject: [PATCH 062/160] feat: aws.cloudwatch.onAlarm trigger (#3023) Bootstrapping the AWS CloudWatch integration, adding a `aws.cloudwatch.onAlarm` trigger to receive events when AWS CloudWatch alarms transitions to a different state. 
--------- Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- docs/components/AWS.mdx | 60 ++++ pkg/integrations/aws/aws.go | 2 + pkg/integrations/aws/cloudwatch/common.go | 24 ++ pkg/integrations/aws/cloudwatch/example.go | 18 + .../aws/cloudwatch/example_data_on_alarm.json | 29 ++ pkg/integrations/aws/cloudwatch/on_alarm.go | 333 ++++++++++++++++++ .../aws/cloudwatch/on_alarm_test.go | 278 +++++++++++++++ .../icons/integrations/aws.cloudwatch.svg | 10 + .../mappers/aws/cloudwatch/on_alarm.ts | 103 ++++++ .../mappers/aws/cloudwatch/types.ts | 19 + .../src/pages/workflowv2/mappers/aws/index.ts | 2 + .../src/ui/BuildingBlocksSidebar/index.tsx | 3 + .../ui/componentSidebar/integrationIcons.tsx | 2 + 13 files changed, 883 insertions(+) create mode 100644 pkg/integrations/aws/cloudwatch/common.go create mode 100644 pkg/integrations/aws/cloudwatch/example.go create mode 100644 pkg/integrations/aws/cloudwatch/example_data_on_alarm.json create mode 100644 pkg/integrations/aws/cloudwatch/on_alarm.go create mode 100644 pkg/integrations/aws/cloudwatch/on_alarm_test.go create mode 100644 web_src/src/assets/icons/integrations/aws.cloudwatch.svg create mode 100644 web_src/src/pages/workflowv2/mappers/aws/cloudwatch/on_alarm.ts create mode 100644 web_src/src/pages/workflowv2/mappers/aws/cloudwatch/types.ts diff --git a/docs/components/AWS.mdx b/docs/components/AWS.mdx index 9241b9e2e2..2204b13964 100644 --- a/docs/components/AWS.mdx +++ b/docs/components/AWS.mdx @@ -7,6 +7,7 @@ Manage resources and execute AWS commands in workflows ## Triggers + @@ -34,6 +35,65 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; Initially, you can leave the **"IAM Role ARN"** field empty, as you will be guided through the identity provider and IAM role creation process. + + +## CloudWatch • On Alarm + +The On Alarm trigger starts a workflow execution when a CloudWatch alarm transitions to the ALARM state. 
+ +### Use Cases + +- **Incident response**: Notify responders and open incidents when alarms fire +- **Auto-remediation**: Execute rollback or recovery workflows immediately +- **Audit and reporting**: Track alarm transitions over time + +### Configuration + +- **Region**: AWS region where alarms are evaluated +- **Alarms**: Optional alarm name filters (supports equals, not-equals, and regex matches) +- **State**: Only trigger for alarms in the specified state (OK, ALARM, or INSUFFICIENT_DATA) + +### Event Data + +Each alarm event includes: +- **detail.alarmName**: CloudWatch alarm name +- **detail.state.value**: Current alarm state +- **detail.previousState.value**: Previous alarm state + +### Example Data + +```json +{ + "data": { + "account": "123456789012", + "detail": { + "alarmName": "HighCPUUtilization", + "previousState": { + "reason": "Threshold Crossed: 1 datapoint [35.0 (20/11/24 20:29:00)] was not greater than or equal to the threshold (90.0).", + "timestamp": "2024-11-20T20:30:33.000+0000", + "value": "OK" + }, + "state": { + "reason": "Threshold Crossed: 1 datapoint [95.0 (20/11/24 20:34:00)] was greater than or equal to the threshold (90.0).", + "timestamp": "2024-11-20T20:35:33.000+0000", + "value": "ALARM" + } + }, + "detail-type": "CloudWatch Alarm State Change", + "id": "2f1ecf5c-8bc9-4b7d-9e76-8df420e8e1a7", + "region": "us-east-1", + "resources": [ + "arn:aws:cloudwatch:us-east-1:123456789012:alarm:HighCPUUtilization" + ], + "source": "aws.cloudwatch", + "time": "2024-11-20T20:35:33Z", + "version": "0" + }, + "timestamp": "2026-02-10T12:00:00Z", + "type": "aws.cloudwatch.alarm" +} +``` + ## CodeArtifact • On Package Version diff --git a/pkg/integrations/aws/aws.go b/pkg/integrations/aws/aws.go index a90e3ec94c..135915f9a5 100644 --- a/pkg/integrations/aws/aws.go +++ b/pkg/integrations/aws/aws.go @@ -16,6 +16,7 @@ import ( "github.com/superplanehq/superplane/pkg/configuration" "github.com/superplanehq/superplane/pkg/core" 
"github.com/superplanehq/superplane/pkg/crypto" + "github.com/superplanehq/superplane/pkg/integrations/aws/cloudwatch" "github.com/superplanehq/superplane/pkg/integrations/aws/codeartifact" "github.com/superplanehq/superplane/pkg/integrations/aws/common" "github.com/superplanehq/superplane/pkg/integrations/aws/ecr" @@ -146,6 +147,7 @@ func (a *AWS) Components() []core.Component { func (a *AWS) Triggers() []core.Trigger { return []core.Trigger{ + &cloudwatch.OnAlarm{}, &codeartifact.OnPackageVersion{}, &ecr.OnImageScan{}, &ecr.OnImagePush{}, diff --git a/pkg/integrations/aws/cloudwatch/common.go b/pkg/integrations/aws/cloudwatch/common.go new file mode 100644 index 0000000000..9c13f54a61 --- /dev/null +++ b/pkg/integrations/aws/cloudwatch/common.go @@ -0,0 +1,24 @@ +package cloudwatch + +import "github.com/superplanehq/superplane/pkg/configuration" + +const ( + AlarmStateOK = "OK" + AlarmStateAlarm = "ALARM" + AlarmStateInsufficientData = "INSUFFICIENT_DATA" +) + +var AllAlarmStates = []configuration.FieldOption{ + { + Label: "OK", + Value: AlarmStateOK, + }, + { + Label: "ALARM", + Value: AlarmStateAlarm, + }, + { + Label: "INSUFFICIENT_DATA", + Value: AlarmStateInsufficientData, + }, +} diff --git a/pkg/integrations/aws/cloudwatch/example.go b/pkg/integrations/aws/cloudwatch/example.go new file mode 100644 index 0000000000..5e5ad28fa0 --- /dev/null +++ b/pkg/integrations/aws/cloudwatch/example.go @@ -0,0 +1,18 @@ +package cloudwatch + +import ( + _ "embed" + "sync" + + "github.com/superplanehq/superplane/pkg/utils" +) + +//go:embed example_data_on_alarm.json +var exampleDataOnAlarmBytes []byte + +var exampleDataOnAlarmOnce sync.Once +var exampleDataOnAlarm map[string]any + +func (t *OnAlarm) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnAlarmOnce, exampleDataOnAlarmBytes, &exampleDataOnAlarm) +} diff --git a/pkg/integrations/aws/cloudwatch/example_data_on_alarm.json b/pkg/integrations/aws/cloudwatch/example_data_on_alarm.json 
new file mode 100644 index 0000000000..39e5afb428 --- /dev/null +++ b/pkg/integrations/aws/cloudwatch/example_data_on_alarm.json @@ -0,0 +1,29 @@ +{ + "data": { + "version": "0", + "id": "2f1ecf5c-8bc9-4b7d-9e76-8df420e8e1a7", + "detail-type": "CloudWatch Alarm State Change", + "source": "aws.cloudwatch", + "account": "123456789012", + "time": "2024-11-20T20:35:33Z", + "region": "us-east-1", + "resources": [ + "arn:aws:cloudwatch:us-east-1:123456789012:alarm:HighCPUUtilization" + ], + "detail": { + "alarmName": "HighCPUUtilization", + "state": { + "value": "ALARM", + "reason": "Threshold Crossed: 1 datapoint [95.0 (20/11/24 20:34:00)] was greater than or equal to the threshold (90.0).", + "timestamp": "2024-11-20T20:35:33.000+0000" + }, + "previousState": { + "value": "OK", + "reason": "Threshold Crossed: 1 datapoint [35.0 (20/11/24 20:29:00)] was not greater than or equal to the threshold (90.0).", + "timestamp": "2024-11-20T20:30:33.000+0000" + } + } + }, + "timestamp": "2026-02-10T12:00:00Z", + "type": "aws.cloudwatch.alarm" +} diff --git a/pkg/integrations/aws/cloudwatch/on_alarm.go b/pkg/integrations/aws/cloudwatch/on_alarm.go new file mode 100644 index 0000000000..ad729063a0 --- /dev/null +++ b/pkg/integrations/aws/cloudwatch/on_alarm.go @@ -0,0 +1,333 @@ +package cloudwatch + +import ( + "fmt" + "net/http" + "slices" + "strings" + "time" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/integrations/aws/common" +) + +const ( + Source = "aws.cloudwatch" + DetailTypeAlarmStateChange = "CloudWatch Alarm State Change" +) + +type OnAlarm struct{} + +type OnAlarmConfiguration struct { + Region string `json:"region" mapstructure:"region"` + Alarms []configuration.Predicate `json:"alarms" mapstructure:"alarms"` + State string `json:"state" mapstructure:"state"` +} + +type OnAlarmMetadata struct { + Region string `json:"region" 
mapstructure:"region"` + SubscriptionID string `json:"subscriptionId" mapstructure:"subscriptionId"` +} + +type AlarmStateChangeDetail struct { + AlarmName string `json:"alarmName" mapstructure:"alarmName"` + State AlarmState `json:"state" mapstructure:"state"` + PreviousState AlarmState `json:"previousState" mapstructure:"previousState"` +} + +type AlarmState struct { + Value string `json:"value" mapstructure:"value"` + Reason string `json:"reason" mapstructure:"reason"` + Timestamp string `json:"timestamp" mapstructure:"timestamp"` +} + +func (p *OnAlarm) Name() string { + return "aws.cloudwatch.onAlarm" +} + +func (p *OnAlarm) Label() string { + return "CloudWatch • On Alarm" +} + +func (p *OnAlarm) Description() string { + return "Listen to AWS CloudWatch alarm state change events" +} + +func (p *OnAlarm) Documentation() string { + return `The On Alarm trigger starts a workflow execution when a CloudWatch alarm transitions to the ALARM state. + +## Use Cases + +- **Incident response**: Notify responders and open incidents when alarms fire +- **Auto-remediation**: Execute rollback or recovery workflows immediately +- **Audit and reporting**: Track alarm transitions over time + +## Configuration + +- **Region**: AWS region where alarms are evaluated +- **Alarms**: Optional alarm name filters (supports equals, not-equals, and regex matches) +- **State**: Only trigger for alarms in the specified state (OK, ALARM, or INSUFFICIENT_DATA) + +## Event Data + +Each alarm event includes: +- **detail.alarmName**: CloudWatch alarm name +- **detail.state.value**: Current alarm state +- **detail.previousState.value**: Previous alarm state +` +} + +func (p *OnAlarm) Icon() string { + return "aws" +} + +func (p *OnAlarm) Color() string { + return "gray" +} + +func (p *OnAlarm) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "region", + Label: "Region", + Type: configuration.FieldTypeSelect, + Required: true, + Default: "us-east-1", + 
TypeOptions: &configuration.TypeOptions{ + Select: &configuration.SelectTypeOptions{ + Options: common.AllRegions, + }, + }, + }, + { + Name: "state", + Label: "Alarm State", + Type: configuration.FieldTypeSelect, + Required: true, + Default: AlarmStateAlarm, + TypeOptions: &configuration.TypeOptions{ + Select: &configuration.SelectTypeOptions{ + Options: AllAlarmStates, + }, + }, + }, + { + Name: "alarms", + Label: "Alarms", + Type: configuration.FieldTypeAnyPredicateList, + Required: false, + TypeOptions: &configuration.TypeOptions{ + AnyPredicateList: &configuration.AnyPredicateListTypeOptions{ + Operators: configuration.AllPredicateOperators, + }, + }, + }, + } +} + +func (p *OnAlarm) Setup(ctx core.TriggerContext) error { + metadata := OnAlarmMetadata{} + if err := mapstructure.Decode(ctx.Metadata.Get(), &metadata); err != nil { + return fmt.Errorf("failed to decode metadata: %w", err) + } + + config := OnAlarmConfiguration{} + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if strings.TrimSpace(config.Region) == "" { + return fmt.Errorf("region is required") + } + + if metadata.SubscriptionID != "" { + return nil + } + + integrationMetadata := common.IntegrationMetadata{} + if err := mapstructure.Decode(ctx.Integration.GetMetadata(), &integrationMetadata); err != nil { + return fmt.Errorf("failed to decode integration metadata: %w", err) + } + + if integrationMetadata.EventBridge == nil { + return fmt.Errorf("event bridge metadata is not configured") + } + + rule, ok := integrationMetadata.EventBridge.Rules[Source] + if !ok || !slices.Contains(rule.DetailTypes, DetailTypeAlarmStateChange) { + if err := ctx.Metadata.Set(OnAlarmMetadata{Region: config.Region}); err != nil { + return fmt.Errorf("failed to set metadata: %w", err) + } + + return p.provisionRule(ctx.Integration, ctx.Requests, config.Region) + } + + subscriptionID, err := 
ctx.Integration.Subscribe(p.subscriptionPattern(config.Region)) + if err != nil { + return fmt.Errorf("failed to subscribe: %w", err) + } + + return ctx.Metadata.Set(OnAlarmMetadata{ + Region: config.Region, + SubscriptionID: subscriptionID.String(), + }) +} + +func (p *OnAlarm) provisionRule(integration core.IntegrationContext, requests core.RequestContext, region string) error { + err := integration.ScheduleActionCall( + "provisionRule", + common.ProvisionRuleParameters{ + Region: region, + Source: Source, + DetailType: DetailTypeAlarmStateChange, + }, + time.Second, + ) + if err != nil { + return fmt.Errorf("failed to schedule rule provisioning for integration: %w", err) + } + + return requests.ScheduleActionCall( + "checkRuleAvailability", + map[string]any{}, + 5*time.Second, + ) +} + +func (p *OnAlarm) subscriptionPattern(region string) *common.EventBridgeEvent { + return &common.EventBridgeEvent{ + Region: region, + DetailType: DetailTypeAlarmStateChange, + Source: Source, + } +} + +func (p *OnAlarm) Actions() []core.Action { + return []core.Action{ + { + Name: "checkRuleAvailability", + Description: "Check if the EventBridge rule is available", + }, + } +} + +func (p *OnAlarm) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + switch ctx.Name { + case "checkRuleAvailability": + return p.checkRuleAvailability(ctx) + + default: + return nil, fmt.Errorf("unknown action: %s", ctx.Name) + } +} + +func (p *OnAlarm) checkRuleAvailability(ctx core.TriggerActionContext) (map[string]any, error) { + metadata := OnAlarmMetadata{} + if err := mapstructure.Decode(ctx.Metadata.Get(), &metadata); err != nil { + return nil, fmt.Errorf("failed to decode metadata: %w", err) + } + + integrationMetadata := common.IntegrationMetadata{} + if err := mapstructure.Decode(ctx.Integration.GetMetadata(), &integrationMetadata); err != nil { + return nil, fmt.Errorf("failed to decode integration metadata: %w", err) + } + + if integrationMetadata.EventBridge == nil { + 
ctx.Logger.Infof("EventBridge metadata not found - checking again in 10 seconds") + return nil, ctx.Requests.ScheduleActionCall( + "checkRuleAvailability", + map[string]any{}, + 10*time.Second, + ) + } + + rule, ok := integrationMetadata.EventBridge.Rules[Source] + if !ok { + ctx.Logger.Infof("Rule not found for source %s - checking again in 10 seconds", Source) + return nil, ctx.Requests.ScheduleActionCall( + "checkRuleAvailability", + map[string]any{}, + 10*time.Second, + ) + } + + if !slices.Contains(rule.DetailTypes, DetailTypeAlarmStateChange) { + ctx.Logger.Infof("Rule does not have detail type '%s' - checking again in 10 seconds", DetailTypeAlarmStateChange) + return nil, ctx.Requests.ScheduleActionCall( + "checkRuleAvailability", + map[string]any{}, + 10*time.Second, + ) + } + + subscriptionID, err := ctx.Integration.Subscribe(p.subscriptionPattern(metadata.Region)) + if err != nil { + return nil, fmt.Errorf("failed to subscribe: %w", err) + } + + metadata.SubscriptionID = subscriptionID.String() + return nil, ctx.Metadata.Set(metadata) +} + +func (p *OnAlarm) OnIntegrationMessage(ctx core.IntegrationMessageContext) error { + metadata := OnAlarmMetadata{} + if err := mapstructure.Decode(ctx.NodeMetadata.Get(), &metadata); err != nil { + return fmt.Errorf("failed to decode metadata: %w", err) + } + + config := OnAlarmConfiguration{} + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + event := common.EventBridgeEvent{} + if err := mapstructure.Decode(ctx.Message, &event); err != nil { + return fmt.Errorf("failed to decode message: %w", err) + } + + if metadata.Region != "" && event.Region != metadata.Region { + ctx.Logger.Infof("Skipping event for region %s, expected %s", event.Region, metadata.Region) + return nil + } + + detail := AlarmStateChangeDetail{} + if err := mapstructure.Decode(event.Detail, &detail); err != nil { + return fmt.Errorf("failed to decode event 
detail: %w", err) + } + + alarmName := strings.TrimSpace(detail.AlarmName) + if alarmName == "" { + return fmt.Errorf("missing alarm name in event") + } + + state := strings.TrimSpace(detail.State.Value) + if state == "" { + return fmt.Errorf("missing alarm state in event") + } + + if state != config.State { + ctx.Logger.Infof("Skipping event for alarm %s with state %s", alarmName, state) + return nil + } + + if len(config.Alarms) > 0 { + if !configuration.MatchesAnyPredicate(config.Alarms, alarmName) { + ctx.Logger.Infof("Skipping event for alarm %s, does not match any predicate: %v", alarmName, config.Alarms) + return nil + } + } + + return ctx.Events.Emit("aws.cloudwatch.alarm", ctx.Message) +} + +func (p *OnAlarm) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + // no-op, since events are received through the integration + // and routed to OnIntegrationMessage() + return http.StatusOK, nil +} + +func (p *OnAlarm) Cleanup(ctx core.TriggerContext) error { + return nil +} diff --git a/pkg/integrations/aws/cloudwatch/on_alarm_test.go b/pkg/integrations/aws/cloudwatch/on_alarm_test.go new file mode 100644 index 0000000000..e719bc0441 --- /dev/null +++ b/pkg/integrations/aws/cloudwatch/on_alarm_test.go @@ -0,0 +1,278 @@ +package cloudwatch + +import ( + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/integrations/aws/common" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__OnAlarm__Setup(t *testing.T) { + trigger := &OnAlarm{} + + t.Run("rule missing -> schedules provisioning and check", func(t *testing.T) { + metadata := &contexts.MetadataContext{} + requests := &contexts.RequestContext{} + integrationCtx := &contexts.IntegrationContext{ + Metadata: common.IntegrationMetadata{ + EventBridge: 
&common.EventBridgeMetadata{ + Rules: map[string]common.EventBridgeRuleMetadata{}, + }, + }, + } + + err := trigger.Setup(core.TriggerContext{ + Logger: logrus.NewEntry(logrus.New()), + Integration: integrationCtx, + Metadata: metadata, + Requests: requests, + Configuration: OnAlarmConfiguration{Region: "us-east-1"}, + }) + + require.NoError(t, err) + require.Len(t, integrationCtx.ActionRequests, 1) + assert.Equal(t, "provisionRule", integrationCtx.ActionRequests[0].ActionName) + + params := integrationCtx.ActionRequests[0].Parameters.(common.ProvisionRuleParameters) + assert.Equal(t, "us-east-1", params.Region) + assert.Equal(t, Source, params.Source) + assert.Equal(t, DetailTypeAlarmStateChange, params.DetailType) + + assert.Equal(t, "checkRuleAvailability", requests.Action) + assert.Equal(t, 5*time.Second, requests.Duration) + + stored, ok := metadata.Get().(OnAlarmMetadata) + require.True(t, ok) + assert.Equal(t, "us-east-1", stored.Region) + assert.Empty(t, stored.SubscriptionID) + }) + + t.Run("rule available -> subscribes", func(t *testing.T) { + metadata := &contexts.MetadataContext{} + integrationCtx := &contexts.IntegrationContext{ + Metadata: common.IntegrationMetadata{ + EventBridge: &common.EventBridgeMetadata{ + Rules: map[string]common.EventBridgeRuleMetadata{ + Source: { + Source: Source, + DetailTypes: []string{DetailTypeAlarmStateChange}, + }, + }, + }, + }, + } + + err := trigger.Setup(core.TriggerContext{ + Logger: logrus.NewEntry(logrus.New()), + Integration: integrationCtx, + Metadata: metadata, + Configuration: OnAlarmConfiguration{Region: "us-east-1"}, + }) + + require.NoError(t, err) + require.Len(t, integrationCtx.Subscriptions, 1) + + stored, ok := metadata.Get().(OnAlarmMetadata) + require.True(t, ok) + assert.Equal(t, "us-east-1", stored.Region) + assert.NotEmpty(t, stored.SubscriptionID) + }) +} + +func Test__OnAlarm__HandleAction(t *testing.T) { + trigger := &OnAlarm{} + + t.Run("rule missing -> reschedules check", func(t *testing.T) 
{ + requests := &contexts.RequestContext{} + _, err := trigger.HandleAction(core.TriggerActionContext{ + Name: "checkRuleAvailability", + Logger: logrus.NewEntry(logrus.New()), + Requests: requests, + Metadata: &contexts.MetadataContext{ + Metadata: OnAlarmMetadata{Region: "us-east-1"}, + }, + Integration: &contexts.IntegrationContext{ + Metadata: common.IntegrationMetadata{ + EventBridge: &common.EventBridgeMetadata{ + Rules: map[string]common.EventBridgeRuleMetadata{}, + }, + }, + }, + }) + + require.NoError(t, err) + assert.Equal(t, "checkRuleAvailability", requests.Action) + assert.Equal(t, 10*time.Second, requests.Duration) + }) + + t.Run("rule available -> subscribes", func(t *testing.T) { + requests := &contexts.RequestContext{} + metadata := &contexts.MetadataContext{ + Metadata: OnAlarmMetadata{Region: "us-east-1"}, + } + integrationCtx := &contexts.IntegrationContext{ + Metadata: common.IntegrationMetadata{ + EventBridge: &common.EventBridgeMetadata{ + Rules: map[string]common.EventBridgeRuleMetadata{ + Source: { + Source: Source, + DetailTypes: []string{DetailTypeAlarmStateChange}, + }, + }, + }, + }, + } + + _, err := trigger.HandleAction(core.TriggerActionContext{ + Name: "checkRuleAvailability", + Logger: logrus.NewEntry(logrus.New()), + Requests: requests, + Metadata: metadata, + Integration: integrationCtx, + }) + + require.NoError(t, err) + require.Len(t, integrationCtx.Subscriptions, 1) + + stored, ok := metadata.Get().(OnAlarmMetadata) + require.True(t, ok) + assert.NotEmpty(t, stored.SubscriptionID) + }) +} + +func Test__OnAlarm__OnIntegrationMessage(t *testing.T) { + trigger := &OnAlarm{} + + t.Run("region mismatch -> no event", func(t *testing.T) { + eventContext := &contexts.EventContext{} + err := trigger.OnIntegrationMessage(core.IntegrationMessageContext{ + Logger: logrus.NewEntry(logrus.New()), + Events: eventContext, + NodeMetadata: &contexts.MetadataContext{ + Metadata: OnAlarmMetadata{Region: "us-east-1"}, + }, + Configuration: 
OnAlarmConfiguration{ + Alarms: []configuration.Predicate{ + { + Type: configuration.PredicateTypeMatches, + Value: ".*", + }, + }, + }, + Message: common.EventBridgeEvent{ + Region: "us-west-2", + Detail: map[string]any{ + "alarmName": "HighCPUUtilization", + "state": map[string]any{ + "value": "ALARM", + }, + }, + }, + }) + + require.NoError(t, err) + assert.Equal(t, 0, eventContext.Count()) + }) + + t.Run("state mismatch -> no event", func(t *testing.T) { + eventContext := &contexts.EventContext{} + err := trigger.OnIntegrationMessage(core.IntegrationMessageContext{ + Logger: logrus.NewEntry(logrus.New()), + Events: eventContext, + NodeMetadata: &contexts.MetadataContext{ + Metadata: OnAlarmMetadata{Region: "us-east-1"}, + }, + Configuration: OnAlarmConfiguration{ + State: AlarmStateAlarm, + Alarms: []configuration.Predicate{ + { + Type: configuration.PredicateTypeMatches, + Value: ".*", + }, + }, + }, + Message: common.EventBridgeEvent{ + Region: "us-east-1", + Detail: map[string]any{ + "alarmName": "HighCPUUtilization", + "state": map[string]any{ + "value": "OK", + }, + }, + }, + }) + + require.NoError(t, err) + assert.Equal(t, 0, eventContext.Count()) + }) + + t.Run("alarm does not match predicates -> no event", func(t *testing.T) { + eventContext := &contexts.EventContext{} + err := trigger.OnIntegrationMessage(core.IntegrationMessageContext{ + Logger: logrus.NewEntry(logrus.New()), + Events: eventContext, + NodeMetadata: &contexts.MetadataContext{ + Metadata: OnAlarmMetadata{Region: "us-east-1"}, + }, + Configuration: OnAlarmConfiguration{ + Alarms: []configuration.Predicate{ + { + Type: configuration.PredicateTypeEquals, + Value: "APIErrorRateHigh", + }, + }, + }, + Message: common.EventBridgeEvent{ + Region: "us-east-1", + Detail: map[string]any{ + "alarmName": "HighCPUUtilization", + "state": map[string]any{ + "value": "ALARM", + }, + }, + }, + }) + + require.NoError(t, err) + assert.Equal(t, 0, eventContext.Count()) + }) + + t.Run("matching alarm -> 
emits event", func(t *testing.T) { + eventContext := &contexts.EventContext{} + err := trigger.OnIntegrationMessage(core.IntegrationMessageContext{ + Logger: logrus.NewEntry(logrus.New()), + Events: eventContext, + NodeMetadata: &contexts.MetadataContext{ + Metadata: OnAlarmMetadata{Region: "us-east-1"}, + }, + Configuration: OnAlarmConfiguration{ + State: AlarmStateAlarm, + Alarms: []configuration.Predicate{ + { + Type: configuration.PredicateTypeEquals, + Value: "HighCPUUtilization", + }, + }, + }, + Message: common.EventBridgeEvent{ + Region: "us-east-1", + Detail: map[string]any{ + "alarmName": "HighCPUUtilization", + "state": map[string]any{ + "value": "ALARM", + }, + }, + }, + }) + + require.NoError(t, err) + assert.Equal(t, 1, eventContext.Count()) + assert.Equal(t, "aws.cloudwatch.alarm", eventContext.Payloads[0].Type) + }) +} diff --git a/web_src/src/assets/icons/integrations/aws.cloudwatch.svg b/web_src/src/assets/icons/integrations/aws.cloudwatch.svg new file mode 100644 index 0000000000..97a7e5fe9e --- /dev/null +++ b/web_src/src/assets/icons/integrations/aws.cloudwatch.svg @@ -0,0 +1,10 @@ + + + Icon-Architecture/64/Arch_Amazon-CloudWatch_64 + + + + + + + \ No newline at end of file diff --git a/web_src/src/pages/workflowv2/mappers/aws/cloudwatch/on_alarm.ts b/web_src/src/pages/workflowv2/mappers/aws/cloudwatch/on_alarm.ts new file mode 100644 index 0000000000..78de66147c --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/aws/cloudwatch/on_alarm.ts @@ -0,0 +1,103 @@ +import { TriggerProps } from "@/ui/trigger"; +import { MetadataItem } from "@/ui/metadataList"; +import awsCloudwatchIcon from "@/assets/icons/integrations/aws.cloudwatch.svg"; +import { formatTimeAgo } from "@/utils/date"; +import { getBackgroundColorClass } from "@/utils/colors"; +import { TriggerEventContext, TriggerRenderer, TriggerRendererContext } from "../../types"; +import { Predicate, formatPredicate, stringOrDash } from "../../utils"; +import { CloudWatchAlarmEvent } from 
"./types"; + +interface Configuration { + region?: string; + state?: string; + alarms?: Predicate[]; +} + +function buildMetadataItems(configuration?: Configuration): MetadataItem[] { + const items: MetadataItem[] = []; + const region = configuration?.region; + if (region) { + items.push({ + icon: "globe", + label: region, + }); + } + + if (configuration?.state) { + items.push({ + icon: "bell", + label: configuration.state, + }); + } + + if (configuration?.alarms && configuration.alarms?.length > 0) { + items.push({ + icon: "funnel", + label: configuration.alarms?.map(formatPredicate).join(", "), + }); + } + + return items; +} + +/** + * Renderer for the "aws.cloudwatch.onAlarm" trigger + */ +export const onAlarmTriggerRenderer: TriggerRenderer = { + getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { + const eventData = context.event?.data as CloudWatchAlarmEvent; + const detail = eventData?.detail; + const alarmName = detail?.alarmName; + const state = detail?.state?.value; + const previousState = detail?.previousState?.value; + + let title = "CloudWatch alarm"; + if (alarmName && state && previousState) { + title = `${alarmName} - ${previousState} → ${state}`; + } else if (alarmName) { + title = alarmName; + } + + const subtitle = context.event?.createdAt ? 
formatTimeAgo(new Date(context.event?.createdAt || "")) : ""; + return { title, subtitle }; + }, + + getRootEventValues: (context: TriggerEventContext): Record => { + const eventData = context.event?.data as CloudWatchAlarmEvent; + const detail = eventData?.detail; + + return { + Alarm: stringOrDash(detail?.alarmName), + State: stringOrDash(detail?.state?.value), + "Previous State": stringOrDash(detail?.previousState?.value), + Region: stringOrDash(eventData?.region), + Account: stringOrDash(eventData?.account), + }; + }, + + getTriggerProps: (context: TriggerRendererContext) => { + const { node, definition, lastEvent } = context; + const configuration = node.configuration as Configuration | undefined; + const metadataItems = buildMetadataItems(configuration); + + const props: TriggerProps = { + title: node.name || definition.label || "Unnamed trigger", + iconSrc: awsCloudwatchIcon, + collapsedBackground: getBackgroundColorClass(definition.color), + metadata: metadataItems, + }; + + if (lastEvent) { + const { title, subtitle } = onAlarmTriggerRenderer.getTitleAndSubtitle({ event: lastEvent }); + props.lastEventData = { + title, + subtitle, + receivedAt: new Date(lastEvent.createdAt), + state: "triggered", + eventId: lastEvent.id, + }; + } + + return props; + }, +}; diff --git a/web_src/src/pages/workflowv2/mappers/aws/cloudwatch/types.ts b/web_src/src/pages/workflowv2/mappers/aws/cloudwatch/types.ts new file mode 100644 index 0000000000..760d666f2a --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/aws/cloudwatch/types.ts @@ -0,0 +1,19 @@ +export interface CloudWatchAlarmState { + value?: string; + reason?: string; + timestamp?: string; +} + +export interface CloudWatchAlarmDetail { + alarmName?: string; + state?: CloudWatchAlarmState; + previousState?: CloudWatchAlarmState; +} + +export interface CloudWatchAlarmEvent { + account?: string; + region?: string; + time?: string; + "detail-type"?: string; + detail?: CloudWatchAlarmDetail; +} diff --git 
a/web_src/src/pages/workflowv2/mappers/aws/index.ts b/web_src/src/pages/workflowv2/mappers/aws/index.ts index c773e40462..e431554a64 100644 --- a/web_src/src/pages/workflowv2/mappers/aws/index.ts +++ b/web_src/src/pages/workflowv2/mappers/aws/index.ts @@ -14,6 +14,7 @@ import { deletePackageVersionsMapper } from "./codeartifact/delete_package_versi import { deleteRepositoryMapper } from "./codeartifact/delete_repository"; import { disposePackageVersionsMapper } from "./codeartifact/dispose_package_versions"; import { updatePackageVersionsStatusMapper } from "./codeartifact/update_package_versions_status"; +import { onAlarmTriggerRenderer } from "./cloudwatch/on_alarm"; export const componentMappers: Record = { "lambda.runFunction": runFunctionMapper, @@ -30,6 +31,7 @@ export const componentMappers: Record = { }; export const triggerRenderers: Record = { + "cloudwatch.onAlarm": onAlarmTriggerRenderer, "codeArtifact.onPackageVersion": onPackageVersionTriggerRenderer, "ecr.onImagePush": onImagePushTriggerRenderer, "ecr.onImageScan": onImageScanTriggerRenderer, diff --git a/web_src/src/ui/BuildingBlocksSidebar/index.tsx b/web_src/src/ui/BuildingBlocksSidebar/index.tsx index 408b5b030d..c2c4344dae 100644 --- a/web_src/src/ui/BuildingBlocksSidebar/index.tsx +++ b/web_src/src/ui/BuildingBlocksSidebar/index.tsx @@ -31,6 +31,7 @@ import awsIcon from "@/assets/icons/integrations/aws.svg"; import awsLambdaIcon from "@/assets/icons/integrations/aws.lambda.svg"; import awsEcrIcon from "@/assets/icons/integrations/aws.ecr.svg"; import awsCodeArtifactIcon from "@/assets/icons/integrations/aws.codeartifact.svg"; +import awsCloudwatchIcon from "@/assets/icons/integrations/aws.cloudwatch.svg"; import rootlyIcon from "@/assets/icons/integrations/rootly.svg"; import SemaphoreLogo from "@/assets/semaphore-logo-sign-black.svg"; import sendgridIcon from "@/assets/icons/integrations/sendgrid.svg"; @@ -417,6 +418,7 @@ function CategorySection({ dockerhub: dockerIcon, aws: { codeArtifact: 
awsIcon, + cloudwatch: awsCloudwatchIcon, lambda: awsLambdaIcon, ecr: awsEcrIcon, }, @@ -492,6 +494,7 @@ function CategorySection({ dockerhub: dockerIcon, aws: { codeArtifact: awsCodeArtifactIcon, + cloudwatch: awsCloudwatchIcon, ecr: awsEcrIcon, lambda: awsLambdaIcon, }, diff --git a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index 8b1e1f30bc..48d2f7c91d 100644 --- a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -3,6 +3,7 @@ import React from "react"; import awsIcon from "@/assets/icons/integrations/aws.svg"; import awsLambdaIcon from "@/assets/icons/integrations/aws.lambda.svg"; import circleciIcon from "@/assets/icons/integrations/circleci.svg"; +import awsCloudwatchIcon from "@/assets/icons/integrations/aws.cloudwatch.svg"; import cloudflareIcon from "@/assets/icons/integrations/cloudflare.svg"; import dash0Icon from "@/assets/icons/integrations/dash0.svg"; import datadogIcon from "@/assets/icons/integrations/datadog.svg"; @@ -72,6 +73,7 @@ export const APP_LOGO_MAP: Record> = { render: renderIcon, dockerhub: dockerIcon, aws: { + cloudwatch: awsCloudwatchIcon, lambda: awsLambdaIcon, }, }; From 8d65b7b5cc50c5cf3d368b19a1ae40f83f61e25a Mon Sep 17 00:00:00 2001 From: Manideep Chopperla <130681531+Manideepchopperla@users.noreply.github.com> Date: Thu, 12 Feb 2026 22:17:24 +0530 Subject: [PATCH 063/160] feat: Add rootly.updateIncident action (#2978) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Closes #2541 Adds `rootly.updateIncident` component that updates existing incidents in Rootly via their REST API (`PUT /v1/incidents/{id}`). 
### Backend - **UpdateIncident component** with 8 config fields: `incidentId` (required), `title`, `summary`, `status` (select), `severity` (integration resource), `services` (multi), `teams` (multi), `labels` (key-value list) - **UpdateIncident client method** with JSON:API request/response handling - **ListTeams client method** for team resource picker - **`incidentFromData()` helper** to eliminate field mapping duplication across constructors - Extended `Incident` struct with `sequential_id`, `slug`, `updated_at` fields - Added `team` resource type to `list_resources.go` - **9 tests** (7 Setup + 2 Execute) using `test/support/contexts` ### Frontend - `update_incident.ts` mapper with metadata display (incident ID + fields being updated) - Registered in `componentMappers` and `eventStateRegistry` - Extended `Incident` type with new fields - Added `Updated At` to `getDetailsForIncident` helper ### Docs - Auto-generated `docs/components/Rootly.mdx` ## Demo https://github.com/user-attachments/assets/7c691754-1ba3-4b9d-b0a1-6d2ef1bda4af --------- Signed-off-by: Manideep Co-authored-by: Pedro Leão <60622592+forestileao@users.noreply.github.com> Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Rootly.mdx | 75 ++++- pkg/integrations/rootly/client.go | 261 ++++++++++++--- pkg/integrations/rootly/example.go | 10 + .../example_output_create_incident.json | 16 +- .../example_output_update_incident.json | 17 + pkg/integrations/rootly/list_resources.go | 124 +++++-- pkg/integrations/rootly/rootly.go | 1 + pkg/integrations/rootly/update_incident.go | 315 ++++++++++++++++++ .../rootly/update_incident_test.go | 258 ++++++++++++++ .../pages/workflowv2/mappers/rootly/base.ts | 26 +- .../workflowv2/mappers/rootly/create_event.ts | 23 +- .../mappers/rootly/create_incident.ts | 23 +- .../pages/workflowv2/mappers/rootly/index.ts | 3 + .../workflowv2/mappers/rootly/on_incident.ts | 4 +- .../pages/workflowv2/mappers/rootly/types.ts | 12 +- .../mappers/rootly/update_incident.ts | 
93 ++++++ 16 files changed, 1108 insertions(+), 153 deletions(-) create mode 100644 pkg/integrations/rootly/example_output_update_incident.json create mode 100644 pkg/integrations/rootly/update_incident.go create mode 100644 pkg/integrations/rootly/update_incident_test.go create mode 100644 web_src/src/pages/workflowv2/mappers/rootly/update_incident.ts diff --git a/docs/components/Rootly.mdx b/docs/components/Rootly.mdx index 29c1d4ee7b..e107de59a2 100644 --- a/docs/components/Rootly.mdx +++ b/docs/components/Rootly.mdx @@ -17,6 +17,7 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + @@ -146,18 +147,74 @@ Returns the created incident object including: ```json { "data": { - "incident": { - "id": "abc123-def456", - "severity": "sev1", - "started_at": "2026-01-19T12:00:00Z", - "status": "started", - "summary": "Users are experiencing slow database queries and connection timeouts.", - "title": "Database connection issues", - "url": "https://app.rootly.com/incidents/abc123-def456" - } + "id": "abc123-def456", + "severity": "sev1", + "started_at": "2026-01-19T12:00:00Z", + "status": "started", + "summary": "Users are experiencing slow database queries and connection timeouts.", + "title": "Database connection issues", + "url": "https://app.rootly.com/incidents/abc123-def456" }, "timestamp": "2026-01-19T12:00:00Z", "type": "rootly.incident" } ``` + + +## Update Incident + +The Update Incident component updates an existing incident in Rootly. 
+ +### Use Cases + +- **Status updates**: Update incident status when new information arrives +- **Severity changes**: Adjust severity based on impact assessment +- **Service association**: Attach affected services to an incident +- **Team assignment**: Assign teams to respond to an incident +- **Metadata updates**: Add labels to categorize incidents + +### Configuration + +- **Incident ID**: The UUID of the incident to update (required, supports expressions) +- **Title**: Update the incident title (optional, supports expressions) +- **Summary**: Update the incident summary (optional, supports expressions) +- **Status**: Update the incident status (optional) +- **Sub-Status**: Update the incident sub-status (optional, required by some Rootly accounts when changing status) +- **Severity**: Update the incident severity level (optional) +- **Services**: Services to attach to the incident (optional) +- **Teams**: Teams to attach to the incident (optional) +- **Labels**: Key-value labels for the incident (optional) + +### Output + +Returns the updated incident object including: +- **id**: Incident UUID +- **sequential_id**: Sequential incident number +- **title**: Incident title +- **slug**: URL-friendly slug +- **status**: Current incident status +- **updated_at**: Last update timestamp + +### Example Output + +```json +{ + "data": { + "id": "abc123-def456", + "mitigated_at": "2026-01-19T13:30:00Z", + "sequential_id": 42, + "severity": "sev1", + "slug": "database-connection-issues", + "started_at": "2026-01-19T12:00:00Z", + "status": "mitigated", + "summary": "Root cause identified. 
Connection pool exhausted.", + "title": "Database connection issues - Updated", + "updated_at": "2026-01-19T13:30:00Z", + "url": "https://app.rootly.com/incidents/abc123-def456" + }, + "timestamp": "2026-01-19T13:30:00Z", + "type": "rootly.incident" +} +``` + diff --git a/pkg/integrations/rootly/client.go b/pkg/integrations/rootly/client.go index 04670a525b..ad5688af2c 100644 --- a/pkg/integrations/rootly/client.go +++ b/pkg/integrations/rootly/client.go @@ -191,15 +191,18 @@ func (c *Client) ListSeverities() ([]Severity, error) { // Incident represents a Rootly incident type Incident struct { - ID string `json:"id"` - Title string `json:"title"` - Summary string `json:"summary"` - Status string `json:"status"` - Severity string `json:"severity"` - StartedAt string `json:"started_at"` - ResolvedAt string `json:"resolved_at"` - MitigatedAt string `json:"mitigated_at"` - URL string `json:"url"` + ID string `json:"id"` + SequentialID int `json:"sequential_id"` + Title string `json:"title"` + Slug string `json:"slug"` + Summary string `json:"summary"` + Status string `json:"status"` + Severity string `json:"severity"` + StartedAt string `json:"started_at"` + ResolvedAt string `json:"resolved_at"` + MitigatedAt string `json:"mitigated_at"` + UpdatedAt string `json:"updated_at"` + URL string `json:"url"` } type IncidentData struct { @@ -209,14 +212,17 @@ type IncidentData struct { } type IncidentAttributes struct { - Title string `json:"title"` - Summary string `json:"summary"` - Status string `json:"status"` - Severity string `json:"severity"` - StartedAt string `json:"started_at"` - ResolvedAt string `json:"resolved_at"` - MitigatedAt string `json:"mitigated_at"` - URL string `json:"url"` + Title string `json:"title"` + SequentialID int `json:"sequential_id"` + Slug string `json:"slug"` + Summary string `json:"summary"` + Status string `json:"status"` + Severity any `json:"severity"` + StartedAt string `json:"started_at"` + ResolvedAt string `json:"resolved_at"` + 
MitigatedAt string `json:"mitigated_at"` + UpdatedAt string `json:"updated_at"` + URL string `json:"url"` } type IncidentResponse struct { @@ -253,6 +259,42 @@ type IncidentEventResponse struct { Data IncidentEventData `json:"data"` } +// severityString extracts the severity slug from the API response. +// Rootly returns severity as a string (slug) or an object with slug/name fields. +func severityString(v any) string { + switch s := v.(type) { + case string: + return s + case map[string]any: + if slug, ok := s["slug"].(string); ok { + return slug + } + if name, ok := s["name"].(string); ok { + return name + } + } + + return "" +} + +// incidentFromData converts a JSON:API IncidentData to a flat Incident struct. +func incidentFromData(data IncidentData) *Incident { + return &Incident{ + ID: data.ID, + SequentialID: data.Attributes.SequentialID, + Title: data.Attributes.Title, + Slug: data.Attributes.Slug, + Summary: data.Attributes.Summary, + Status: data.Attributes.Status, + Severity: severityString(data.Attributes.Severity), + StartedAt: data.Attributes.StartedAt, + ResolvedAt: data.Attributes.ResolvedAt, + MitigatedAt: data.Attributes.MitigatedAt, + UpdatedAt: data.Attributes.UpdatedAt, + URL: data.Attributes.URL, + } +} + // CreateIncidentRequest represents the request to create an incident type CreateIncidentRequest struct { Data CreateIncidentData `json:"data"` @@ -298,17 +340,7 @@ func (c *Client) CreateIncident(title, summary, severity string) (*Incident, err return nil, fmt.Errorf("error parsing response: %v", err) } - return &Incident{ - ID: response.Data.ID, - Title: response.Data.Attributes.Title, - Summary: response.Data.Attributes.Summary, - Status: response.Data.Attributes.Status, - Severity: response.Data.Attributes.Severity, - StartedAt: response.Data.Attributes.StartedAt, - ResolvedAt: response.Data.Attributes.ResolvedAt, - MitigatedAt: response.Data.Attributes.MitigatedAt, - URL: response.Data.Attributes.URL, - }, nil + return 
incidentFromData(response.Data), nil } // CreateIncidentEventRequest represents the request to create an incident event. @@ -376,17 +408,7 @@ func (c *Client) GetIncident(id string) (*Incident, error) { return nil, fmt.Errorf("error parsing response: %v", err) } - return &Incident{ - ID: response.Data.ID, - Title: response.Data.Attributes.Title, - Summary: response.Data.Attributes.Summary, - Status: response.Data.Attributes.Status, - Severity: response.Data.Attributes.Severity, - StartedAt: response.Data.Attributes.StartedAt, - ResolvedAt: response.Data.Attributes.ResolvedAt, - MitigatedAt: response.Data.Attributes.MitigatedAt, - URL: response.Data.Attributes.URL, - }, nil + return incidentFromData(response.Data), nil } func (c *Client) ListIncidents() ([]Incident, error) { @@ -404,20 +426,161 @@ func (c *Client) ListIncidents() ([]Incident, error) { incidents := make([]Incident, 0, len(response.Data)) for _, data := range response.Data { - incidents = append(incidents, Incident{ + incidents = append(incidents, *incidentFromData(data)) + } + + return incidents, nil +} + +// Team represents a Rootly team (group) +type Team struct { + ID string `json:"id"` + Name string `json:"name"` + Slug string `json:"slug"` + Description string `json:"description"` +} + +type TeamData struct { + ID string `json:"id"` + Type string `json:"type"` + Attributes TeamAttributes `json:"attributes"` +} + +type TeamAttributes struct { + Name string `json:"name"` + Slug string `json:"slug"` + Description string `json:"description"` +} + +type TeamsResponse struct { + Data []TeamData `json:"data"` +} + +func (c *Client) ListTeams() ([]Team, error) { + url := fmt.Sprintf("%s/teams", c.BaseURL) + responseBody, err := c.execRequest(http.MethodGet, url, nil) + if err != nil { + return nil, err + } + + var response TeamsResponse + err = json.Unmarshal(responseBody, &response) + if err != nil { + return nil, fmt.Errorf("error parsing response: %v", err) + } + + teams := make([]Team, 0, 
len(response.Data)) + for _, data := range response.Data { + teams = append(teams, Team{ ID: data.ID, - Title: data.Attributes.Title, - Summary: data.Attributes.Summary, - Status: data.Attributes.Status, - Severity: data.Attributes.Severity, - StartedAt: data.Attributes.StartedAt, - ResolvedAt: data.Attributes.ResolvedAt, - MitigatedAt: data.Attributes.MitigatedAt, - URL: data.Attributes.URL, + Name: data.Attributes.Name, + Slug: data.Attributes.Slug, + Description: data.Attributes.Description, }) } - return incidents, nil + return teams, nil +} + +// SubStatus represents a Rootly sub-status (custom status) +type SubStatus struct { + ID string `json:"id"` + Name string `json:"name"` + Slug string `json:"slug"` + ParentStatus string `json:"parent_status"` +} + +type SubStatusData struct { + ID string `json:"id"` + Type string `json:"type"` + Attributes SubStatusAttributes `json:"attributes"` +} + +type SubStatusAttributes struct { + Name string `json:"name"` + Slug string `json:"slug"` + ParentStatus string `json:"parent_status"` +} + +type SubStatusesResponse struct { + Data []SubStatusData `json:"data"` +} + +func (c *Client) ListSubStatuses() ([]SubStatus, error) { + url := fmt.Sprintf("%s/sub_statuses", c.BaseURL) + responseBody, err := c.execRequest(http.MethodGet, url, nil) + if err != nil { + return nil, err + } + + var response SubStatusesResponse + err = json.Unmarshal(responseBody, &response) + if err != nil { + return nil, fmt.Errorf("error parsing response: %v", err) + } + + subStatuses := make([]SubStatus, 0, len(response.Data)) + for _, data := range response.Data { + subStatuses = append(subStatuses, SubStatus{ + ID: data.ID, + Name: data.Attributes.Name, + Slug: data.Attributes.Slug, + ParentStatus: data.Attributes.ParentStatus, + }) + } + + return subStatuses, nil +} + +// UpdateIncidentRequest represents the JSON:API request to update an incident +type UpdateIncidentRequest struct { + Data UpdateIncidentData `json:"data"` +} + +type 
UpdateIncidentData struct { + ID string `json:"id"` + Type string `json:"type"` + Attributes UpdateIncidentAttributes `json:"attributes"` +} + +type UpdateIncidentAttributes struct { + Title string `json:"title,omitempty"` + Summary string `json:"summary,omitempty"` + Status string `json:"status,omitempty"` + SubStatusID string `json:"sub_status_id,omitempty"` + SeverityID string `json:"severity_id,omitempty"` + ServiceIDs []string `json:"service_ids,omitempty"` + GroupIDs []string `json:"group_ids,omitempty"` + Labels map[string]string `json:"labels,omitempty"` +} + +func (c *Client) UpdateIncident(id string, attrs UpdateIncidentAttributes) (*Incident, error) { + request := UpdateIncidentRequest{ + Data: UpdateIncidentData{ + ID: id, + Type: "incidents", + Attributes: attrs, + }, + } + + body, err := json.Marshal(request) + if err != nil { + return nil, fmt.Errorf("error marshaling request: %v", err) + } + + url := fmt.Sprintf("%s/incidents/%s", c.BaseURL, id) + responseBody, err := c.execRequest(http.MethodPatch, url, bytes.NewReader(body)) + if err != nil { + return nil, err + } + + var response IncidentResponse + err = json.Unmarshal(responseBody, &response) + if err != nil { + return nil, fmt.Errorf("error parsing response: %v", err) + } + + return incidentFromData(response.Data), nil } // WebhookEndpoint represents a Rootly webhook endpoint diff --git a/pkg/integrations/rootly/example.go b/pkg/integrations/rootly/example.go index 21d8e4741b..89e88c19bf 100644 --- a/pkg/integrations/rootly/example.go +++ b/pkg/integrations/rootly/example.go @@ -19,6 +19,12 @@ var exampleOutputCreateEventBytes []byte var exampleOutputCreateEventOnce sync.Once var exampleOutputCreateEvent map[string]any +//go:embed example_output_update_incident.json +var exampleOutputUpdateIncidentBytes []byte + +var exampleOutputUpdateIncidentOnce sync.Once +var exampleOutputUpdateIncident map[string]any + //go:embed example_data_on_incident.json var exampleDataOnIncidentBytes []byte @@ -33,6 
+39,10 @@ func (c *CreateEvent) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateEventOnce, exampleOutputCreateEventBytes, &exampleOutputCreateEvent) } +func (c *UpdateIncident) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputUpdateIncidentOnce, exampleOutputUpdateIncidentBytes, &exampleOutputUpdateIncident) +} + func (t *OnIncident) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleDataOnIncidentOnce, exampleDataOnIncidentBytes, &exampleDataOnIncident) } diff --git a/pkg/integrations/rootly/example_output_create_incident.json b/pkg/integrations/rootly/example_output_create_incident.json index e4308baba6..519af294a4 100644 --- a/pkg/integrations/rootly/example_output_create_incident.json +++ b/pkg/integrations/rootly/example_output_create_incident.json @@ -1,15 +1,13 @@ { "type": "rootly.incident", "data": { - "incident": { - "id": "abc123-def456", - "title": "Database connection issues", - "summary": "Users are experiencing slow database queries and connection timeouts.", - "status": "started", - "severity": "sev1", - "started_at": "2026-01-19T12:00:00Z", - "url": "https://app.rootly.com/incidents/abc123-def456" - } + "id": "abc123-def456", + "title": "Database connection issues", + "summary": "Users are experiencing slow database queries and connection timeouts.", + "status": "started", + "severity": "sev1", + "started_at": "2026-01-19T12:00:00Z", + "url": "https://app.rootly.com/incidents/abc123-def456" }, "timestamp": "2026-01-19T12:00:00Z" } diff --git a/pkg/integrations/rootly/example_output_update_incident.json b/pkg/integrations/rootly/example_output_update_incident.json new file mode 100644 index 0000000000..ff0a8f2a93 --- /dev/null +++ b/pkg/integrations/rootly/example_output_update_incident.json @@ -0,0 +1,17 @@ +{ + "type": "rootly.incident", + "data": { + "id": "abc123-def456", + "sequential_id": 42, + "title": "Database connection issues - Updated", + 
"slug": "database-connection-issues", + "summary": "Root cause identified. Connection pool exhausted.", + "status": "mitigated", + "severity": "sev1", + "started_at": "2026-01-19T12:00:00Z", + "mitigated_at": "2026-01-19T13:30:00Z", + "updated_at": "2026-01-19T13:30:00Z", + "url": "https://app.rootly.com/incidents/abc123-def456" + }, + "timestamp": "2026-01-19T13:30:00Z" +} diff --git a/pkg/integrations/rootly/list_resources.go b/pkg/integrations/rootly/list_resources.go index 4563044e1c..0d7ee909a0 100644 --- a/pkg/integrations/rootly/list_resources.go +++ b/pkg/integrations/rootly/list_resources.go @@ -10,43 +10,97 @@ import ( func (r *Rootly) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { switch resourceType { case "service": - metadata := Metadata{} - if err := mapstructure.Decode(ctx.Integration.GetMetadata(), &metadata); err != nil { - return nil, fmt.Errorf("failed to decode application metadata: %w", err) - } - - resources := make([]core.IntegrationResource, 0, len(metadata.Services)) - for _, service := range metadata.Services { - resources = append(resources, core.IntegrationResource{ - Type: resourceType, - Name: service.Name, - ID: service.ID, - }) - } - return resources, nil - + return listResourcesForService(ctx) case "severity": - client, err := NewClient(ctx.HTTP, ctx.Integration) - if err != nil { - return nil, fmt.Errorf("failed to create client: %w", err) - } - - severities, err := client.ListSeverities() - if err != nil { - return nil, fmt.Errorf("failed to list severities: %w", err) - } - - resources := make([]core.IntegrationResource, 0, len(severities)) - for _, severity := range severities { - resources = append(resources, core.IntegrationResource{ - Type: resourceType, - Name: severity.Name, - ID: severity.ID, - }) - } - return resources, nil - + return listResourcesForSeverity(ctx) + case "team": + return listResourcesForTeam(ctx) + case "sub_status": + return 
listResourcesForSubStatus(ctx) default: return []core.IntegrationResource{}, nil } } + +func listResourcesForService(ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + metadata := Metadata{} + if err := mapstructure.Decode(ctx.Integration.GetMetadata(), &metadata); err != nil { + return nil, fmt.Errorf("failed to decode application metadata: %w", err) + } + + resources := make([]core.IntegrationResource, 0, len(metadata.Services)) + for _, service := range metadata.Services { + resources = append(resources, core.IntegrationResource{ + Type: "service", + Name: service.Name, + ID: service.ID, + }) + } + return resources, nil +} + +func listResourcesForSeverity(ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, fmt.Errorf("failed to create client: %w", err) + } + + severities, err := client.ListSeverities() + if err != nil { + return nil, fmt.Errorf("failed to list severities: %w", err) + } + + resources := make([]core.IntegrationResource, 0, len(severities)) + for _, severity := range severities { + resources = append(resources, core.IntegrationResource{ + Type: "severity", + Name: severity.Name, + ID: severity.ID, + }) + } + return resources, nil +} + +func listResourcesForTeam(ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, fmt.Errorf("failed to create client: %w", err) + } + + teams, err := client.ListTeams() + if err != nil { + return nil, fmt.Errorf("failed to list teams: %w", err) + } + + resources := make([]core.IntegrationResource, 0, len(teams)) + for _, team := range teams { + resources = append(resources, core.IntegrationResource{ + Type: "team", + Name: team.Name, + ID: team.ID, + }) + } + return resources, nil +} + +func listResourcesForSubStatus(ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + client, err 
:= NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, fmt.Errorf("failed to create client: %w", err) + } + + subStatuses, err := client.ListSubStatuses() + if err != nil { + return nil, fmt.Errorf("failed to list sub-statuses: %w", err) + } + + resources := make([]core.IntegrationResource, 0, len(subStatuses)) + for _, subStatus := range subStatuses { + resources = append(resources, core.IntegrationResource{ + Type: "sub_status", + Name: subStatus.Name, + ID: subStatus.ID, + }) + } + return resources, nil +} diff --git a/pkg/integrations/rootly/rootly.go b/pkg/integrations/rootly/rootly.go index ba96ead14e..cf32d97772 100644 --- a/pkg/integrations/rootly/rootly.go +++ b/pkg/integrations/rootly/rootly.go @@ -63,6 +63,7 @@ func (r *Rootly) Components() []core.Component { return []core.Component{ &CreateIncident{}, &CreateEvent{}, + &UpdateIncident{}, } } diff --git a/pkg/integrations/rootly/update_incident.go b/pkg/integrations/rootly/update_incident.go new file mode 100644 index 0000000000..a2036d4e6a --- /dev/null +++ b/pkg/integrations/rootly/update_incident.go @@ -0,0 +1,315 @@ +package rootly + +import ( + "errors" + "fmt" + "net/http" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type UpdateIncident struct{} + +type UpdateIncidentSpec struct { + IncidentID string `json:"incidentId"` + Title string `json:"title"` + Summary string `json:"summary"` + Status string `json:"status"` + SubStatus string `json:"subStatus"` + Severity string `json:"severity"` + Services []string `json:"services"` + Teams []string `json:"teams"` + Labels []LabelEntry `json:"labels"` +} + +type LabelEntry struct { + Key string `json:"key"` + Value string `json:"value"` +} + +func (c *UpdateIncident) Name() string { + return "rootly.updateIncident" +} + +func (c *UpdateIncident) Label() string { + return "Update Incident" +} + +func (c 
*UpdateIncident) Description() string { + return "Update an existing incident in Rootly" +} + +func (c *UpdateIncident) Documentation() string { + return `The Update Incident component updates an existing incident in Rootly. + +## Use Cases + +- **Status updates**: Update incident status when new information arrives +- **Severity changes**: Adjust severity based on impact assessment +- **Service association**: Attach affected services to an incident +- **Team assignment**: Assign teams to respond to an incident +- **Metadata updates**: Add labels to categorize incidents + +## Configuration + +- **Incident ID**: The UUID of the incident to update (required, supports expressions) +- **Title**: Update the incident title (optional, supports expressions) +- **Summary**: Update the incident summary (optional, supports expressions) +- **Status**: Update the incident status (optional) +- **Sub-Status**: Update the incident sub-status (optional, required by some Rootly accounts when changing status) +- **Severity**: Update the incident severity level (optional) +- **Services**: Services to attach to the incident (optional) +- **Teams**: Teams to attach to the incident (optional) +- **Labels**: Key-value labels for the incident (optional) + +## Output + +Returns the updated incident object including: +- **id**: Incident UUID +- **sequential_id**: Sequential incident number +- **title**: Incident title +- **slug**: URL-friendly slug +- **status**: Current incident status +- **updated_at**: Last update timestamp` +} + +func (c *UpdateIncident) Icon() string { + return "edit" +} + +func (c *UpdateIncident) Color() string { + return "gray" +} + +func (c *UpdateIncident) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *UpdateIncident) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "incidentId", + Label: "Incident ID", + Type: configuration.FieldTypeString, + 
Required: true, + Placeholder: "e.g., abc123-def456", + Description: "The UUID of the incident to update", + }, + { + Name: "title", + Label: "Title", + Type: configuration.FieldTypeString, + Required: false, + Description: "Update the incident title", + }, + { + Name: "summary", + Label: "Summary", + Type: configuration.FieldTypeText, + Required: false, + Description: "Update the incident summary", + }, + { + Name: "status", + Label: "Status", + Type: configuration.FieldTypeSelect, + Required: false, + Description: "Update the incident status", + TypeOptions: &configuration.TypeOptions{ + Select: &configuration.SelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "In Triage", Value: "in_triage"}, + {Label: "Started", Value: "started"}, + {Label: "Detected", Value: "detected"}, + {Label: "Acknowledged", Value: "acknowledged"}, + {Label: "Mitigated", Value: "mitigated"}, + {Label: "Resolved", Value: "resolved"}, + {Label: "Closed", Value: "closed"}, + {Label: "Cancelled", Value: "cancelled"}, + }, + }, + }, + }, + { + Name: "subStatus", + Label: "Sub-Status", + Type: configuration.FieldTypeIntegrationResource, + Required: false, + Description: "Update the incident sub-status (required by some accounts when changing status)", + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "sub_status", + }, + }, + }, + { + Name: "severity", + Label: "Severity", + Type: configuration.FieldTypeIntegrationResource, + Required: false, + Description: "Update the incident severity", + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "severity", + }, + }, + }, + { + Name: "services", + Label: "Services", + Type: configuration.FieldTypeIntegrationResource, + Required: false, + Description: "Services to attach to the incident", + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "service", + Multi: true, + }, + }, + }, + { + Name: 
"teams", + Label: "Teams", + Type: configuration.FieldTypeIntegrationResource, + Required: false, + Description: "Teams to attach to the incident", + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "team", + Multi: true, + }, + }, + }, + { + Name: "labels", + Label: "Labels", + Type: configuration.FieldTypeList, + Required: false, + Description: "Key-value labels for the incident", + TypeOptions: &configuration.TypeOptions{ + List: &configuration.ListTypeOptions{ + ItemLabel: "Label", + ItemDefinition: &configuration.ListItemDefinition{ + Type: configuration.FieldTypeObject, + Schema: []configuration.Field{ + { + Name: "key", + Label: "Key", + Type: configuration.FieldTypeString, + Required: true, + DisallowExpression: true, + Description: "Label key", + }, + { + Name: "value", + Label: "Value", + Type: configuration.FieldTypeString, + Required: true, + Description: "Label value", + }, + }, + }, + }, + }, + }, + } +} + +func (c *UpdateIncident) Setup(ctx core.SetupContext) error { + spec := UpdateIncidentSpec{} + err := mapstructure.Decode(ctx.Configuration, &spec) + if err != nil { + return fmt.Errorf("error decoding configuration: %v", err) + } + + if spec.IncidentID == "" { + return errors.New("incidentId is required") + } + + hasUpdate := spec.Title != "" || spec.Summary != "" || + spec.Status != "" || spec.SubStatus != "" || spec.Severity != "" || + len(spec.Services) > 0 || len(spec.Teams) > 0 || + len(spec.Labels) > 0 + + if !hasUpdate { + return errors.New("at least one field to update must be provided") + } + + return ctx.Metadata.Set(NodeMetadata{}) +} + +func (c *UpdateIncident) Execute(ctx core.ExecutionContext) error { + spec := UpdateIncidentSpec{} + err := mapstructure.Decode(ctx.Configuration, &spec) + if err != nil { + return fmt.Errorf("error decoding configuration: %v", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("error creating client: 
%v", err) + } + + attrs := UpdateIncidentAttributes{ + Title: spec.Title, + Summary: spec.Summary, + Status: spec.Status, + SubStatusID: spec.SubStatus, + SeverityID: spec.Severity, + } + + // Only set list/map fields when non-empty so that empty arrays from the frontend + // do not clear existing services, teams, or labels in Rootly (omitempty omits nil + // but not empty slices in JSON). + if len(spec.Services) > 0 { + attrs.ServiceIDs = spec.Services + } + if len(spec.Teams) > 0 { + attrs.GroupIDs = spec.Teams + } + if len(spec.Labels) > 0 { + labels := make(map[string]string, len(spec.Labels)) + for _, l := range spec.Labels { + labels[l.Key] = l.Value + } + attrs.Labels = labels + } + + incident, err := client.UpdateIncident(spec.IncidentID, attrs) + if err != nil { + return fmt.Errorf("failed to update incident: %v", err) + } + + return ctx.ExecutionState.Emit( + core.DefaultOutputChannel.Name, + "rootly.incident", + []any{incident}, + ) +} + +func (c *UpdateIncident) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *UpdateIncident) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *UpdateIncident) Actions() []core.Action { + return []core.Action{} +} + +func (c *UpdateIncident) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *UpdateIncident) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *UpdateIncident) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/rootly/update_incident_test.go b/pkg/integrations/rootly/update_incident_test.go new file mode 100644 index 0000000000..c02dc78406 --- /dev/null +++ b/pkg/integrations/rootly/update_incident_test.go @@ -0,0 +1,258 @@ +package rootly + +import ( + "encoding/json" + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + 
"github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__UpdateIncident__Setup(t *testing.T) { + component := &UpdateIncident{} + + t.Run("valid configuration with all fields", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{ + "incidentId": "abc123-def456", + "title": "Updated title", + "summary": "Updated summary", + "status": "mitigated", + "subStatus": "sub-status-uuid-1", + "severity": "sev-uuid-123", + "services": []string{"svc-uuid-1"}, + "teams": []string{"team-uuid-1"}, + "labels": []map[string]any{ + {"key": "env", "value": "production"}, + }, + }, + }) + + require.NoError(t, err) + }) + + t.Run("missing incidentId returns error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{ + "title": "Updated title", + }, + }) + + require.ErrorContains(t, err, "incidentId is required") + }) + + t.Run("empty incidentId returns error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{ + "incidentId": "", + "title": "Updated title", + }, + }) + + require.ErrorContains(t, err, "incidentId is required") + }) + + t.Run("no update fields returns error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{ + "incidentId": "abc123-def456", + }, + }) + + require.ErrorContains(t, err, "at least one field to update must be provided") + }) + + t.Run("incidentId with title only", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{ + "incidentId": "abc123-def456", + "title": "New title", + }, + }) + + require.NoError(t, err) + }) + + t.Run("incidentId with status only", func(t 
*testing.T) { + err := component.Setup(core.SetupContext{ + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{ + "incidentId": "abc123-def456", + "status": "resolved", + }, + }) + + require.NoError(t, err) + }) + + t.Run("incidentId with subStatus only", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Metadata: &contexts.MetadataContext{}, + Configuration: map[string]any{ + "incidentId": "abc123-def456", + "subStatus": "sub-status-uuid-1", + }, + }) + + require.NoError(t, err) + }) + + t.Run("invalid configuration format -> decode error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Metadata: &contexts.MetadataContext{}, + Configuration: "invalid-config", + }) + + require.ErrorContains(t, err, "error decoding configuration") + }) +} + +func Test__UpdateIncident__Execute(t *testing.T) { + component := &UpdateIncident{} + + t.Run("successful update emits incident", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{ + "data": { + "id": "inc-uuid-123", + "type": "incidents", + "attributes": { + "title": "Updated Incident", + "sequential_id": 42, + "slug": "updated-incident", + "summary": "Updated summary", + "status": "mitigated", + "severity": "sev1", + "started_at": "2026-01-19T12:00:00Z", + "mitigated_at": "2026-01-19T13:30:00Z", + "updated_at": "2026-01-19T13:30:00Z", + "url": "https://app.rootly.com/incidents/inc-uuid-123" + } + } + }`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "apiKey": "test-api-key", + }, + } + + execState := &contexts.ExecutionStateContext{ + KVs: make(map[string]string), + } + + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "incidentId": "inc-uuid-123", + "title": "Updated Incident", + "summary": "Updated summary", + "status": "mitigated", + "subStatus": 
"sub-status-uuid-1", + "severity": "sev-uuid-1", + "services": []string{"svc-uuid-1", "svc-uuid-2"}, + "teams": []string{"team-uuid-1"}, + "labels": []map[string]any{ + {"key": "env", "value": "production"}, + }, + }, + HTTP: httpContext, + Integration: integrationCtx, + ExecutionState: execState, + }) + + require.NoError(t, err) + assert.True(t, execState.Passed) + assert.Equal(t, core.DefaultOutputChannel.Name, execState.Channel) + assert.Equal(t, "rootly.incident", execState.Type) + assert.Len(t, execState.Payloads, 1) + + // Verify request + require.Len(t, httpContext.Requests, 1) + req := httpContext.Requests[0] + assert.Equal(t, http.MethodPatch, req.Method) + assert.Contains(t, req.URL.String(), "/incidents/inc-uuid-123") + assert.Equal(t, "application/vnd.api+json", req.Header.Get("Content-Type")) + + // Verify request body + body, err := io.ReadAll(req.Body) + require.NoError(t, err) + + var reqBody map[string]any + require.NoError(t, json.Unmarshal(body, &reqBody)) + + data := reqBody["data"].(map[string]any) + assert.Equal(t, "inc-uuid-123", data["id"]) + assert.Equal(t, "incidents", data["type"]) + + attrs := data["attributes"].(map[string]any) + assert.Equal(t, "Updated Incident", attrs["title"]) + assert.Equal(t, "Updated summary", attrs["summary"]) + assert.Equal(t, "mitigated", attrs["status"]) + assert.Equal(t, "sub-status-uuid-1", attrs["sub_status_id"]) + assert.Equal(t, "sev-uuid-1", attrs["severity_id"]) + + serviceIDs := attrs["service_ids"].([]any) + assert.Len(t, serviceIDs, 2) + assert.Equal(t, "svc-uuid-1", serviceIDs[0]) + assert.Equal(t, "svc-uuid-2", serviceIDs[1]) + + groupIDs := attrs["group_ids"].([]any) + assert.Len(t, groupIDs, 1) + assert.Equal(t, "team-uuid-1", groupIDs[0]) + + labels := attrs["labels"].(map[string]any) + assert.Equal(t, "production", labels["env"]) + }) + + t.Run("API error returns error and does not emit", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + 
StatusCode: http.StatusNotFound, + Body: io.NopCloser(strings.NewReader(`{"errors": [{"title": "Record not found"}]}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "apiKey": "test-api-key", + }, + } + + execState := &contexts.ExecutionStateContext{ + KVs: make(map[string]string), + } + + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "incidentId": "nonexistent-id", + "title": "Updated title", + }, + HTTP: httpContext, + Integration: integrationCtx, + ExecutionState: execState, + }) + + require.Error(t, err) + assert.ErrorContains(t, err, "failed to update incident") + assert.False(t, execState.Passed) + assert.Empty(t, execState.Channel) + }) +} diff --git a/web_src/src/pages/workflowv2/mappers/rootly/base.ts b/web_src/src/pages/workflowv2/mappers/rootly/base.ts index 179fe1e4d5..92b5905dc3 100644 --- a/web_src/src/pages/workflowv2/mappers/rootly/base.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/base.ts @@ -1,5 +1,25 @@ +import { EventSection } from "@/ui/componentBase"; +import { getState, getTriggerRenderer } from ".."; +import { ExecutionInfo, NodeInfo } from "../types"; +import { formatTimeAgo } from "@/utils/date"; import { Incident, IncidentEvent } from "./types"; +export function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent! 
}); + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle: formatTimeAgo(new Date(execution.createdAt!)), + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} + export function getDetailsForIncident(incident: Incident): Record { const details: Record = {}; @@ -7,7 +27,7 @@ export function getDetailsForIncident(incident: Incident): Record n.id === execution.rootEvent?.nodeId); - const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); - const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); - - return [ - { - receivedAt: new Date(execution.createdAt!), - eventTitle: title, - eventSubtitle: formatTimeAgo(new Date(execution.createdAt!)), - eventState: getState(componentName)(execution), - eventId: execution.rootEvent!.id!, - }, - ]; -} diff --git a/web_src/src/pages/workflowv2/mappers/rootly/create_incident.ts b/web_src/src/pages/workflowv2/mappers/rootly/create_incident.ts index 962944dcc7..2c13835ce3 100644 --- a/web_src/src/pages/workflowv2/mappers/rootly/create_incident.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/create_incident.ts @@ -1,11 +1,10 @@ -import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { ComponentBaseProps } from "@/ui/componentBase"; import { getBackgroundColorClass } from "@/utils/colors"; -import { getState, getStateMap, getTriggerRenderer } from ".."; +import { getStateMap } from ".."; import { ComponentBaseContext, ComponentBaseMapper, ExecutionDetailsContext, - ExecutionInfo, NodeInfo, OutputPayload, SubtitleContext, @@ -13,7 +12,7 @@ import { import { MetadataItem } from "@/ui/metadataList"; import rootlyIcon from "@/assets/icons/integrations/rootly.svg"; import { Incident } from "./types"; -import { getDetailsForIncident } from "./base"; +import { baseEventSections, getDetailsForIncident } from "./base"; import { formatTimeAgo } from "@/utils/date"; 
export const createIncidentMapper: ComponentBaseMapper = { @@ -62,19 +61,3 @@ function metadataList(node: NodeInfo): MetadataItem[] { return metadata; } - -function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { - const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); - const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); - const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); - - return [ - { - receivedAt: new Date(execution.createdAt!), - eventTitle: title, - eventSubtitle: formatTimeAgo(new Date(execution.createdAt!)), - eventState: getState(componentName)(execution), - eventId: execution.rootEvent!.id!, - }, - ]; -} diff --git a/web_src/src/pages/workflowv2/mappers/rootly/index.ts b/web_src/src/pages/workflowv2/mappers/rootly/index.ts index d4176f273d..a02e55f3fd 100644 --- a/web_src/src/pages/workflowv2/mappers/rootly/index.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/index.ts @@ -2,11 +2,13 @@ import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../typ import { onIncidentTriggerRenderer } from "./on_incident"; import { createIncidentMapper } from "./create_incident"; import { createEventMapper } from "./create_event"; +import { updateIncidentMapper } from "./update_incident"; import { buildActionStateRegistry } from "../utils"; export const componentMappers: Record = { createIncident: createIncidentMapper, createEvent: createEventMapper, + updateIncident: updateIncidentMapper, }; export const triggerRenderers: Record = { @@ -16,4 +18,5 @@ export const triggerRenderers: Record = { export const eventStateRegistry: Record = { createIncident: buildActionStateRegistry("created"), createEvent: buildActionStateRegistry("created"), + updateIncident: buildActionStateRegistry("updated"), }; diff --git a/web_src/src/pages/workflowv2/mappers/rootly/on_incident.ts 
b/web_src/src/pages/workflowv2/mappers/rootly/on_incident.ts index e8f216f763..075d1403ca 100644 --- a/web_src/src/pages/workflowv2/mappers/rootly/on_incident.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/on_incident.ts @@ -35,7 +35,7 @@ export const onIncidentTriggerRenderer: TriggerRenderer = { getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { const eventData = context.event?.data as OnIncidentEventData; const incident = eventData?.incident; - const contentParts = [incident?.severity?.name, incident?.status].filter(Boolean).join(" · "); + const contentParts = [incident?.severity, incident?.status].filter(Boolean).join(" · "); const subtitle = buildSubtitle(contentParts, context.event?.createdAt); return { @@ -72,7 +72,7 @@ export const onIncidentTriggerRenderer: TriggerRenderer = { if (lastEvent) { const eventData = lastEvent.data as OnIncidentEventData; const incident = eventData?.incident; - const contentParts = [incident?.severity?.name, incident?.status].filter(Boolean).join(" · "); + const contentParts = [incident?.severity, incident?.status].filter(Boolean).join(" · "); const subtitle = buildSubtitle(contentParts, lastEvent.createdAt); props.lastEventData = { diff --git a/web_src/src/pages/workflowv2/mappers/rootly/types.ts b/web_src/src/pages/workflowv2/mappers/rootly/types.ts index 9b33908b22..caf7044389 100644 --- a/web_src/src/pages/workflowv2/mappers/rootly/types.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/types.ts @@ -7,22 +7,18 @@ export interface BaseNodeMetadata { }; } -export interface Severity { - id?: string; - name?: string; - slug?: string; - color?: string; -} - export interface Incident { id?: string; + sequential_id?: number; title?: string; + slug?: string; summary?: string; status?: string; - severity?: Severity; + severity?: string; started_at?: string; resolved_at?: string; mitigated_at?: string; + updated_at?: string; url?: string; } diff --git 
a/web_src/src/pages/workflowv2/mappers/rootly/update_incident.ts b/web_src/src/pages/workflowv2/mappers/rootly/update_incident.ts new file mode 100644 index 0000000000..b5a7f7ff6b --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/rootly/update_incident.ts @@ -0,0 +1,93 @@ +import { ComponentBaseProps } from "@/ui/componentBase"; +import { getBackgroundColorClass } from "@/utils/colors"; +import { getStateMap } from ".."; +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + NodeInfo, + OutputPayload, + SubtitleContext, +} from "../types"; +import { MetadataItem } from "@/ui/metadataList"; +import rootlyIcon from "@/assets/icons/integrations/rootly.svg"; +import { Incident } from "./types"; +import { baseEventSections, getDetailsForIncident } from "./base"; +import { formatTimeAgo } from "@/utils/date"; + +export const updateIncidentMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; + const componentName = context.componentDefinition.name || "unknown"; + + return { + iconSrc: rootlyIcon, + collapsedBackground: getBackgroundColorClass(context.componentDefinition.color), + collapsed: context.node.isCollapsed, + title: + context.node.name || + context.componentDefinition.label || + context.componentDefinition.name || + "Unnamed component", + eventSections: lastExecution ? 
baseEventSections(context.nodes, lastExecution, componentName) : undefined, + metadata: metadataList(context.node), + includeEmptyState: !lastExecution, + eventStateMap: getStateMap(componentName), + }; + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { default: OutputPayload[] }; + if (!outputs?.default || outputs.default.length === 0) { + return {}; + } + const incident = outputs.default[0].data as Incident; + return getDetailsForIncident(incident); + }, + + subtitle(context: SubtitleContext): string { + if (!context.execution.createdAt) return ""; + return formatTimeAgo(new Date(context.execution.createdAt)); + }, +}; + +function metadataList(node: NodeInfo): MetadataItem[] { + const metadata: MetadataItem[] = []; + const configuration = node.configuration as Record; + + if (configuration?.incidentId) { + metadata.push({ icon: "alert-triangle", label: `Incident: ${configuration.incidentId}` }); + } + + const updates: string[] = []; + if (configuration?.status) { + updates.push(`Status: ${configuration.status}`); + } + if (configuration?.subStatus) { + updates.push("Sub-Status"); + } + if (configuration?.severity) { + updates.push("Severity"); + } + if (configuration?.title) { + updates.push("Title"); + } + if (configuration?.summary) { + updates.push("Summary"); + } + if (Array.isArray(configuration?.services) && (configuration.services as unknown[]).length > 0) { + updates.push(`Services (${(configuration.services as unknown[]).length})`); + } + if (Array.isArray(configuration?.teams) && (configuration.teams as unknown[]).length > 0) { + updates.push(`Teams (${(configuration.teams as unknown[]).length})`); + } + if (Array.isArray(configuration?.labels) && (configuration.labels as unknown[]).length > 0) { + updates.push(`Labels (${(configuration.labels as unknown[]).length})`); + } + + if (updates.length > 0) { + metadata.push({ icon: "funnel", label: `Updating: ${updates.join(", ")}` }); + } 
+ + return metadata; +} From 1a535ba1694f257704971495933d2773e35ce935 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pedro=20Le=C3=A3o?= <60622592+forestileao@users.noreply.github.com> Date: Thu, 12 Feb 2026 13:58:50 -0300 Subject: [PATCH 064/160] feat: Prometheus Base integration (#3068) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Add a new **Prometheus** integration to SuperPlane with: - Prometheus connection setup (`baseURL` + `none/basic/bearer` API auth) - `On Alert` trigger from Alertmanager webhooks (with optional bearer-secret validation) - `Get Alert` action to fetch an alert by name/state from Prometheus ## What’s included - Backend integration (`pkg/integrations/prometheus/*`) - Prometheus API client (`/api/v1/alerts`, `/api/v1/query`) with auth support, response-size limits, and error handling - `On Alert` trigger pipeline: - Alertmanager webhook parsing - status + alert name filtering - one `prometheus.alert` event emitted per matching alert - Webhook auth validation using `Authorization: Bearer ` with constant-time token comparison - Frontend mappers + icon + registry wiring for Prometheus trigger/action - New docs page: `docs/components/Prometheus.mdx` - Server integration registration (`pkg/server/server.go`) - Integration state-description size increase to 1024 chars (to preserve longer sync/error messages) - Test coverage for client, sync, trigger, webhook handling, and action behavior https://github.com/user-attachments/assets/055a987b-c07c-4a16-a930-0caeaad2c21c image image --------- Signed-off-by: Pedro F. 
Leao Signed-off-by: Muhammad Fuzail Zubari --- ...lations-state-description-to-1024.down.sql | 0 ...allations-state-description-to-1024.up.sql | 2 + db/structure.sql | 4 +- docs/components/Prometheus.mdx | 137 ++++++ .../organizations/create_integration.go | 12 +- pkg/integrations/prometheus/client.go | 235 ++++++++++ pkg/integrations/prometheus/client_test.go | 187 ++++++++ pkg/integrations/prometheus/example.go | 28 ++ .../prometheus/example_data_on_alert.json | 33 ++ .../prometheus/example_output_get_alert.json | 17 + pkg/integrations/prometheus/get_alert.go | 185 ++++++++ pkg/integrations/prometheus/get_alert_test.go | 128 ++++++ pkg/integrations/prometheus/on_alert.go | 418 ++++++++++++++++++ pkg/integrations/prometheus/on_alert_test.go | 222 ++++++++++ pkg/integrations/prometheus/prometheus.go | 223 ++++++++++ .../prometheus/prometheus_test.go | 116 +++++ .../prometheus/webhook_handler.go | 21 + .../prometheus/webhook_handler_test.go | 53 +++ pkg/server/server.go | 1 + .../assets/icons/integrations/prometheus.svg | 50 +++ web_src/src/pages/workflowv2/mappers/index.ts | 10 + .../workflowv2/mappers/prometheus/base.ts | 151 +++++++ .../mappers/prometheus/get_alert.ts | 6 + .../workflowv2/mappers/prometheus/index.ts | 20 + .../mappers/prometheus/on_alert.tsx | 205 +++++++++ .../workflowv2/mappers/prometheus/types.ts | 31 ++ .../src/ui/BuildingBlocksSidebar/index.tsx | 3 + .../ui/componentSidebar/integrationIcons.tsx | 3 + 28 files changed, 2496 insertions(+), 5 deletions(-) create mode 100644 db/migrations/20260212033945_increase-app-installations-state-description-to-1024.down.sql create mode 100644 db/migrations/20260212033945_increase-app-installations-state-description-to-1024.up.sql create mode 100644 docs/components/Prometheus.mdx create mode 100644 pkg/integrations/prometheus/client.go create mode 100644 pkg/integrations/prometheus/client_test.go create mode 100644 pkg/integrations/prometheus/example.go create mode 100644 
pkg/integrations/prometheus/example_data_on_alert.json create mode 100644 pkg/integrations/prometheus/example_output_get_alert.json create mode 100644 pkg/integrations/prometheus/get_alert.go create mode 100644 pkg/integrations/prometheus/get_alert_test.go create mode 100644 pkg/integrations/prometheus/on_alert.go create mode 100644 pkg/integrations/prometheus/on_alert_test.go create mode 100644 pkg/integrations/prometheus/prometheus.go create mode 100644 pkg/integrations/prometheus/prometheus_test.go create mode 100644 pkg/integrations/prometheus/webhook_handler.go create mode 100644 pkg/integrations/prometheus/webhook_handler_test.go create mode 100644 web_src/src/assets/icons/integrations/prometheus.svg create mode 100644 web_src/src/pages/workflowv2/mappers/prometheus/base.ts create mode 100644 web_src/src/pages/workflowv2/mappers/prometheus/get_alert.ts create mode 100644 web_src/src/pages/workflowv2/mappers/prometheus/index.ts create mode 100644 web_src/src/pages/workflowv2/mappers/prometheus/on_alert.tsx create mode 100644 web_src/src/pages/workflowv2/mappers/prometheus/types.ts diff --git a/db/migrations/20260212033945_increase-app-installations-state-description-to-1024.down.sql b/db/migrations/20260212033945_increase-app-installations-state-description-to-1024.down.sql new file mode 100644 index 0000000000..e69de29bb2 diff --git a/db/migrations/20260212033945_increase-app-installations-state-description-to-1024.up.sql b/db/migrations/20260212033945_increase-app-installations-state-description-to-1024.up.sql new file mode 100644 index 0000000000..21535f5999 --- /dev/null +++ b/db/migrations/20260212033945_increase-app-installations-state-description-to-1024.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE app_installations +ALTER COLUMN state_description TYPE character varying(1024); diff --git a/db/structure.sql b/db/structure.sql index b1ef54953e..4fbec6df86 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -140,7 +140,7 @@ CREATE TABLE public.app_installations 
( app_name character varying(255) NOT NULL, installation_name character varying(255) NOT NULL, state character varying(32) NOT NULL, - state_description character varying(255), + state_description character varying(1024), configuration jsonb DEFAULT '{}'::jsonb NOT NULL, metadata jsonb DEFAULT '{}'::jsonb NOT NULL, browser_action jsonb, @@ -1515,7 +1515,7 @@ SET row_security = off; -- COPY public.schema_migrations (version, dirty) FROM stdin; -20260131134819 f +20260212033945 f \. diff --git a/docs/components/Prometheus.mdx b/docs/components/Prometheus.mdx new file mode 100644 index 0000000000..5597b3e5a0 --- /dev/null +++ b/docs/components/Prometheus.mdx @@ -0,0 +1,137 @@ +--- +title: "Prometheus" +--- + +Monitor alerts from Prometheus and Alertmanager + +## Triggers + + + + + +import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + +## Actions + + + + + +## Instructions + +### Connection + +Configure this integration with: +- **Prometheus Base URL**: URL of your Prometheus server (e.g., `https://prometheus.example.com`) +- **API Auth**: `none`, `basic`, or `bearer` for Prometheus API requests +- **Webhook Secret** (recommended): If set, Alertmanager must send `Authorization: Bearer ` on webhook requests + +### Alertmanager Setup (manual) + +The trigger setup panel in SuperPlane shows the generated webhook URL. +Use the On Alert trigger setup instructions in the workflow sidebar for the exact `alertmanager.yml` snippet. + +After editing config, reload Alertmanager (for example `POST /-/reload` when lifecycle reload is enabled). + + + +## On Alert + +The On Alert trigger starts a workflow execution when Alertmanager sends alerts to SuperPlane. 
+ +### What this trigger does + +- Receives Alertmanager webhook payloads +- Optionally validates bearer auth when **Webhook Secret** is configured +- Emits one event per matching alert as `prometheus.alert` +- Filters by selected statuses (`firing` and/or `resolved`) + +### Configuration + +- **Statuses**: Required list of alert statuses to emit +- **Alert Names**: Optional exact `alertname` filters + +### Alertmanager setup (manual) + +When the node is saved, SuperPlane generates a webhook URL shown in the trigger setup panel. Copy that URL into your Alertmanager receiver. + +Receiver registration in upstream Alertmanager is config-based (not API-created by SuperPlane). Use the setup instructions shown in the workflow sidebar for the exact `alertmanager.yml` snippet. + +After updating Alertmanager config, reload it (for example `POST /-/reload` when lifecycle reload is enabled). + +### Example Data + +```json +{ + "data": { + "annotations": { + "description": "Demo alert from local Prometheus setup", + "summary": "SuperPlane test alert is firing" + }, + "commonAnnotations": { + "description": "Demo alert from local Prometheus setup", + "summary": "SuperPlane test alert is firing" + }, + "commonLabels": { + "alertname": "SuperplaneTestAlert", + "severity": "warning" + }, + "endsAt": "0001-01-01T00:00:00Z", + "externalURL": "http://localhost:9093", + "fingerprint": "aac3b474e2c0658c", + "generatorURL": "http://fd66aa456472:9090/graph?g0.expr=vector%281%29\u0026g0.tab=1", + "groupKey": "{}:{alertname=\"SuperplaneTestAlert\"}", + "groupLabels": { + "alertname": "SuperplaneTestAlert" + }, + "labels": { + "alertname": "SuperplaneTestAlert", + "severity": "warning" + }, + "receiver": "superplane", + "startsAt": "2026-02-12T16:08:39Z", + "status": "firing" + }, + "timestamp": "2026-02-12T16:18:03.362582388Z", + "type": "prometheus.alert" +} +``` + + + +## Get Alert + +The Get Alert component fetches active alerts from Prometheus (`/api/v1/alerts`) and returns the first 
alert that matches. + +### Configuration + +- **Alert Name**: Required `labels.alertname` value to search for (supports expressions) +- **State**: Optional filter (`any`, `firing`, `pending`, `inactive`) + +### Output + +Emits one `prometheus.alert` payload with labels, annotations, state, and timing fields. + +### Example Output + +```json +{ + "data": { + "annotations": { + "description": "Demo alert from local Prometheus setup", + "summary": "SuperPlane test alert is firing" + }, + "labels": { + "alertname": "SuperplaneTestAlert", + "severity": "warning" + }, + "startsAt": "2026-02-12T16:08:09.000517289Z", + "status": "firing", + "value": "1e+00" + }, + "timestamp": "2026-02-12T16:18:05.943610583Z", + "type": "prometheus.alert" +} +``` + diff --git a/pkg/grpc/actions/organizations/create_integration.go b/pkg/grpc/actions/organizations/create_integration.go index 92c6726d14..7ee63e0ca4 100644 --- a/pkg/grpc/actions/organizations/create_integration.go +++ b/pkg/grpc/actions/organizations/create_integration.go @@ -43,14 +43,20 @@ func CreateIntegration(ctx context.Context, registry *registry.Registry, oidcPro // We must encrypt the sensitive configuration fields before storing // installationID := uuid.New() + integrationLogger := logging.ForIntegration(models.Integration{ + ID: installationID, + AppName: integrationName, + }) configuration, err := encryptConfigurationIfNeeded(ctx, registry, integration, appConfig.AsMap(), installationID, nil) if err != nil { - return nil, status.Errorf(codes.Internal, "failed to encrypt sensitive configuration: %v", err) + integrationLogger.WithError(err).Error("failed to encrypt sensitive configuration") + return nil, status.Error(codes.Internal, "failed to encrypt sensitive configuration") } - newIntegration, err := models.CreateIntegration(installationID, uuid.MustParse(orgID), integrationName, name, configuration) + newIntegration, err := models.CreateIntegration(installationID, org, integrationName, name, configuration) if 
err != nil { - return nil, status.Errorf(codes.Internal, "failed to create integration: %v", err) + integrationLogger.WithError(err).Error("failed to create integration") + return nil, status.Error(codes.Internal, "failed to create integration") } integrationCtx := contexts.NewIntegrationContext( diff --git a/pkg/integrations/prometheus/client.go b/pkg/integrations/prometheus/client.go new file mode 100644 index 0000000000..7eb22f4ead --- /dev/null +++ b/pkg/integrations/prometheus/client.go @@ -0,0 +1,235 @@ +package prometheus + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/superplanehq/superplane/pkg/core" +) + +const MaxResponseSize = 1 * 1024 * 1024 // 1MB + +type Client struct { + baseURL string + authType string + username string + password string + bearerToken string + http core.HTTPContext +} + +type prometheusResponse[T any] struct { + Status string `json:"status"` + Data T `json:"data"` + ErrorType string `json:"errorType,omitempty"` + Error string `json:"error,omitempty"` +} + +type PrometheusAlertsData struct { + Alerts []PrometheusAlert `json:"alerts"` +} + +type PrometheusAlert struct { + Labels map[string]string `json:"labels"` + Annotations map[string]string `json:"annotations"` + State string `json:"state"` + ActiveAt string `json:"activeAt,omitempty"` + Value string `json:"value,omitempty"` +} + +func NewClient(httpContext core.HTTPContext, integration core.IntegrationContext) (*Client, error) { + baseURL, err := requiredConfig(integration, "baseURL") + if err != nil { + return nil, err + } + + authType, err := requiredConfig(integration, "authType") + if err != nil { + return nil, err + } + + client := &Client{ + baseURL: normalizeBaseURL(baseURL), + authType: authType, + http: httpContext, + } + + switch authType { + case AuthTypeNone: + return client, nil + case AuthTypeBasic: + username, err := requiredConfig(integration, "username") + if err != nil { + return nil, fmt.Errorf("username is 
required when authType is basic") + } + password, err := requiredConfig(integration, "password") + if err != nil { + return nil, fmt.Errorf("password is required when authType is basic") + } + + client.username = username + client.password = password + return client, nil + case AuthTypeBearer: + bearerToken, err := requiredConfig(integration, "bearerToken") + if err != nil { + return nil, fmt.Errorf("bearerToken is required when authType is bearer") + } + + client.bearerToken = bearerToken + return client, nil + default: + return nil, fmt.Errorf("invalid authType %q", authType) + } +} + +func requiredConfig(ctx core.IntegrationContext, name string) (string, error) { + value, err := ctx.GetConfig(name) + if err != nil { + return "", fmt.Errorf("%s is required", name) + } + + s := string(value) + if s == "" { + return "", fmt.Errorf("%s is required", name) + } + + return s, nil +} + +func normalizeBaseURL(baseURL string) string { + if baseURL == "/" { + return baseURL + } + + for len(baseURL) > 0 && strings.HasSuffix(baseURL, "/") { + baseURL = baseURL[:len(baseURL)-1] + } + + return baseURL +} + +func (c *Client) GetAlertsFromPrometheus() ([]PrometheusAlert, error) { + body, err := c.execRequest(http.MethodGet, "/api/v1/alerts") + if err != nil { + return nil, err + } + + response := prometheusResponse[PrometheusAlertsData]{} + if err := decodeResponse(body, &response); err != nil { + return nil, err + } + + if response.Status != "success" { + return nil, formatPrometheusError(response.ErrorType, response.Error) + } + + return response.Data.Alerts, nil +} + +func (c *Client) Query(query string) (map[string]any, error) { + apiPath := fmt.Sprintf("/api/v1/query?query=%s", url.QueryEscape(query)) + body, err := c.execRequest(http.MethodGet, apiPath) + if err != nil { + return nil, err + } + + response := prometheusResponse[map[string]any]{} + if err := decodeResponse(body, &response); err != nil { + return nil, err + } + + if response.Status != "success" { + return 
nil, formatPrometheusError(response.ErrorType, response.Error) + } + + return response.Data, nil +} + +func (c *Client) execRequest(method string, path string) ([]byte, error) { + apiURL := c.baseURL + if strings.HasPrefix(path, "/") { + apiURL += path + } else { + apiURL += "/" + path + } + + req, err := http.NewRequest(method, apiURL, nil) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + req.Header.Set("Accept", "application/json") + if err := c.setAuth(req); err != nil { + return nil, err + } + + res, err := c.http.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to execute request: %w", err) + } + defer res.Body.Close() + + limitedReader := io.LimitReader(res.Body, MaxResponseSize+1) + body, err := io.ReadAll(limitedReader) + if err != nil { + return nil, fmt.Errorf("failed to read response body: %w", err) + } + + if len(body) > MaxResponseSize { + return nil, fmt.Errorf("response too large: exceeds maximum size of %d bytes", MaxResponseSize) + } + + if res.StatusCode < 200 || res.StatusCode >= 300 { + return nil, fmt.Errorf("request failed with status %d: %s", res.StatusCode, string(body)) + } + + return body, nil +} + +func (c *Client) setAuth(req *http.Request) error { + switch c.authType { + case AuthTypeNone: + return nil + case AuthTypeBasic: + req.SetBasicAuth(c.username, c.password) + return nil + case AuthTypeBearer: + req.Header.Set("Authorization", "Bearer "+c.bearerToken) + return nil + default: + return fmt.Errorf("invalid authType %q", c.authType) + } +} + +func decodeResponse[T any](body []byte, out *T) error { + if len(body) == 0 { + return fmt.Errorf("empty response body") + } + + if err := json.Unmarshal(body, out); err != nil { + return fmt.Errorf("failed to decode response JSON: %w", err) + } + + return nil +} + +func formatPrometheusError(errorType string, errorMessage string) error { + if errorType == "" && errorMessage == "" { + return fmt.Errorf("prometheus API returned non-success 
status") + } + + if errorType == "" { + return fmt.Errorf("prometheus API error: %s", errorMessage) + } + + if errorMessage == "" { + return fmt.Errorf("prometheus API error type: %s", errorType) + } + + return fmt.Errorf("prometheus API error (%s): %s", errorType, errorMessage) +} diff --git a/pkg/integrations/prometheus/client_test.go b/pkg/integrations/prometheus/client_test.go new file mode 100644 index 0000000000..f70cd85a71 --- /dev/null +++ b/pkg/integrations/prometheus/client_test.go @@ -0,0 +1,187 @@ +package prometheus + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__NewClient(t *testing.T) { + httpCtx := &contexts.HTTPContext{} + + t.Run("missing baseURL returns error", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{Configuration: map[string]any{"authType": AuthTypeNone}} + _, err := NewClient(httpCtx, integrationCtx) + require.ErrorContains(t, err, "baseURL is required") + }) + + t.Run("invalid auth type returns error", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": "invalid", + }} + _, err := NewClient(httpCtx, integrationCtx) + require.ErrorContains(t, err, "invalid authType") + }) + + t.Run("basic auth requires username and password", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": AuthTypeBasic, + }} + _, err := NewClient(httpCtx, integrationCtx) + require.ErrorContains(t, err, "username is required") + }) + + t.Run("creates bearer client", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com/", + "authType": AuthTypeBearer, + "bearerToken": 
"secret-token", + }} + + client, err := NewClient(httpCtx, integrationCtx) + require.NoError(t, err) + assert.Equal(t, "https://prometheus.example.com", client.baseURL) + assert.Equal(t, AuthTypeBearer, client.authType) + }) +} + +func Test__Client__GetAlertsFromPrometheus(t *testing.T) { + t.Run("adds bearer auth header", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + {"status":"success","data":{"alerts":[{"state":"firing","labels":{"alertname":"HighLatency"}}]}} + `)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": AuthTypeBearer, + "bearerToken": "token-1", + }} + + client, err := NewClient(httpCtx, integrationCtx) + require.NoError(t, err) + + alerts, err := client.GetAlertsFromPrometheus() + require.NoError(t, err) + require.Len(t, alerts, 1) + assert.Equal(t, "HighLatency", alerts[0].Labels["alertname"]) + + require.Len(t, httpCtx.Requests, 1) + assert.Equal(t, "Bearer token-1", httpCtx.Requests[0].Header.Get("Authorization")) + assert.Contains(t, httpCtx.Requests[0].URL.String(), "/api/v1/alerts") + }) + + t.Run("adds basic auth header", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{{StatusCode: http.StatusOK, Body: io.NopCloser(strings.NewReader(`{"status":"success","data":{"alerts":[]}}`))}}, + } + + integrationCtx := &contexts.IntegrationContext{Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": AuthTypeBasic, + "username": "admin", + "password": "password", + }} + + client, err := NewClient(httpCtx, integrationCtx) + require.NoError(t, err) + + _, err = client.GetAlertsFromPrometheus() + require.NoError(t, err) + + require.Len(t, httpCtx.Requests, 1) + username, password, ok := httpCtx.Requests[0].BasicAuth() + require.True(t, ok) + assert.Equal(t, 
"admin", username) + assert.Equal(t, "password", password) + }) + + t.Run("non-2xx returns error", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{{StatusCode: http.StatusUnauthorized, Body: io.NopCloser(strings.NewReader(`unauthorized`))}}, + } + + integrationCtx := &contexts.IntegrationContext{Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": AuthTypeNone, + }} + + client, err := NewClient(httpCtx, integrationCtx) + require.NoError(t, err) + + _, err = client.GetAlertsFromPrometheus() + require.ErrorContains(t, err, "status 401") + }) + + t.Run("response too large returns error", func(t *testing.T) { + largeBody := strings.Repeat("x", MaxResponseSize+1) + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{{StatusCode: http.StatusOK, Body: io.NopCloser(strings.NewReader(largeBody))}}, + } + + integrationCtx := &contexts.IntegrationContext{Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": AuthTypeNone, + }} + + client, err := NewClient(httpCtx, integrationCtx) + require.NoError(t, err) + + _, err = client.GetAlertsFromPrometheus() + require.ErrorContains(t, err, "response too large") + }) + + t.Run("invalid json returns decode error", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{{StatusCode: http.StatusOK, Body: io.NopCloser(strings.NewReader(`not-json`))}}, + } + + integrationCtx := &contexts.IntegrationContext{Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": AuthTypeNone, + }} + + client, err := NewClient(httpCtx, integrationCtx) + require.NoError(t, err) + + _, err = client.GetAlertsFromPrometheus() + require.ErrorContains(t, err, "failed to decode response JSON") + }) +} + +func Test__Client__Query(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: 
io.NopCloser(strings.NewReader(`{"status":"success","data":{"resultType":"vector","result":[]}}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": AuthTypeNone, + }} + + client, err := NewClient(httpCtx, integrationCtx) + require.NoError(t, err) + + _, err = client.Query("up") + require.NoError(t, err) + require.Len(t, httpCtx.Requests, 1) + assert.Contains(t, httpCtx.Requests[0].URL.String(), "/api/v1/query?query=up") +} diff --git a/pkg/integrations/prometheus/example.go b/pkg/integrations/prometheus/example.go new file mode 100644 index 0000000000..6b27e2ce05 --- /dev/null +++ b/pkg/integrations/prometheus/example.go @@ -0,0 +1,28 @@ +package prometheus + +import ( + _ "embed" + "sync" + + "github.com/superplanehq/superplane/pkg/utils" +) + +//go:embed example_data_on_alert.json +var exampleDataOnAlertBytes []byte + +//go:embed example_output_get_alert.json +var exampleOutputGetAlertBytes []byte + +var exampleDataOnAlertOnce sync.Once +var exampleDataOnAlert map[string]any + +var exampleOutputGetAlertOnce sync.Once +var exampleOutputGetAlert map[string]any + +func (t *OnAlert) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnAlertOnce, exampleDataOnAlertBytes, &exampleDataOnAlert) +} + +func (c *GetAlert) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputGetAlertOnce, exampleOutputGetAlertBytes, &exampleOutputGetAlert) +} diff --git a/pkg/integrations/prometheus/example_data_on_alert.json b/pkg/integrations/prometheus/example_data_on_alert.json new file mode 100644 index 0000000000..c87cd783ae --- /dev/null +++ b/pkg/integrations/prometheus/example_data_on_alert.json @@ -0,0 +1,33 @@ +{ + "data": { + "annotations": { + "description": "Demo alert from local Prometheus setup", + "summary": "SuperPlane test alert is firing" + }, + "commonAnnotations": { + "description": "Demo alert from 
local Prometheus setup", + "summary": "SuperPlane test alert is firing" + }, + "commonLabels": { + "alertname": "SuperplaneTestAlert", + "severity": "warning" + }, + "endsAt": "0001-01-01T00:00:00Z", + "externalURL": "http://localhost:9093", + "fingerprint": "aac3b474e2c0658c", + "generatorURL": "http://fd66aa456472:9090/graph?g0.expr=vector%281%29&g0.tab=1", + "groupKey": "{}:{alertname=\"SuperplaneTestAlert\"}", + "groupLabels": { + "alertname": "SuperplaneTestAlert" + }, + "labels": { + "alertname": "SuperplaneTestAlert", + "severity": "warning" + }, + "receiver": "superplane", + "startsAt": "2026-02-12T16:08:39Z", + "status": "firing" + }, + "timestamp": "2026-02-12T16:18:03.362582388Z", + "type": "prometheus.alert" +} \ No newline at end of file diff --git a/pkg/integrations/prometheus/example_output_get_alert.json b/pkg/integrations/prometheus/example_output_get_alert.json new file mode 100644 index 0000000000..363fe81943 --- /dev/null +++ b/pkg/integrations/prometheus/example_output_get_alert.json @@ -0,0 +1,17 @@ +{ + "data": { + "annotations": { + "description": "Demo alert from local Prometheus setup", + "summary": "SuperPlane test alert is firing" + }, + "labels": { + "alertname": "SuperplaneTestAlert", + "severity": "warning" + }, + "startsAt": "2026-02-12T16:08:09.000517289Z", + "status": "firing", + "value": "1e+00" + }, + "timestamp": "2026-02-12T16:18:05.943610583Z", + "type": "prometheus.alert" +} \ No newline at end of file diff --git a/pkg/integrations/prometheus/get_alert.go b/pkg/integrations/prometheus/get_alert.go new file mode 100644 index 0000000000..1cf4ba9cc6 --- /dev/null +++ b/pkg/integrations/prometheus/get_alert.go @@ -0,0 +1,185 @@ +package prometheus + +import ( + "fmt" + "net/http" + "strings" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type GetAlert struct{} + +type GetAlertConfiguration struct { + 
AlertName string `json:"alertName" mapstructure:"alertName"` + State string `json:"state" mapstructure:"state"` +} + +func (c *GetAlert) Name() string { + return "prometheus.getAlert" +} + +func (c *GetAlert) Label() string { + return "Get Alert" +} + +func (c *GetAlert) Description() string { + return "Get a Prometheus alert by name" +} + +func (c *GetAlert) Documentation() string { + return `The Get Alert component fetches active alerts from Prometheus (` + "`/api/v1/alerts`" + `) and returns the first alert that matches. + +## Configuration + +- **Alert Name**: Required ` + "`labels.alertname`" + ` value to search for (supports expressions) +- **State**: Optional filter (` + "`any`" + `, ` + "`firing`" + `, ` + "`pending`" + `, ` + "`inactive`" + `) + +## Output + +Emits one ` + "`prometheus.alert`" + ` payload with labels, annotations, state, and timing fields.` +} + +func (c *GetAlert) Icon() string { + return "prometheus" +} + +func (c *GetAlert) Color() string { + return "gray" +} + +func (c *GetAlert) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *GetAlert) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "alertName", + Label: "Alert Name", + Type: configuration.FieldTypeString, + Required: true, + Description: "labels.alertname value to match", + }, + { + Name: "state", + Label: "State", + Type: configuration.FieldTypeSelect, + Required: false, + Default: AlertStateAny, + TypeOptions: &configuration.TypeOptions{ + Select: &configuration.SelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "Any", Value: AlertStateAny}, + {Label: "Firing", Value: AlertStateFiring}, + {Label: "Pending", Value: AlertStatePending}, + {Label: "Inactive", Value: AlertStateInactive}, + }, + }, + }, + }, + } +} + +func (c *GetAlert) Setup(ctx core.SetupContext) error { + config := GetAlertConfiguration{} + if err := 
mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + config = sanitizeGetAlertConfiguration(config) + + if config.AlertName == "" { + return fmt.Errorf("alertName is required") + } + + state := config.State + if state == "" { + return nil + } + + switch state { + case AlertStateAny, AlertStateFiring, AlertStatePending, AlertStateInactive: + return nil + default: + return fmt.Errorf("invalid state %q", config.State) + } +} + +func (c *GetAlert) Execute(ctx core.ExecutionContext) error { + config := GetAlertConfiguration{} + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + config = sanitizeGetAlertConfiguration(config) + + alertName := config.AlertName + state := config.State + if state == "" { + state = AlertStateAny + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("failed to create Prometheus client: %w", err) + } + + alerts, err := client.GetAlertsFromPrometheus() + if err != nil { + return fmt.Errorf("failed to fetch alerts: %w", err) + } + + for _, alert := range alerts { + if alert.Labels["alertname"] != alertName { + continue + } + + if state != AlertStateAny && !strings.EqualFold(alert.State, state) { + continue + } + + return ctx.ExecutionState.Emit( + core.DefaultOutputChannel.Name, + PrometheusAlertPayloadType, + []any{buildAlertPayloadFromPrometheusAlert(alert)}, + ) + } + + if state == AlertStateAny { + return fmt.Errorf("alert %q was not found", alertName) + } + + return fmt.Errorf("alert %q with state %q was not found", alertName, state) +} + +func (c *GetAlert) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *GetAlert) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *GetAlert) Actions() []core.Action { + return 
[]core.Action{} +} + +func (c *GetAlert) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *GetAlert) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *GetAlert) Cleanup(ctx core.SetupContext) error { + return nil +} + +func sanitizeGetAlertConfiguration(config GetAlertConfiguration) GetAlertConfiguration { + config.AlertName = strings.TrimSpace(config.AlertName) + config.State = strings.ToLower(strings.TrimSpace(config.State)) + return config +} diff --git a/pkg/integrations/prometheus/get_alert_test.go b/pkg/integrations/prometheus/get_alert_test.go new file mode 100644 index 0000000000..20b4b307b9 --- /dev/null +++ b/pkg/integrations/prometheus/get_alert_test.go @@ -0,0 +1,128 @@ +package prometheus + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__GetAlert__Setup(t *testing.T) { + component := &GetAlert{} + + t.Run("alertName is required", func(t *testing.T) { + err := component.Setup(core.SetupContext{Configuration: map[string]any{"alertName": ""}}) + require.ErrorContains(t, err, "alertName is required") + }) + + t.Run("invalid state returns error", func(t *testing.T) { + err := component.Setup(core.SetupContext{Configuration: map[string]any{"alertName": "HighLatency", "state": "unknown"}}) + require.ErrorContains(t, err, "invalid state") + }) + + t.Run("valid setup", func(t *testing.T) { + err := component.Setup(core.SetupContext{Configuration: map[string]any{"alertName": "HighLatency", "state": AlertStateFiring}}) + require.NoError(t, err) + }) +} + +func Test__GetAlert__Execute(t *testing.T) { + component := &GetAlert{} + + t.Run("matching alert is emitted", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: 
io.NopCloser(strings.NewReader(` + {"status":"success","data":{"alerts":[ + {"state":"pending","labels":{"alertname":"OtherAlert"},"annotations":{"summary":"other"}}, + {"state":"firing","labels":{"alertname":"HighLatency","instance":"api-1"},"annotations":{"summary":"latency"},"activeAt":"2026-01-19T12:00:00Z","value":"1"} + ]}} + `)), + }, + }, + } + + executionCtx := &contexts.ExecutionStateContext{} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{"alertName": "HighLatency", "state": AlertStateFiring}, + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": AuthTypeNone, + }}, + ExecutionState: executionCtx, + }) + + require.NoError(t, err) + assert.True(t, executionCtx.Finished) + assert.True(t, executionCtx.Passed) + assert.Equal(t, PrometheusAlertPayloadType, executionCtx.Type) + require.Len(t, executionCtx.Payloads, 1) + payload := executionCtx.Payloads[0].(map[string]any)["data"].(map[string]any) + assert.Equal(t, "HighLatency", payload["labels"].(map[string]string)["alertname"]) + assert.Equal(t, "firing", payload["status"]) + }) + + t.Run("alert not found returns error", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"status":"success","data":{"alerts":[]}}`)), + }, + }, + } + + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{"alertName": "HighLatency", "state": AlertStateAny}, + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": AuthTypeNone, + }}, + ExecutionState: &contexts.ExecutionStateContext{}, + }) + + require.ErrorContains(t, err, "was not found") + }) + + t.Run("execute sanitizes alertName and state", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: 
[]*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + {"status":"success","data":{"alerts":[ + {"state":"firing","labels":{"alertname":"HighLatency"},"annotations":{"summary":"latency"},"activeAt":"2026-01-19T12:00:00Z","value":"1"} + ]}} + `)), + }, + }, + } + + executionCtx := &contexts.ExecutionStateContext{} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{"alertName": " HighLatency ", "state": " FIRING "}, + HTTP: httpCtx, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": AuthTypeNone, + }}, + ExecutionState: executionCtx, + }) + + require.NoError(t, err) + assert.True(t, executionCtx.Passed) + require.Len(t, executionCtx.Payloads, 1) + payload := executionCtx.Payloads[0].(map[string]any)["data"].(map[string]any) + assert.Equal(t, "HighLatency", payload["labels"].(map[string]string)["alertname"]) + assert.Equal(t, "firing", payload["status"]) + }) +} diff --git a/pkg/integrations/prometheus/on_alert.go b/pkg/integrations/prometheus/on_alert.go new file mode 100644 index 0000000000..655407c24f --- /dev/null +++ b/pkg/integrations/prometheus/on_alert.go @@ -0,0 +1,418 @@ +package prometheus + +import ( + "crypto/subtle" + "encoding/json" + "errors" + "fmt" + "net/http" + "slices" + "strings" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type OnAlert struct{} + +var errWebhookAuthConfig = errors.New("failed to read webhook auth configuration") + +type OnAlertConfiguration struct { + Statuses []string `json:"statuses" mapstructure:"statuses"` + AlertNames []string `json:"alertNames" mapstructure:"alertNames"` +} + +type OnAlertMetadata struct { + WebhookURL string `json:"webhookUrl" mapstructure:"webhookUrl"` + WebhookAuthEnabled bool `json:"webhookAuthEnabled,omitempty" mapstructure:"webhookAuthEnabled"` 
+} + +type AlertmanagerWebhookPayload struct { + Version string `json:"version"` + GroupKey string `json:"groupKey"` + Status string `json:"status"` + Receiver string `json:"receiver"` + GroupLabels map[string]string `json:"groupLabels"` + CommonLabels map[string]string `json:"commonLabels"` + CommonAnnotations map[string]string `json:"commonAnnotations"` + ExternalURL string `json:"externalURL"` + TruncatedAlerts int `json:"truncatedAlerts"` + Alerts []AlertmanagerAlert `json:"alerts"` +} + +type AlertmanagerAlert struct { + Status string `json:"status"` + Labels map[string]string `json:"labels"` + Annotations map[string]string `json:"annotations"` + StartsAt string `json:"startsAt"` + EndsAt string `json:"endsAt"` + GeneratorURL string `json:"generatorURL"` + Fingerprint string `json:"fingerprint"` +} + +func (t *OnAlert) Name() string { + return "prometheus.onAlert" +} + +func (t *OnAlert) Label() string { + return "On Alert" +} + +func (t *OnAlert) Description() string { + return "Listen to Alertmanager webhook alert events" +} + +func (t *OnAlert) Documentation() string { + return `The On Alert trigger starts a workflow execution when Alertmanager sends alerts to SuperPlane. + +## What this trigger does + +- Receives Alertmanager webhook payloads +- Optionally validates bearer auth when **Webhook Secret** is configured +- Emits one event per matching alert as ` + "`prometheus.alert`" + ` +- Filters by selected statuses (` + "`firing`" + ` and/or ` + "`resolved`" + `) + +## Configuration + +- **Statuses**: Required list of alert statuses to emit +- **Alert Names**: Optional exact ` + "`alertname`" + ` filters + +## Alertmanager setup (manual) + +When the node is saved, SuperPlane generates a webhook URL shown in the trigger setup panel. Copy that URL into your Alertmanager receiver. + +Receiver registration in upstream Alertmanager is config-based (not API-created by SuperPlane). 
Use the setup instructions shown in the workflow sidebar for the exact ` + "`alertmanager.yml`" + ` snippet. + +After updating Alertmanager config, reload it (for example ` + "`POST /-/reload`" + ` when lifecycle reload is enabled).` +} + +func (t *OnAlert) Icon() string { + return "prometheus" +} + +func (t *OnAlert) Color() string { + return "gray" +} + +func (t *OnAlert) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "statuses", + Label: "Statuses", + Type: configuration.FieldTypeMultiSelect, + Required: true, + Default: []string{AlertStateFiring}, + TypeOptions: &configuration.TypeOptions{ + MultiSelect: &configuration.MultiSelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "Firing", Value: AlertStateFiring}, + {Label: "Resolved", Value: AlertStateResolved}, + }, + }, + }, + Description: "Only emit alerts with these statuses", + }, + { + Name: "alertNames", + Label: "Alert Names", + Type: configuration.FieldTypeList, + Required: false, + TypeOptions: &configuration.TypeOptions{ + List: &configuration.ListTypeOptions{ + ItemLabel: "Alert Name", + ItemDefinition: &configuration.ListItemDefinition{ + Type: configuration.FieldTypeString, + }, + }, + }, + Default: []string{"MyAlert"}, + Description: "Optional exact alertname filters", + }, + } +} + +func (t *OnAlert) Setup(ctx core.TriggerContext) error { + metadata := OnAlertMetadata{} + if ctx.Metadata != nil && ctx.Metadata.Get() != nil { + if err := mapstructure.Decode(ctx.Metadata.Get(), &metadata); err != nil { + return fmt.Errorf("failed to decode metadata: %w", err) + } + } + + if _, err := parseAndValidateOnAlertConfiguration(ctx.Configuration); err != nil { + return err + } + + if err := ctx.Integration.RequestWebhook(struct{}{}); err != nil { + return err + } + + if ctx.Webhook == nil { + return fmt.Errorf("missing webhook context") + } + + webhookURL, err := ctx.Webhook.Setup() + if err != nil { + return fmt.Errorf("failed to setup webhook URL: %w", 
err) + } + + metadata.WebhookURL = webhookURL + webhookBearerToken, _ := optionalIntegrationConfig(ctx.Integration, "webhookBearerToken") + metadata.WebhookAuthEnabled = webhookBearerToken != "" + + if ctx.Metadata == nil { + return nil + } + + return ctx.Metadata.Set(metadata) +} + +func (t *OnAlert) Actions() []core.Action { + return []core.Action{} +} + +func (t *OnAlert) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + return nil, nil +} + +func (t *OnAlert) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + config, statusCode, err := parseOnAlertWebhookConfiguration(ctx.Configuration) + if err != nil { + return statusCode, err + } + + if statusCode, err = authorizeOnAlertWebhook(ctx); err != nil { + return statusCode, err + } + + payload, statusCode, err := parseAlertmanagerWebhookPayload(ctx.Body) + if err != nil { + return statusCode, err + } + + if err := emitMatchingAlerts(ctx.Events, config, payload); err != nil { + return http.StatusInternalServerError, err + } + + return http.StatusOK, nil +} + +func (t *OnAlert) Cleanup(ctx core.TriggerContext) error { + return nil +} + +func validateOnAlertConfiguration(config OnAlertConfiguration) error { + normalizedStatuses := normalizeStatuses(config.Statuses) + if len(normalizedStatuses) == 0 { + return fmt.Errorf("at least one status must be selected") + } + + for _, status := range normalizedStatuses { + if status != AlertStateFiring && status != AlertStateResolved { + return fmt.Errorf("invalid status %q, expected firing or resolved", status) + } + } + + return nil +} + +func normalizeStatuses(values []string) []string { + normalized := make([]string, 0, len(values)) + for _, value := range values { + if value == "" || slices.Contains(normalized, value) { + continue + } + normalized = append(normalized, value) + } + return normalized +} + +func filterEmpty(values []string) []string { + result := make([]string, 0, len(values)) + for _, value := range values { + if value == "" 
{ + continue + } + result = append(result, value) + } + return result +} + +func validateWebhookAuth(ctx core.WebhookRequestContext) error { + if ctx.Integration == nil { + return nil + } + + webhookBearerToken, err := optionalIntegrationConfig(ctx.Integration, "webhookBearerToken") + if err != nil { + return fmt.Errorf("%w: %v", errWebhookAuthConfig, err) + } + + if webhookBearerToken == "" { + return nil + } + + authorization := ctx.Headers.Get("Authorization") + if !strings.HasPrefix(authorization, "Bearer ") { + return fmt.Errorf("missing bearer authorization") + } + + token := authorization[len("Bearer "):] + if subtle.ConstantTimeCompare([]byte(token), []byte(webhookBearerToken)) != 1 { + return fmt.Errorf("invalid bearer token") + } + + return nil +} + +func optionalIntegrationConfig(integration core.IntegrationContext, name string) (string, error) { + if integration == nil { + return "", nil + } + + value, err := integration.GetConfig(name) + if err != nil { + if isMissingIntegrationConfigError(err, name) { + return "", nil + } + return "", err + } + + return string(value), nil +} + +func isMissingIntegrationConfigError(err error, name string) bool { + if err == nil { + return false + } + + message := strings.ToLower(strings.TrimSpace(err.Error())) + return strings.Contains(message, strings.ToLower(name)) && strings.Contains(message, "not found") +} + +func buildAlertPayloadFromAlertmanager(alert AlertmanagerAlert, payload AlertmanagerWebhookPayload) map[string]any { + output := map[string]any{ + "status": alert.Status, + "labels": alert.Labels, + "annotations": alert.Annotations, + "startsAt": alert.StartsAt, + "endsAt": alert.EndsAt, + "generatorURL": alert.GeneratorURL, + "fingerprint": alert.Fingerprint, + "receiver": payload.Receiver, + "groupKey": payload.GroupKey, + "groupLabels": payload.GroupLabels, + "commonLabels": payload.CommonLabels, + "commonAnnotations": payload.CommonAnnotations, + "externalURL": payload.ExternalURL, + } + + if 
output["status"] == "" { + output["status"] = payload.Status + } + + return output +} + +func buildAlertPayloadFromPrometheusAlert(alert PrometheusAlert) map[string]any { + return map[string]any{ + "status": alert.State, + "labels": alert.Labels, + "annotations": alert.Annotations, + "startsAt": alert.ActiveAt, + "value": alert.Value, + } +} + +func sanitizeOnAlertConfiguration(config OnAlertConfiguration) OnAlertConfiguration { + for i := range config.Statuses { + config.Statuses[i] = strings.ToLower(strings.TrimSpace(config.Statuses[i])) + } + + for i := range config.AlertNames { + config.AlertNames[i] = strings.TrimSpace(config.AlertNames[i]) + } + + return config +} + +func parseOnAlertWebhookConfiguration(configuration any) (OnAlertConfiguration, int, error) { + config, err := parseAndValidateOnAlertConfiguration(configuration) + if err != nil { + return OnAlertConfiguration{}, http.StatusInternalServerError, err + } + + return config, http.StatusOK, nil +} + +func authorizeOnAlertWebhook(ctx core.WebhookRequestContext) (int, error) { + if err := validateWebhookAuth(ctx); err != nil { + if errors.Is(err, errWebhookAuthConfig) { + return http.StatusInternalServerError, err + } + return http.StatusForbidden, err + } + + return http.StatusOK, nil +} + +func parseAlertmanagerWebhookPayload(body []byte) (AlertmanagerWebhookPayload, int, error) { + payload := AlertmanagerWebhookPayload{} + if err := json.Unmarshal(body, &payload); err != nil { + return AlertmanagerWebhookPayload{}, http.StatusBadRequest, fmt.Errorf("failed to parse request body: %w", err) + } + + return payload, http.StatusOK, nil +} + +func emitMatchingAlerts(events core.EventContext, config OnAlertConfiguration, payload AlertmanagerWebhookPayload) error { + filteredNames := filterEmpty(config.AlertNames) + + for _, alert := range payload.Alerts { + alertStatus := alert.Status + if alertStatus == "" { + alertStatus = payload.Status + } + + if !containsStatus(config.Statuses, alertStatus) { + 
continue + } + + alertName := alert.Labels["alertname"] + if len(filteredNames) > 0 && !slices.Contains(filteredNames, alertName) { + continue + } + + if err := events.Emit(PrometheusAlertPayloadType, buildAlertPayloadFromAlertmanager(alert, payload)); err != nil { + return fmt.Errorf("failed to emit alert event: %w", err) + } + } + + return nil +} + +func parseAndValidateOnAlertConfiguration(configuration any) (OnAlertConfiguration, error) { + config := OnAlertConfiguration{} + if err := mapstructure.Decode(configuration, &config); err != nil { + return OnAlertConfiguration{}, fmt.Errorf("failed to decode configuration: %w", err) + } + + config = sanitizeOnAlertConfiguration(config) + if err := validateOnAlertConfiguration(config); err != nil { + return OnAlertConfiguration{}, err + } + config.Statuses = normalizeStatuses(config.Statuses) + + return config, nil +} + +func containsStatus(allowed []string, state string) bool { + for _, value := range allowed { + if strings.EqualFold(value, state) { + return true + } + } + + return false +} diff --git a/pkg/integrations/prometheus/on_alert_test.go b/pkg/integrations/prometheus/on_alert_test.go new file mode 100644 index 0000000000..52b80b5bd9 --- /dev/null +++ b/pkg/integrations/prometheus/on_alert_test.go @@ -0,0 +1,222 @@ +package prometheus + +import ( + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__OnAlert__Setup(t *testing.T) { + trigger := &OnAlert{} + + t.Run("at least one status is required", func(t *testing.T) { + err := trigger.Setup(core.TriggerContext{ + Configuration: map[string]any{"statuses": []string{}}, + Integration: &contexts.IntegrationContext{}, + Webhook: &contexts.WebhookContext{}, + }) + + require.ErrorContains(t, err, "at least one status") + }) + + t.Run("valid setup requests shared webhook and stores setup 
metadata", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "webhookBearerToken": "token-1", + }, + } + metadataCtx := &contexts.MetadataContext{} + + err := trigger.Setup(core.TriggerContext{ + Configuration: map[string]any{"statuses": []string{AlertStateFiring}}, + Integration: integrationCtx, + Metadata: metadataCtx, + Webhook: &setupWebhookContext{url: "https://superplane.example.com/api/v1/webhooks/wh_123"}, + }) + + require.NoError(t, err) + require.Len(t, integrationCtx.WebhookRequests, 1) + assert.IsType(t, struct{}{}, integrationCtx.WebhookRequests[0]) + + metadata, ok := metadataCtx.Metadata.(OnAlertMetadata) + require.True(t, ok) + assert.Equal(t, "https://superplane.example.com/api/v1/webhooks/wh_123", metadata.WebhookURL) + assert.True(t, metadata.WebhookAuthEnabled) + }) +} + +func Test__OnAlert__HandleWebhook(t *testing.T) { + trigger := &OnAlert{} + payload := []byte(` + { + "status":"firing", + "receiver":"superplane", + "groupKey":"{}:{alertname=\"HighRequestLatency\"}", + "groupLabels":{"alertname":"HighRequestLatency"}, + "commonLabels":{"alertname":"HighRequestLatency","severity":"critical"}, + "commonAnnotations":{"summary":"API latency above threshold"}, + "externalURL":"http://alertmanager.example.com", + "alerts":[ + { + "status":"firing", + "labels":{"alertname":"HighRequestLatency","instance":"api-1:9090","job":"api"}, + "annotations":{"summary":"API latency above threshold","description":"P95 latency above 500ms"}, + "startsAt":"2026-01-19T12:00:00Z", + "endsAt":"0001-01-01T00:00:00Z", + "generatorURL":"http://prometheus.example.com/graph?g0.expr=...", + "fingerprint":"abc123" + }, + { + "status":"resolved", + "labels":{"alertname":"DiskAlmostFull","instance":"node-1:9100","job":"node"}, + "annotations":{"summary":"Disk recovered"}, + "startsAt":"2026-01-19T10:00:00Z", + "endsAt":"2026-01-19T12:10:00Z", + "generatorURL":"http://prometheus.example.com/graph?g0.expr=...", + 
"fingerprint":"def456" + } + ] + } + `) + + t.Run("missing bearer auth returns 403", func(t *testing.T) { + eventsCtx := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: payload, + Headers: http.Header{}, + Configuration: map[string]any{"statuses": []string{AlertStateFiring}}, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{ + "webhookBearerToken": "token-1", + }}, + Events: eventsCtx, + }) + + assert.Equal(t, http.StatusForbidden, code) + require.ErrorContains(t, err, "missing bearer authorization") + assert.Len(t, eventsCtx.Payloads, 0) + }) + + t.Run("invalid body returns 400", func(t *testing.T) { + eventsCtx := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: []byte("not-json"), + Headers: http.Header{}, + Configuration: map[string]any{"statuses": []string{AlertStateFiring}}, + Integration: &contexts.IntegrationContext{}, + Events: eventsCtx, + }) + + assert.Equal(t, http.StatusBadRequest, code) + require.ErrorContains(t, err, "failed to parse request body") + assert.Len(t, eventsCtx.Payloads, 0) + }) + + t.Run("status filtered out returns 200 and no events", func(t *testing.T) { + eventsCtx := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: payload, + Headers: http.Header{}, + Configuration: map[string]any{"statuses": []string{AlertStateResolved}, "alertNames": []string{"OnlyOther"}}, + Integration: &contexts.IntegrationContext{}, + Events: eventsCtx, + }) + + assert.Equal(t, http.StatusOK, code) + require.NoError(t, err) + assert.Len(t, eventsCtx.Payloads, 0) + }) + + t.Run("webhook sanitizes statuses and alert names at runtime", func(t *testing.T) { + eventsCtx := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: payload, + Headers: http.Header{}, + Configuration: map[string]any{"statuses": []string{" FIRING "}, "alertNames": 
[]string{" HighRequestLatency "}}, + Integration: &contexts.IntegrationContext{}, + Events: eventsCtx, + }) + + assert.Equal(t, http.StatusOK, code) + require.NoError(t, err) + require.Len(t, eventsCtx.Payloads, 1) + assert.Equal(t, "HighRequestLatency", eventsCtx.Payloads[0].Data.(map[string]any)["labels"].(map[string]string)["alertname"]) + }) + + t.Run("webhook auth config read errors fail closed", func(t *testing.T) { + eventsCtx := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: payload, + Headers: http.Header{}, + Configuration: map[string]any{"statuses": []string{AlertStateFiring}}, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{ + "webhookBearerToken": 123, + }}, + Events: eventsCtx, + }) + + assert.Equal(t, http.StatusInternalServerError, code) + require.ErrorContains(t, err, "failed to read webhook auth configuration") + assert.Len(t, eventsCtx.Payloads, 0) + }) + + t.Run("valid firing and resolved alerts are emitted with bearer auth", func(t *testing.T) { + eventsCtx := &contexts.EventContext{} + headers := http.Header{} + headers.Set("Authorization", "Bearer token-1") + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: payload, + Headers: headers, + Configuration: map[string]any{"statuses": []string{AlertStateFiring, AlertStateResolved}}, + Integration: &contexts.IntegrationContext{Configuration: map[string]any{ + "webhookBearerToken": "token-1", + }}, + Events: eventsCtx, + }) + + assert.Equal(t, http.StatusOK, code) + require.NoError(t, err) + require.Len(t, eventsCtx.Payloads, 2) + assert.Equal(t, PrometheusAlertPayloadType, eventsCtx.Payloads[0].Type) + assert.Equal(t, "HighRequestLatency", eventsCtx.Payloads[0].Data.(map[string]any)["labels"].(map[string]string)["alertname"]) + assert.Equal(t, "resolved", eventsCtx.Payloads[1].Data.(map[string]any)["status"]) + }) +} + +func Test__parseAndValidateOnAlertConfiguration__NormalizesStatuses(t 
*testing.T) { + config, err := parseAndValidateOnAlertConfiguration(map[string]any{ + "statuses": []string{" firing ", "", "firing", "resolved"}, + }) + + require.NoError(t, err) + assert.Equal(t, []string{AlertStateFiring, AlertStateResolved}, config.Statuses) +} + +type setupWebhookContext struct { + url string +} + +func (s *setupWebhookContext) GetSecret() ([]byte, error) { + return nil, nil +} + +func (s *setupWebhookContext) ResetSecret() ([]byte, []byte, error) { + return nil, nil, nil +} + +func (s *setupWebhookContext) Setup() (string, error) { + return s.url, nil +} + +func (s *setupWebhookContext) GetBaseURL() string { + return "https://superplane.example.com/api/v1" +} diff --git a/pkg/integrations/prometheus/prometheus.go b/pkg/integrations/prometheus/prometheus.go new file mode 100644 index 0000000000..0d3c521d29 --- /dev/null +++ b/pkg/integrations/prometheus/prometheus.go @@ -0,0 +1,223 @@ +package prometheus + +import ( + "fmt" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/registry" +) + +const ( + AuthTypeNone = "none" + AuthTypeBasic = "basic" + AuthTypeBearer = "bearer" + + AlertStateAny = "any" + AlertStateFiring = "firing" + AlertStateResolved = "resolved" + AlertStatePending = "pending" + AlertStateInactive = "inactive" + + PrometheusAlertPayloadType = "prometheus.alert" +) + +func init() { + registry.RegisterIntegrationWithWebhookHandler("prometheus", &Prometheus{}, &PrometheusWebhookHandler{}) +} + +type Prometheus struct{} + +type Configuration struct { + BaseURL string `json:"baseURL" mapstructure:"baseURL"` + AuthType string `json:"authType" mapstructure:"authType"` + Username string `json:"username,omitempty" mapstructure:"username"` + Password string `json:"password,omitempty" mapstructure:"password"` + BearerToken string `json:"bearerToken,omitempty" mapstructure:"bearerToken"` + 
WebhookBearerToken string `json:"webhookBearerToken,omitempty" mapstructure:"webhookBearerToken"` +} + +type Metadata struct{} + +func (p *Prometheus) Name() string { + return "prometheus" +} + +func (p *Prometheus) Label() string { + return "Prometheus" +} + +func (p *Prometheus) Icon() string { + return "prometheus" +} + +func (p *Prometheus) Description() string { + return "Monitor alerts from Prometheus and Alertmanager" +} + +func (p *Prometheus) Instructions() string { + return `### Connection + +Configure this integration with: +- **Prometheus Base URL**: URL of your Prometheus server (e.g., ` + "`https://prometheus.example.com`" + `) +- **API Auth**: ` + "`none`" + `, ` + "`basic`" + `, or ` + "`bearer`" + ` for Prometheus API requests +- **Webhook Secret** (recommended): If set, Alertmanager must send ` + "`Authorization: Bearer `" + ` on webhook requests + +### Alertmanager Setup (manual) + +The trigger setup panel in SuperPlane shows the generated webhook URL. +Use the On Alert trigger setup instructions in the workflow sidebar for the exact ` + "`alertmanager.yml`" + ` snippet. 
+ +After editing config, reload Alertmanager (for example ` + "`POST /-/reload`" + ` when lifecycle reload is enabled).` +} + +func (p *Prometheus) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "baseURL", + Label: "Prometheus Base URL", + Type: configuration.FieldTypeString, + Required: true, + Placeholder: "https://prometheus.example.com", + Description: "Base URL for Prometheus HTTP API", + }, + { + Name: "authType", + Label: "API Auth Type", + Type: configuration.FieldTypeSelect, + Required: true, + Default: AuthTypeNone, + TypeOptions: &configuration.TypeOptions{ + Select: &configuration.SelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "None", Value: AuthTypeNone}, + {Label: "Basic", Value: AuthTypeBasic}, + {Label: "Bearer", Value: AuthTypeBearer}, + }, + }, + }, + }, + { + Name: "username", + Label: "Username", + Type: configuration.FieldTypeString, + Required: false, + VisibilityConditions: []configuration.VisibilityCondition{ + {Field: "authType", Values: []string{AuthTypeBasic}}, + }, + }, + { + Name: "password", + Label: "Password", + Type: configuration.FieldTypeString, + Required: false, + Sensitive: true, + VisibilityConditions: []configuration.VisibilityCondition{ + {Field: "authType", Values: []string{AuthTypeBasic}}, + }, + }, + { + Name: "bearerToken", + Label: "Bearer Token", + Type: configuration.FieldTypeString, + Required: false, + Sensitive: true, + VisibilityConditions: []configuration.VisibilityCondition{ + {Field: "authType", Values: []string{AuthTypeBearer}}, + }, + }, + { + Name: "webhookBearerToken", + Label: "Webhook Secret", + Type: configuration.FieldTypeString, + Required: false, + Sensitive: true, + Description: "Secret required by incoming Alertmanager webhooks. 
Recommended for production environments.", + }, + } +} + +func (p *Prometheus) Components() []core.Component { + return []core.Component{ + &GetAlert{}, + } +} + +func (p *Prometheus) Triggers() []core.Trigger { + return []core.Trigger{ + &OnAlert{}, + } +} + +func (p *Prometheus) Sync(ctx core.SyncContext) error { + config := Configuration{} + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if err := validateIntegrationConfiguration(config); err != nil { + return err + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return fmt.Errorf("failed to create Prometheus client: %w", err) + } + + if _, err := client.Query("up"); err != nil { + if _, fallbackErr := client.GetAlertsFromPrometheus(); fallbackErr != nil { + return fmt.Errorf("error validating connection: query failed (%v), alerts failed (%v)", err, fallbackErr) + } + } + + ctx.Integration.SetMetadata(Metadata{}) + ctx.Integration.Ready() + return nil +} + +func validateIntegrationConfiguration(config Configuration) error { + if config.BaseURL == "" { + return fmt.Errorf("baseURL is required") + } + + authType := config.AuthType + switch authType { + case AuthTypeNone: + case AuthTypeBasic: + if config.Username == "" { + return fmt.Errorf("username is required when authType is basic") + } + if config.Password == "" { + return fmt.Errorf("password is required when authType is basic") + } + case AuthTypeBearer: + if config.BearerToken == "" { + return fmt.Errorf("bearerToken is required when authType is bearer") + } + default: + return fmt.Errorf("authType must be one of: none, basic, bearer") + } + + return nil +} + +func (p *Prometheus) Cleanup(ctx core.IntegrationCleanupContext) error { + return nil +} + +func (p *Prometheus) HandleRequest(ctx core.HTTPRequestContext) { + // no-op +} + +func (p *Prometheus) ListResources(resourceType string, ctx core.ListResourcesContext) 
([]core.IntegrationResource, error) { + return []core.IntegrationResource{}, nil +} + +func (p *Prometheus) Actions() []core.Action { + return []core.Action{} +} + +func (p *Prometheus) HandleAction(ctx core.IntegrationActionContext) error { + return nil +} diff --git a/pkg/integrations/prometheus/prometheus_test.go b/pkg/integrations/prometheus/prometheus_test.go new file mode 100644 index 0000000000..96d37a7d1e --- /dev/null +++ b/pkg/integrations/prometheus/prometheus_test.go @@ -0,0 +1,116 @@ +package prometheus + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__Prometheus__Sync(t *testing.T) { + integration := &Prometheus{} + + t.Run("missing baseURL returns error", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypeNone, + }, + } + + err := integration.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + Integration: integrationCtx, + }) + + require.ErrorContains(t, err, "baseURL is required") + }) + + t.Run("missing basic auth values returns error", func(t *testing.T) { + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": AuthTypeBasic, + }, + } + + err := integration.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + Integration: integrationCtx, + }) + + require.ErrorContains(t, err, "username is required when authType is basic") + }) + + t.Run("successful sync sets ready state", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + {"status":"success","data":{"resultType":"vector","result":[]}} + `)), + }, + }, + } + + integrationCtx := 
&contexts.IntegrationContext{ + Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": AuthTypeBearer, + "bearerToken": "token-123", + "webhookBearerToken": "wh-token", + }, + } + + err := integration.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + HTTP: httpCtx, + Integration: integrationCtx, + }) + + require.NoError(t, err) + assert.Equal(t, "ready", integrationCtx.State) + require.Len(t, httpCtx.Requests, 1) + assert.Contains(t, httpCtx.Requests[0].URL.String(), "/api/v1/query?query=up") + assert.Equal(t, "Bearer token-123", httpCtx.Requests[0].Header.Get("Authorization")) + }) + + t.Run("query fails and alerts fallback succeeds", func(t *testing.T) { + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusBadRequest, + Body: io.NopCloser(strings.NewReader(`{"status":"error","errorType":"bad_data","error":"parse error"}`)), + }, + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"status":"success","data":{"alerts":[]}}`)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{ + "baseURL": "https://prometheus.example.com", + "authType": AuthTypeNone, + }, + } + + err := integration.Sync(core.SyncContext{ + Configuration: integrationCtx.Configuration, + HTTP: httpCtx, + Integration: integrationCtx, + }) + + require.NoError(t, err) + assert.Equal(t, "ready", integrationCtx.State) + require.Len(t, httpCtx.Requests, 2) + assert.Contains(t, httpCtx.Requests[0].URL.String(), "/api/v1/query") + assert.Contains(t, httpCtx.Requests[1].URL.String(), "/api/v1/alerts") + }) +} diff --git a/pkg/integrations/prometheus/webhook_handler.go b/pkg/integrations/prometheus/webhook_handler.go new file mode 100644 index 0000000000..94088314c8 --- /dev/null +++ b/pkg/integrations/prometheus/webhook_handler.go @@ -0,0 +1,21 @@ +package prometheus + +import "github.com/superplanehq/superplane/pkg/core" + +type 
PrometheusWebhookHandler struct{} + +func (h *PrometheusWebhookHandler) CompareConfig(a any, b any) (bool, error) { + return true, nil +} + +func (h *PrometheusWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { + return nil, nil +} + +func (h *PrometheusWebhookHandler) Cleanup(ctx core.WebhookHandlerContext) error { + return nil +} + +func (h *PrometheusWebhookHandler) Merge(current, requested any) (any, bool, error) { + return current, false, nil +} diff --git a/pkg/integrations/prometheus/webhook_handler_test.go b/pkg/integrations/prometheus/webhook_handler_test.go new file mode 100644 index 0000000000..3fd1d89a6f --- /dev/null +++ b/pkg/integrations/prometheus/webhook_handler_test.go @@ -0,0 +1,53 @@ +package prometheus + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +type testWebhookContext struct { + secret []byte + configuration any +} + +func (t *testWebhookContext) GetID() string { return "wh_123" } +func (t *testWebhookContext) GetURL() string { return "https://example.com/webhook" } +func (t *testWebhookContext) GetSecret() ([]byte, error) { return t.secret, nil } +func (t *testWebhookContext) GetMetadata() any { return nil } +func (t *testWebhookContext) GetConfiguration() any { return t.configuration } +func (t *testWebhookContext) SetSecret(secret []byte) error { t.secret = secret; return nil } + +func Test__PrometheusWebhookHandler__CompareConfig(t *testing.T) { + handler := &PrometheusWebhookHandler{} + + equal, err := handler.CompareConfig(struct{}{}, struct{}{}) + require.NoError(t, err) + assert.True(t, equal) + + equal, err = handler.CompareConfig(map[string]any{"a": "b"}, map[string]any{"x": "y"}) + require.NoError(t, err) + assert.True(t, equal) +} + +func Test__PrometheusWebhookHandler__Setup(t *testing.T) { + handler := &PrometheusWebhookHandler{} + 
webhookCtx := &testWebhookContext{configuration: struct{}{}} + integrationCtx := &contexts.IntegrationContext{} + + _, err := handler.Setup(core.WebhookHandlerContext{ + Webhook: webhookCtx, + Integration: integrationCtx, + }) + require.NoError(t, err) + assert.Empty(t, webhookCtx.secret) +} + +func Test__PrometheusWebhookHandler__Cleanup(t *testing.T) { + handler := &PrometheusWebhookHandler{} + err := handler.Cleanup(core.WebhookHandlerContext{}) + require.NoError(t, err) +} diff --git a/pkg/server/server.go b/pkg/server/server.go index b38e2a295c..2ad5d330be 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -48,6 +48,7 @@ import ( _ "github.com/superplanehq/superplane/pkg/integrations/jira" _ "github.com/superplanehq/superplane/pkg/integrations/openai" _ "github.com/superplanehq/superplane/pkg/integrations/pagerduty" + _ "github.com/superplanehq/superplane/pkg/integrations/prometheus" _ "github.com/superplanehq/superplane/pkg/integrations/render" _ "github.com/superplanehq/superplane/pkg/integrations/rootly" _ "github.com/superplanehq/superplane/pkg/integrations/semaphore" diff --git a/web_src/src/assets/icons/integrations/prometheus.svg b/web_src/src/assets/icons/integrations/prometheus.svg new file mode 100644 index 0000000000..5c51f66d90 --- /dev/null +++ b/web_src/src/assets/icons/integrations/prometheus.svg @@ -0,0 +1,50 @@ + + + +image/svg+xml \ No newline at end of file diff --git a/web_src/src/pages/workflowv2/mappers/index.ts b/web_src/src/pages/workflowv2/mappers/index.ts index 41673f72b5..00a6d89d2c 100644 --- a/web_src/src/pages/workflowv2/mappers/index.ts +++ b/web_src/src/pages/workflowv2/mappers/index.ts @@ -106,6 +106,12 @@ import { triggerRenderers as claudeTriggerRenderers, eventStateRegistry as claudeEventStateRegistry, } from "./claude/index"; +import { + componentMappers as prometheusComponentMappers, + customFieldRenderers as prometheusCustomFieldRenderers, + triggerRenderers as prometheusTriggerRenderers, + eventStateRegistry 
as prometheusEventStateRegistry, +} from "./prometheus/index"; import { componentMappers as cursorComponentMappers, triggerRenderers as cursorTriggerRenderers, @@ -172,6 +178,7 @@ const appMappers: Record> = { openai: openaiComponentMappers, circleci: circleCIComponentMappers, claude: claudeComponentMappers, + prometheus: prometheusComponentMappers, cursor: cursorComponentMappers, dockerhub: dockerhubComponentMappers, grafana: grafanaComponentMappers, @@ -196,6 +203,7 @@ const appTriggerRenderers: Record> = { openai: openaiTriggerRenderers, circleci: circleCITriggerRenderers, claude: claudeTriggerRenderers, + prometheus: prometheusTriggerRenderers, cursor: cursorTriggerRenderers, dockerhub: dockerhubTriggerRenderers, grafana: grafanaTriggerRenderers, @@ -219,6 +227,7 @@ const appEventStateRegistries: Record circleci: circleCIEventStateRegistry, claude: claudeEventStateRegistry, aws: awsEventStateRegistry, + prometheus: prometheusEventStateRegistry, cursor: cursorEventStateRegistry, gitlab: gitlabEventStateRegistry, dockerhub: dockerhubEventStateRegistry, @@ -248,6 +257,7 @@ const customFieldRenderers: Record = { const appCustomFieldRenderers: Record> = { github: githubCustomFieldRenderers, + prometheus: prometheusCustomFieldRenderers, dockerhub: dockerhubCustomFieldRenderers, }; diff --git a/web_src/src/pages/workflowv2/mappers/prometheus/base.ts b/web_src/src/pages/workflowv2/mappers/prometheus/base.ts new file mode 100644 index 0000000000..54cabf772a --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/prometheus/base.ts @@ -0,0 +1,151 @@ +import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { MetadataItem } from "@/ui/metadataList"; +import { getBackgroundColorClass, getColorClass } from "@/utils/colors"; +import { formatTimeAgo } from "@/utils/date"; +import prometheusIcon from "@/assets/icons/integrations/prometheus.svg"; +import { getState, getStateMap, getTriggerRenderer } from ".."; +import { + ComponentBaseContext, + 
ComponentBaseMapper, + ExecutionDetailsContext, + ExecutionInfo, + NodeInfo, + OutputPayload, + SubtitleContext, +} from "../types"; +import { GetAlertConfiguration, PrometheusAlertPayload } from "./types"; + +export const baseAlertMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + return buildBaseProps(context.nodes, context.node, context.componentDefinition, context.lastExecutions); + }, + + subtitle(context: SubtitleContext): string { + if (!context.execution.createdAt) { + return ""; + } + + return formatTimeAgo(new Date(context.execution.createdAt)); + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const details: Record = {}; + + if (context.execution.createdAt) { + details["Retrieved At"] = new Date(context.execution.createdAt).toLocaleString(); + } + + if (!outputs || !outputs.default || outputs.default.length === 0) { + return details; + } + + const alert = outputs.default[0].data as PrometheusAlertPayload; + return { + ...details, + ...getDetailsForAlert(alert), + }; + }, +}; + +export function buildBaseProps( + nodes: NodeInfo[], + node: NodeInfo, + componentDefinition: { name: string; label: string; color: string }, + lastExecutions: ExecutionInfo[], +): ComponentBaseProps { + const lastExecution = lastExecutions.length > 0 ? lastExecutions[0] : null; + const componentName = componentDefinition.name || node.componentName || "unknown"; + + return { + iconSrc: prometheusIcon, + iconColor: getColorClass(componentDefinition.color), + collapsedBackground: getBackgroundColorClass(componentDefinition.color), + collapsed: node.isCollapsed, + title: node.name || componentDefinition.label || "Unnamed component", + eventSections: lastExecution ? 
buildEventSections(nodes, lastExecution, componentName) : undefined, + metadata: getMetadata(node), + includeEmptyState: !lastExecution, + eventStateMap: getStateMap(componentName), + }; +} + +export function getDetailsForAlert(alert: PrometheusAlertPayload): Record { + const details: Record = {}; + + details["Alert Name"] = alert?.labels?.alertname || "-"; + details["State"] = alert?.status || "-"; + + if (alert?.labels?.instance) { + details["Instance"] = alert.labels.instance; + } + + if (alert?.labels?.job) { + details["Job"] = alert.labels.job; + } + + if (alert?.annotations?.summary) { + details["Summary"] = alert.annotations.summary; + } + + if (alert?.annotations?.description) { + details["Description"] = alert.annotations.description; + } + + if (alert?.startsAt) { + details["Starts At"] = new Date(alert.startsAt).toLocaleString(); + } + + if (alert?.endsAt && alert.endsAt !== "0001-01-01T00:00:00Z") { + details["Ends At"] = new Date(alert.endsAt).toLocaleString(); + } + + if (alert?.value) { + details["Value"] = alert.value; + } + + if (alert?.generatorURL) { + details["Generator URL"] = alert.generatorURL; + } + + if (alert?.fingerprint) { + details["Fingerprint"] = alert.fingerprint; + } + + if (alert?.externalURL) { + details["Alertmanager URL"] = alert.externalURL; + } + + return details; +} + +function getMetadata(node: NodeInfo): MetadataItem[] { + const metadata: MetadataItem[] = []; + const configuration = node.configuration as GetAlertConfiguration | undefined; + + if (configuration?.alertName) { + metadata.push({ icon: "bell", label: configuration.alertName }); + } + + if (configuration?.state && configuration.state !== "any") { + metadata.push({ icon: "funnel", label: `State: ${configuration.state}` }); + } + + return metadata.slice(0, 3); +} + +function buildEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + 
const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle: execution.createdAt ? formatTimeAgo(new Date(execution.createdAt)) : "", + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/prometheus/get_alert.ts b/web_src/src/pages/workflowv2/mappers/prometheus/get_alert.ts new file mode 100644 index 0000000000..e581b31c5b --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/prometheus/get_alert.ts @@ -0,0 +1,6 @@ +import { ComponentBaseMapper } from "../types"; +import { baseAlertMapper } from "./base"; + +export const getAlertMapper: ComponentBaseMapper = { + ...baseAlertMapper, +}; diff --git a/web_src/src/pages/workflowv2/mappers/prometheus/index.ts b/web_src/src/pages/workflowv2/mappers/prometheus/index.ts new file mode 100644 index 0000000000..418030ea1a --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/prometheus/index.ts @@ -0,0 +1,20 @@ +import { ComponentBaseMapper, CustomFieldRenderer, EventStateRegistry, TriggerRenderer } from "../types"; +import { getAlertMapper } from "./get_alert"; +import { onAlertCustomFieldRenderer, onAlertTriggerRenderer } from "./on_alert"; +import { buildActionStateRegistry } from "../utils"; + +export const componentMappers: Record = { + getAlert: getAlertMapper, +}; + +export const triggerRenderers: Record = { + onAlert: onAlertTriggerRenderer, +}; + +export const customFieldRenderers: Record = { + onAlert: onAlertCustomFieldRenderer, +}; + +export const eventStateRegistry: Record = { + getAlert: buildActionStateRegistry("retrieved"), +}; diff --git a/web_src/src/pages/workflowv2/mappers/prometheus/on_alert.tsx b/web_src/src/pages/workflowv2/mappers/prometheus/on_alert.tsx new file mode 100644 index 
0000000000..8d467e8b0c --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/prometheus/on_alert.tsx @@ -0,0 +1,205 @@ +import { CustomFieldRenderer, NodeInfo, TriggerEventContext, TriggerRenderer, TriggerRendererContext } from "../types"; +import { TriggerProps } from "@/ui/trigger"; +import { getBackgroundColorClass, getColorClass } from "@/utils/colors"; +import { formatTimeAgo } from "@/utils/date"; +import prometheusIcon from "@/assets/icons/integrations/prometheus.svg"; +import { getDetailsForAlert } from "./base"; +import { OnAlertConfiguration, OnAlertMetadata, PrometheusAlertPayload } from "./types"; + +const statusLabels: Record = { + firing: "Firing", + resolved: "Resolved", +}; + +export const onAlertTriggerRenderer: TriggerRenderer = { + getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { + const eventData = context.event?.data as PrometheusAlertPayload; + const title = buildEventTitle(eventData); + const subtitle = buildEventSubtitle(eventData, context.event?.createdAt); + + return { + title, + subtitle, + }; + }, + + getRootEventValues: (context: TriggerEventContext): Record => { + const eventData = context.event?.data as PrometheusAlertPayload; + return getDetailsForAlert(eventData); + }, + + getTriggerProps: (context: TriggerRendererContext): TriggerProps => { + const { node, definition, lastEvent } = context; + const configuration = node.configuration as OnAlertConfiguration | undefined; + const metadataItems = []; + const metadata = node.metadata as OnAlertMetadata | undefined; + + if (configuration?.statuses && configuration.statuses.length > 0) { + const formattedStatuses = configuration.statuses + .map((status) => statusLabels[status] || status) + .filter((status, index, values) => values.indexOf(status) === index); + + metadataItems.push({ + icon: "funnel", + label: `Statuses: ${formattedStatuses.join(", ")}`, + }); + } + + if (configuration?.alertNames && configuration.alertNames.length > 0) { + 
const alertNames = configuration.alertNames.filter((value) => value.trim().length > 0); + if (alertNames.length > 0) { + metadataItems.push({ + icon: "bell", + label: + alertNames.length > 3 + ? `Alert Names: ${alertNames.length} selected` + : `Alert Names: ${alertNames.join(", ")}`, + }); + } + } + + if (metadata?.webhookAuthEnabled) { + metadataItems.push({ + icon: "lock", + label: "Webhook Auth: Bearer", + }); + } + + const props: TriggerProps = { + title: node.name || definition.label || "Unnamed trigger", + iconSrc: prometheusIcon, + iconColor: getColorClass(definition.color), + collapsedBackground: getBackgroundColorClass(definition.color), + metadata: metadataItems.slice(0, 3), + }; + + if (lastEvent) { + const eventData = lastEvent.data as PrometheusAlertPayload; + props.lastEventData = { + title: buildEventTitle(eventData), + subtitle: buildEventSubtitle(eventData, lastEvent.createdAt), + receivedAt: new Date(lastEvent.createdAt), + state: "triggered", + eventId: lastEvent.id, + }; + } + + return props; + }, +}; + +export const onAlertCustomFieldRenderer: CustomFieldRenderer = { + render: (node: NodeInfo) => { + const metadata = node.metadata as OnAlertMetadata | undefined; + const webhookUrl = metadata?.webhookUrl || "[URL GENERATED ONCE THE CANVAS IS SAVED]"; + const webhookAuthEnabled = metadata?.webhookAuthEnabled || false; + const alertmanagerSnippet = buildAlertmanagerSnippet(webhookUrl, webhookAuthEnabled); + const authHint = buildAuthHint(webhookAuthEnabled); + + return ( +
    +
    +
    + Alertmanager Webhook Setup +
    +
      +
    1. Save the canvas to generate the webhook URL.
    2. +
    3. Copy the receiver snippet below into your `alertmanager.yml`.
    4. +
    5. Reload Alertmanager config (for example, POST /-/reload when lifecycle reload is enabled).
    6. +
    +

    + Receiver provisioning in upstream Alertmanager is config-based, so SuperPlane does not create receivers + by API. +

    +

    {authHint}

    +
    + Webhook URL +
    +                  {webhookUrl}
    +                
    +
    +
    + alertmanager.yml Snippet +
    +                  {alertmanagerSnippet}
    +                
    +
    +
    +
    + + Reload Alertmanager config + +
    +                    curl -X POST https://alertmanager.example.com/-/reload
    +                  
    +
    +
    +
    +
    +
    +
    + ); + }, +}; + +function buildEventTitle(eventData: PrometheusAlertPayload): string { + const alertName = eventData?.labels?.alertname || "Alert"; + const sourceParts = [eventData?.labels?.instance, eventData?.labels?.job].filter(Boolean); + + if (sourceParts.length > 0) { + return `Alert ${eventData?.status} · ${alertName} · ${sourceParts.join(" · ")}`; + } + + return `Alert ${eventData?.status} · ${alertName}`; +} + +function buildEventSubtitle(eventData: PrometheusAlertPayload, createdAt?: string): string { + const parts: string[] = []; + + const severity = eventData?.labels?.severity; + if (severity) { + parts.push(severity); + } + + if (createdAt) { + parts.push(formatTimeAgo(new Date(createdAt))); + } + + return parts.join(" · "); +} + +function buildAuthHint(webhookAuthEnabled: boolean): string { + if (webhookAuthEnabled) { + return "Use the same value from SuperPlane integration field Webhook Secret in Alertmanager http_config.authorization.credentials."; + } + + return "Webhook bearer auth is disabled, so no auth block is needed in Alertmanager."; +} + +function buildAlertmanagerSnippet(webhookUrl: string, webhookAuthEnabled: boolean): string { + if (webhookAuthEnabled) { + return `receivers: + - name: superplane + webhook_configs: + - url: ${webhookUrl} + send_resolved: true + http_config: + authorization: + type: Bearer + credentials: + +route: + receiver: superplane + # ... other config ...`; + } + + return `receivers: + - name: superplane + webhook_configs: + - url: ${webhookUrl} + send_resolved: true + +route: + receiver: superplane + # ... 
other config ...`; +} diff --git a/web_src/src/pages/workflowv2/mappers/prometheus/types.ts b/web_src/src/pages/workflowv2/mappers/prometheus/types.ts new file mode 100644 index 0000000000..ba0f4e915d --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/prometheus/types.ts @@ -0,0 +1,31 @@ +export interface PrometheusAlertPayload { + status?: string; + labels?: Record; + annotations?: Record; + startsAt?: string; + endsAt?: string; + value?: string; + generatorURL?: string; + fingerprint?: string; + receiver?: string; + groupKey?: string; + groupLabels?: Record; + commonLabels?: Record; + commonAnnotations?: Record; + externalURL?: string; +} + +export interface OnAlertConfiguration { + statuses?: string[]; + alertNames?: string[]; +} + +export interface OnAlertMetadata { + webhookUrl?: string; + webhookAuthEnabled?: boolean; +} + +export interface GetAlertConfiguration { + alertName?: string; + state?: string; +} diff --git a/web_src/src/ui/BuildingBlocksSidebar/index.tsx b/web_src/src/ui/BuildingBlocksSidebar/index.tsx index c2c4344dae..38d42dd310 100644 --- a/web_src/src/ui/BuildingBlocksSidebar/index.tsx +++ b/web_src/src/ui/BuildingBlocksSidebar/index.tsx @@ -35,6 +35,7 @@ import awsCloudwatchIcon from "@/assets/icons/integrations/aws.cloudwatch.svg"; import rootlyIcon from "@/assets/icons/integrations/rootly.svg"; import SemaphoreLogo from "@/assets/semaphore-logo-sign-black.svg"; import sendgridIcon from "@/assets/icons/integrations/sendgrid.svg"; +import prometheusIcon from "@/assets/icons/integrations/prometheus.svg"; import renderIcon from "@/assets/icons/integrations/render.svg"; import dockerIcon from "@/assets/icons/integrations/docker.svg"; @@ -414,6 +415,7 @@ function CategorySection({ semaphore: SemaphoreLogo, slack: slackIcon, sendgrid: sendgridIcon, + prometheus: prometheusIcon, render: renderIcon, dockerhub: dockerIcon, aws: { @@ -490,6 +492,7 @@ function CategorySection({ semaphore: SemaphoreLogo, slack: slackIcon, sendgrid: sendgridIcon, + 
prometheus: prometheusIcon, render: renderIcon, dockerhub: dockerIcon, aws: { diff --git a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index 48d2f7c91d..fbfa0360e4 100644 --- a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -21,6 +21,7 @@ import slackIcon from "@/assets/icons/integrations/slack.svg"; import smtpIcon from "@/assets/icons/integrations/smtp.svg"; import SemaphoreLogo from "@/assets/semaphore-logo-sign-black.svg"; import sendgridIcon from "@/assets/icons/integrations/sendgrid.svg"; +import prometheusIcon from "@/assets/icons/integrations/prometheus.svg"; import renderIcon from "@/assets/icons/integrations/render.svg"; import dockerIcon from "@/assets/icons/integrations/docker.svg"; @@ -46,6 +47,7 @@ export const INTEGRATION_APP_LOGO_MAP: Record = { slack: slackIcon, smtp: smtpIcon, sendgrid: sendgridIcon, + prometheus: prometheusIcon, render: renderIcon, dockerhub: dockerIcon, }; @@ -70,6 +72,7 @@ export const APP_LOGO_MAP: Record> = { semaphore: SemaphoreLogo, slack: slackIcon, sendgrid: sendgridIcon, + prometheus: prometheusIcon, render: renderIcon, dockerhub: dockerIcon, aws: { From 3d9dff172274aefcfe3058139a5f104a79ff3370 Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Thu, 12 Feb 2026 20:43:02 -0300 Subject: [PATCH 065/160] feat: GitLab triggers for merge requests, milestones, tags, releases and vulnerabilities (#3084) New GitLab triggers for multiple resources: - gitlab.onMergeRequest - for receiving events about merge requests - gitlab.onMilestone - for receiving events about milestones - gitlab.onTag - for receiving events about tags - gitlab.onRelease - for receiving events about releases - gitlab.onVulnerability - for receiving events about vulnerabilities --------- Signed-off-by: Cursor Agent Signed-off-by: Lucas Pinheiro Co-authored-by: Cursor Agent Signed-off-by: Muhammad Fuzail Zubari --- 
docs/components/GitLab.mdx | 391 ++++++++++++++++++ pkg/integrations/gitlab/example.go | 50 +++ .../gitlab/example_data_on_merge_request.json | 77 ++++ .../gitlab/example_data_on_milestone.json | 40 ++ .../gitlab/example_data_on_release.json | 61 +++ .../gitlab/example_data_on_tag.json | 44 ++ .../gitlab/example_data_on_vulnerability.json | 65 +++ pkg/integrations/gitlab/gitlab.go | 5 + pkg/integrations/gitlab/gitlab_test.go | 13 + pkg/integrations/gitlab/hooks.go | 6 + pkg/integrations/gitlab/on_merge_request.go | 174 ++++++++ .../gitlab/on_merge_request_test.go | 111 +++++ pkg/integrations/gitlab/on_milestone.go | 183 ++++++++ pkg/integrations/gitlab/on_milestone_test.go | 70 ++++ pkg/integrations/gitlab/on_release.go | 182 ++++++++ pkg/integrations/gitlab/on_release_test.go | 50 +++ pkg/integrations/gitlab/on_tag.go | 169 ++++++++ pkg/integrations/gitlab/on_tag_test.go | 86 ++++ pkg/integrations/gitlab/on_vulnerability.go | 125 ++++++ .../gitlab/on_vulnerability_test.go | 48 +++ pkg/integrations/gitlab/webhook_handler.go | 4 + .../pages/workflowv2/mappers/gitlab/index.ts | 10 + .../mappers/gitlab/on_merge_request.ts | 115 ++++++ .../workflowv2/mappers/gitlab/on_milestone.ts | 109 +++++ .../workflowv2/mappers/gitlab/on_release.ts | 104 +++++ .../pages/workflowv2/mappers/gitlab/on_tag.ts | 99 +++++ .../mappers/gitlab/on_vulnerability.ts | 90 ++++ 27 files changed, 2481 insertions(+) create mode 100644 pkg/integrations/gitlab/example_data_on_merge_request.json create mode 100644 pkg/integrations/gitlab/example_data_on_milestone.json create mode 100644 pkg/integrations/gitlab/example_data_on_release.json create mode 100644 pkg/integrations/gitlab/example_data_on_tag.json create mode 100644 pkg/integrations/gitlab/example_data_on_vulnerability.json create mode 100644 pkg/integrations/gitlab/on_merge_request.go create mode 100644 pkg/integrations/gitlab/on_merge_request_test.go create mode 100644 pkg/integrations/gitlab/on_milestone.go create mode 100644 
pkg/integrations/gitlab/on_milestone_test.go create mode 100644 pkg/integrations/gitlab/on_release.go create mode 100644 pkg/integrations/gitlab/on_release_test.go create mode 100644 pkg/integrations/gitlab/on_tag.go create mode 100644 pkg/integrations/gitlab/on_tag_test.go create mode 100644 pkg/integrations/gitlab/on_vulnerability.go create mode 100644 pkg/integrations/gitlab/on_vulnerability_test.go create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/on_merge_request.ts create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/on_milestone.ts create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/on_release.ts create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/on_tag.ts create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/on_vulnerability.ts diff --git a/docs/components/GitLab.mdx b/docs/components/GitLab.mdx index 3e56b7ae24..4264d7f665 100644 --- a/docs/components/GitLab.mdx +++ b/docs/components/GitLab.mdx @@ -8,6 +8,11 @@ Manage and react to changes in your GitLab repositories + + + + + import { CardGrid, LinkCard } from "@astrojs/starlight/components"; @@ -126,6 +131,392 @@ This trigger automatically sets up a GitLab webhook when configured. The webhook } ``` + + +## On Merge Request + +The On Merge Request trigger starts a workflow execution when merge request events occur in a GitLab project. + +### Configuration + +- **Project** (required): GitLab project to monitor +- **Actions** (required): Select which merge request actions to listen for (open, close, merge, etc.). Default: open. 
+ +### Outputs + +- **Default channel**: Emits merge request payload data with action, project, and object attributes + +### Example Data + +```json +{ + "data": { + "assignees": [ + { + "avatar_url": "https://www.gravatar.com/avatar/ab12cd34?s=80\u0026d=identicon", + "email": "jrivera@example.com", + "id": 4, + "name": "Jamie Rivera", + "username": "jrivera" + } + ], + "changes": { + "title": { + "current": "Add merge request trigger", + "previous": "Add trigger" + } + }, + "event_type": "merge_request", + "labels": [ + { + "id": 101, + "title": "backend" + } + ], + "object_attributes": { + "action": "open", + "description": "Adds support for additional GitLab webhook trigger types.", + "id": 93, + "iid": 12, + "state": "opened", + "title": "Add merge request trigger" + }, + "object_kind": "merge_request", + "project": { + "avatar_url": null, + "ci_config_path": null, + "default_branch": "main", + "description": "Project used to demonstrate merge request webhook payloads.", + "git_http_url": "https://gitlab.example.com/group/example.git", + "git_ssh_url": "ssh://git@gitlab.example.com:group/example.git", + "id": 1, + "name": "Example Project", + "namespace": "group", + "path_with_namespace": "group/example", + "visibility_level": 20, + "web_url": "https://gitlab.example.com/group/example" + }, + "repository": { + "description": "Project used to demonstrate merge request webhook payloads.", + "git_http_url": "https://gitlab.example.com/group/example.git", + "git_ssh_url": "ssh://git@gitlab.example.com:group/example.git", + "homepage": "https://gitlab.example.com/group/example", + "name": "Example Project", + "url": "ssh://git@gitlab.example.com/group/example.git", + "visibility_level": 20 + }, + "reviewers": [ + { + "avatar_url": "https://www.gravatar.com/avatar/ef56gh78?s=80\u0026d=identicon", + "email": "mlee@example.com", + "id": 6, + "name": "Morgan Lee", + "state": "unreviewed", + "username": "mlee" + } + ], + "user": { + "avatar_url": 
"https://www.gravatar.com/avatar/1a29da0ccd099482194440fac762f5ccb4ec53227761d1859979367644a889a5?s=80\u0026d=identicon", + "email": "agarcia@example.com", + "id": 1, + "name": "Alex Garcia", + "username": "agarcia" + } + }, + "timestamp": "2026-02-12T20:40:00.000000000Z", + "type": "gitlab.mergeRequest" +} +``` + + + +## On Milestone + +The On Milestone trigger starts a workflow execution when milestone events occur in a GitLab project. + +### Configuration + +- **Project** (required): GitLab project to monitor +- **Actions** (required): Select which milestone actions to listen for. Default: create. + +### Outputs + +- **Default channel**: Emits milestone payload data with action, project, and object attributes + +### Example Data + +```json +{ + "data": { + "action": "create", + "event_type": "milestone", + "object_attributes": { + "created_at": "2025-06-16 14:10:57 UTC", + "description": "First stable release", + "due_date": "2025-06-30", + "group_id": null, + "id": 61, + "iid": 10, + "project_id": 1, + "start_date": "2025-06-16", + "state": "active", + "title": "v1.0", + "updated_at": "2025-06-16 14:10:57 UTC" + }, + "object_kind": "milestone", + "project": { + "avatar_url": null, + "ci_config_path": null, + "default_branch": "master", + "description": "Aut reprehenderit ut est.", + "git_http_url": "http://example.com/gitlabhq/gitlab-test.git", + "git_ssh_url": "git@example.com:gitlabhq/gitlab-test.git", + "homepage": "http://example.com/gitlabhq/gitlab-test", + "http_url": "http://example.com/gitlabhq/gitlab-test.git", + "id": 1, + "name": "Gitlab Test", + "namespace": "GitlabHQ", + "path_with_namespace": "gitlabhq/gitlab-test", + "ssh_url": "git@example.com:gitlabhq/gitlab-test.git", + "url": "http://example.com/gitlabhq/gitlab-test.git", + "visibility_level": 20, + "web_url": "http://example.com/gitlabhq/gitlab-test" + } + }, + "timestamp": "2026-02-12T20:40:00.000000000Z", + "type": "gitlab.milestone" +} +``` + + + +## On Release + +The On Release trigger 
starts a workflow execution when release events occur in a GitLab project. + +### Configuration + +- **Project** (required): GitLab project to monitor +- **Actions** (required): Select which release actions to listen for. Default: create. + +### Outputs + +- **Default channel**: Emits release payload data with action and release metadata + +### Example Data + +```json +{ + "data": { + "action": "create", + "assets": { + "count": 2, + "links": [ + { + "id": 1, + "link_type": "other", + "name": "Changelog", + "url": "https://example.net/changelog" + } + ], + "sources": [ + { + "format": "zip", + "url": "https://example.com/gitlab-org/release-webhook-example/-/archive/v1.1/release-webhook-example-v1.1.zip" + }, + { + "format": "tar.gz", + "url": "https://example.com/gitlab-org/release-webhook-example/-/archive/v1.1/release-webhook-example-v1.1.tar.gz" + } + ] + }, + "commit": { + "author": { + "email": "user@example.com", + "name": "Example User" + }, + "id": "ee0a3fb31ac16e11b9dbb596ad16d4af654d08f8", + "message": "Release v1.1", + "timestamp": "2020-10-31T14:58:32+11:00", + "title": "Release v1.1", + "url": "https://example.com/gitlab-org/release-webhook-example/-/commit/ee0a3fb31ac16e11b9dbb596ad16d4af654d08f8" + }, + "created_at": "2020-11-02 12:55:12 UTC", + "description": "v1.1 has been released", + "id": 1, + "name": "v1.1", + "object_kind": "release", + "project": { + "avatar_url": null, + "ci_config_path": null, + "default_branch": "master", + "description": "", + "git_http_url": "https://example.com/gitlab-org/release-webhook-example.git", + "git_ssh_url": "ssh://git@example.com/gitlab-org/release-webhook-example.git", + "id": 1, + "name": "release-webhook-example", + "namespace": "Gitlab", + "path_with_namespace": "gitlab-org/release-webhook-example", + "visibility_level": 0, + "web_url": "https://example.com/gitlab-org/release-webhook-example" + }, + "released_at": "2020-11-02 12:55:12 UTC", + "tag": "v1.1", + "url": 
"https://example.com/gitlab-org/release-webhook-example/-/releases/v1.1" + }, + "timestamp": "2026-02-12T20:40:00.000000000Z", + "type": "gitlab.release" +} +``` + + + +## On Tag + +The On Tag trigger starts a workflow execution when tag push events occur in a GitLab project. + +### Configuration + +- **Project** (required): GitLab project to monitor +- **Tags** (required): Configure tag filters using predicates. You can match full refs (refs/tags/v1.0.0) or tag names (v1.0.0). + +### Outputs + +- **Default channel**: Emits tag push payload data including ref, before/after SHA, and project information + +### Example Data + +```json +{ + "data": { + "after": "82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", + "before": "0000000000000000000000000000000000000000", + "checkout_sha": "82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", + "commits": [], + "event_name": "tag_push", + "message": "Tag message", + "object_kind": "tag_push", + "project": { + "avatar_url": null, + "ci_config_path": null, + "default_branch": "master", + "description": "", + "git_http_url": "http://example.com/jsmith/example.git", + "git_ssh_url": "git@example.com:jsmith/example.git", + "id": 1, + "name": "Example", + "namespace": "Jsmith", + "path_with_namespace": "jsmith/example", + "visibility_level": 0, + "web_url": "http://example.com/jsmith/example" + }, + "push_options": {}, + "ref": "refs/tags/v1.0.0", + "ref_protected": true, + "repository": { + "description": "", + "git_http_url": "http://example.com/jsmith/example.git", + "git_ssh_url": "git@example.com:jsmith/example.git", + "homepage": "http://example.com/jsmith/example", + "name": "Example", + "url": "ssh://git@example.com/jsmith/example.git", + "visibility_level": 0 + }, + "total_commits_count": 0, + "user_email": "john@example.com", + "user_id": 1, + "user_name": "John Smith", + "user_username": "jsmith" + }, + "timestamp": "2026-02-12T20:40:00.000000000Z", + "type": "gitlab.tag" +} +``` + + + +## On Vulnerability + +The On Vulnerability 
trigger starts a workflow execution when vulnerability events occur in a GitLab project. + +### Configuration + +- **Project** (required): GitLab project to monitor + +### Outputs + +- **Default channel**: Emits vulnerability payload data including severity, state, location, and linked issues + +### Example Data + +```json +{ + "data": { + "object_attributes": { + "auto_resolved": false, + "confidence": "unknown", + "confidence_overridden": false, + "confirmed_at": "2025-01-08T00:46:14.413Z", + "confirmed_by_id": 1, + "created_at": "2025-01-08T00:46:14.413Z", + "cvss": [ + { + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H", + "vendor": "NVD" + } + ], + "dismissed_at": null, + "dismissed_by_id": null, + "identifiers": [ + { + "external_id": "29dce398-220a-4315-8c84-16cd8b6d9b05", + "external_type": "gemnasium", + "name": "Gemnasium-29dce398-220a-4315-8c84-16cd8b6d9b05", + "url": "https://gitlab.com/gitlab-org/security-products/gemnasium-db/-/blob/master/gem/rexml/CVE-2024-41123.yml" + }, + { + "external_id": "CVE-2024-41123", + "external_type": "cve", + "name": "CVE-2024-41123", + "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-41123" + } + ], + "issues": [ + { + "created_at": "2025-01-08T00:46:14.429Z", + "title": "REXML ReDoS vulnerability", + "updated_at": "2025-01-08T00:46:14.429Z", + "url": "https://example.com/flightjs/Flight/-/issues/1" + } + ], + "location": { + "dependency": { + "package": { + "name": "rexml" + }, + "version": "3.3.1" + }, + "file": "Gemfile.lock" + }, + "project_id": 1, + "report_type": "dependency_scanning", + "resolved_at": null, + "resolved_by_id": null, + "resolved_on_default_branch": false, + "severity": "high", + "severity_overridden": false, + "state": "confirmed", + "title": "REXML DoS vulnerability", + "updated_at": "2025-01-08T00:46:14.413Z", + "url": "https://example.com/flightjs/Flight/-/security/vulnerabilities/1" + }, + "object_kind": "vulnerability" + }, + "timestamp": 
"2026-02-12T20:40:00.000000000Z", + "type": "gitlab.vulnerability" +} +``` + ## Create Issue diff --git a/pkg/integrations/gitlab/example.go b/pkg/integrations/gitlab/example.go index f328477b63..397360118d 100644 --- a/pkg/integrations/gitlab/example.go +++ b/pkg/integrations/gitlab/example.go @@ -10,9 +10,59 @@ import ( //go:embed example_data_on_issue.json var exampleDataOnIssueBytes []byte +//go:embed example_data_on_merge_request.json +var exampleDataOnMergeRequestBytes []byte + +//go:embed example_data_on_milestone.json +var exampleDataOnMilestoneBytes []byte + +//go:embed example_data_on_release.json +var exampleDataOnReleaseBytes []byte + +//go:embed example_data_on_tag.json +var exampleDataOnTagBytes []byte + +//go:embed example_data_on_vulnerability.json +var exampleDataOnVulnerabilityBytes []byte + var exampleDataOnIssueOnce sync.Once var exampleDataOnIssue map[string]any +var exampleDataOnMergeRequestOnce sync.Once +var exampleDataOnMergeRequest map[string]any + +var exampleDataOnMilestoneOnce sync.Once +var exampleDataOnMilestone map[string]any + +var exampleDataOnReleaseOnce sync.Once +var exampleDataOnRelease map[string]any + +var exampleDataOnTagOnce sync.Once +var exampleDataOnTag map[string]any + +var exampleDataOnVulnerabilityOnce sync.Once +var exampleDataOnVulnerability map[string]any + func (i *OnIssue) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleDataOnIssueOnce, exampleDataOnIssueBytes, &exampleDataOnIssue) } + +func (m *OnMergeRequest) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnMergeRequestOnce, exampleDataOnMergeRequestBytes, &exampleDataOnMergeRequest) +} + +func (m *OnMilestone) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnMilestoneOnce, exampleDataOnMilestoneBytes, &exampleDataOnMilestone) +} + +func (r *OnRelease) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnReleaseOnce, 
exampleDataOnReleaseBytes, &exampleDataOnRelease) +} + +func (t *OnTag) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnTagOnce, exampleDataOnTagBytes, &exampleDataOnTag) +} + +func (v *OnVulnerability) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnVulnerabilityOnce, exampleDataOnVulnerabilityBytes, &exampleDataOnVulnerability) +} diff --git a/pkg/integrations/gitlab/example_data_on_merge_request.json b/pkg/integrations/gitlab/example_data_on_merge_request.json new file mode 100644 index 0000000000..81520c7483 --- /dev/null +++ b/pkg/integrations/gitlab/example_data_on_merge_request.json @@ -0,0 +1,77 @@ +{ + "data": { + "object_kind": "merge_request", + "event_type": "merge_request", + "user": { + "id": 1, + "name": "Alex Garcia", + "username": "agarcia", + "avatar_url": "https://www.gravatar.com/avatar/1a29da0ccd099482194440fac762f5ccb4ec53227761d1859979367644a889a5?s=80&d=identicon", + "email": "agarcia@example.com" + }, + "project": { + "id": 1, + "name": "Example Project", + "description": "Project used to demonstrate merge request webhook payloads.", + "web_url": "https://gitlab.example.com/group/example", + "avatar_url": null, + "git_ssh_url": "ssh://git@gitlab.example.com:group/example.git", + "git_http_url": "https://gitlab.example.com/group/example.git", + "namespace": "group", + "visibility_level": 20, + "path_with_namespace": "group/example", + "default_branch": "main", + "ci_config_path": null + }, + "object_attributes": { + "id": 93, + "iid": 12, + "title": "Add merge request trigger", + "description": "Adds support for additional GitLab webhook trigger types.", + "state": "opened", + "action": "open" + }, + "changes": { + "title": { + "previous": "Add trigger", + "current": "Add merge request trigger" + } + }, + "assignees": [ + { + "id": 4, + "name": "Jamie Rivera", + "username": "jrivera", + "avatar_url": "https://www.gravatar.com/avatar/ab12cd34?s=80&d=identicon", + "email": 
"jrivera@example.com" + } + ], + "reviewers": [ + { + "id": 6, + "name": "Morgan Lee", + "username": "mlee", + "state": "unreviewed", + "avatar_url": "https://www.gravatar.com/avatar/ef56gh78?s=80&d=identicon", + "email": "mlee@example.com" + } + ], + "labels": [ + { + "id": 101, + "title": "backend" + } + ], + "repository": { + "name": "Example Project", + "url": "ssh://git@gitlab.example.com/group/example.git", + "description": "Project used to demonstrate merge request webhook payloads.", + "homepage": "https://gitlab.example.com/group/example", + "git_http_url": "https://gitlab.example.com/group/example.git", + "git_ssh_url": "ssh://git@gitlab.example.com:group/example.git", + "visibility_level": 20 + } + }, + "timestamp": "2026-02-12T20:40:00.000000000Z", + "type": "gitlab.mergeRequest" +} diff --git a/pkg/integrations/gitlab/example_data_on_milestone.json b/pkg/integrations/gitlab/example_data_on_milestone.json new file mode 100644 index 0000000000..97a6acef6b --- /dev/null +++ b/pkg/integrations/gitlab/example_data_on_milestone.json @@ -0,0 +1,40 @@ +{ + "data": { + "object_kind": "milestone", + "event_type": "milestone", + "action": "create", + "project": { + "id": 1, + "name": "Gitlab Test", + "description": "Aut reprehenderit ut est.", + "web_url": "http://example.com/gitlabhq/gitlab-test", + "avatar_url": null, + "git_ssh_url": "git@example.com:gitlabhq/gitlab-test.git", + "git_http_url": "http://example.com/gitlabhq/gitlab-test.git", + "namespace": "GitlabHQ", + "visibility_level": 20, + "path_with_namespace": "gitlabhq/gitlab-test", + "default_branch": "master", + "ci_config_path": null, + "homepage": "http://example.com/gitlabhq/gitlab-test", + "url": "http://example.com/gitlabhq/gitlab-test.git", + "ssh_url": "git@example.com:gitlabhq/gitlab-test.git", + "http_url": "http://example.com/gitlabhq/gitlab-test.git" + }, + "object_attributes": { + "id": 61, + "iid": 10, + "title": "v1.0", + "description": "First stable release", + "state": "active", + 
"created_at": "2025-06-16 14:10:57 UTC", + "updated_at": "2025-06-16 14:10:57 UTC", + "due_date": "2025-06-30", + "start_date": "2025-06-16", + "group_id": null, + "project_id": 1 + } + }, + "timestamp": "2026-02-12T20:40:00.000000000Z", + "type": "gitlab.milestone" +} diff --git a/pkg/integrations/gitlab/example_data_on_release.json b/pkg/integrations/gitlab/example_data_on_release.json new file mode 100644 index 0000000000..9603abed5d --- /dev/null +++ b/pkg/integrations/gitlab/example_data_on_release.json @@ -0,0 +1,61 @@ +{ + "data": { + "id": 1, + "created_at": "2020-11-02 12:55:12 UTC", + "description": "v1.1 has been released", + "name": "v1.1", + "released_at": "2020-11-02 12:55:12 UTC", + "tag": "v1.1", + "object_kind": "release", + "action": "create", + "project": { + "id": 1, + "name": "release-webhook-example", + "description": "", + "web_url": "https://example.com/gitlab-org/release-webhook-example", + "avatar_url": null, + "git_ssh_url": "ssh://git@example.com/gitlab-org/release-webhook-example.git", + "git_http_url": "https://example.com/gitlab-org/release-webhook-example.git", + "namespace": "Gitlab", + "visibility_level": 0, + "path_with_namespace": "gitlab-org/release-webhook-example", + "default_branch": "master", + "ci_config_path": null + }, + "url": "https://example.com/gitlab-org/release-webhook-example/-/releases/v1.1", + "assets": { + "count": 2, + "links": [ + { + "id": 1, + "link_type": "other", + "name": "Changelog", + "url": "https://example.net/changelog" + } + ], + "sources": [ + { + "format": "zip", + "url": "https://example.com/gitlab-org/release-webhook-example/-/archive/v1.1/release-webhook-example-v1.1.zip" + }, + { + "format": "tar.gz", + "url": "https://example.com/gitlab-org/release-webhook-example/-/archive/v1.1/release-webhook-example-v1.1.tar.gz" + } + ] + }, + "commit": { + "id": "ee0a3fb31ac16e11b9dbb596ad16d4af654d08f8", + "message": "Release v1.1", + "title": "Release v1.1", + "timestamp": "2020-10-31T14:58:32+11:00", + 
"url": "https://example.com/gitlab-org/release-webhook-example/-/commit/ee0a3fb31ac16e11b9dbb596ad16d4af654d08f8", + "author": { + "name": "Example User", + "email": "user@example.com" + } + } + }, + "timestamp": "2026-02-12T20:40:00.000000000Z", + "type": "gitlab.release" +} diff --git a/pkg/integrations/gitlab/example_data_on_tag.json b/pkg/integrations/gitlab/example_data_on_tag.json new file mode 100644 index 0000000000..4b3e66b67a --- /dev/null +++ b/pkg/integrations/gitlab/example_data_on_tag.json @@ -0,0 +1,44 @@ +{ + "data": { + "object_kind": "tag_push", + "event_name": "tag_push", + "before": "0000000000000000000000000000000000000000", + "after": "82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", + "ref": "refs/tags/v1.0.0", + "ref_protected": true, + "checkout_sha": "82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", + "message": "Tag message", + "user_id": 1, + "user_name": "John Smith", + "user_username": "jsmith", + "user_email": "john@example.com", + "project": { + "id": 1, + "name": "Example", + "description": "", + "web_url": "http://example.com/jsmith/example", + "avatar_url": null, + "git_ssh_url": "git@example.com:jsmith/example.git", + "git_http_url": "http://example.com/jsmith/example.git", + "namespace": "Jsmith", + "visibility_level": 0, + "path_with_namespace": "jsmith/example", + "default_branch": "master", + "ci_config_path": null + }, + "commits": [], + "total_commits_count": 0, + "push_options": {}, + "repository": { + "name": "Example", + "url": "ssh://git@example.com/jsmith/example.git", + "description": "", + "homepage": "http://example.com/jsmith/example", + "git_http_url": "http://example.com/jsmith/example.git", + "git_ssh_url": "git@example.com:jsmith/example.git", + "visibility_level": 0 + } + }, + "timestamp": "2026-02-12T20:40:00.000000000Z", + "type": "gitlab.tag" +} diff --git a/pkg/integrations/gitlab/example_data_on_vulnerability.json b/pkg/integrations/gitlab/example_data_on_vulnerability.json new file mode 100644 index 
0000000000..dcedc55885 --- /dev/null +++ b/pkg/integrations/gitlab/example_data_on_vulnerability.json @@ -0,0 +1,65 @@ +{ + "data": { + "object_kind": "vulnerability", + "object_attributes": { + "url": "https://example.com/flightjs/Flight/-/security/vulnerabilities/1", + "title": "REXML DoS vulnerability", + "state": "confirmed", + "project_id": 1, + "location": { + "file": "Gemfile.lock", + "dependency": { + "package": { + "name": "rexml" + }, + "version": "3.3.1" + } + }, + "cvss": [ + { + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H", + "vendor": "NVD" + } + ], + "severity": "high", + "severity_overridden": false, + "identifiers": [ + { + "name": "Gemnasium-29dce398-220a-4315-8c84-16cd8b6d9b05", + "external_id": "29dce398-220a-4315-8c84-16cd8b6d9b05", + "external_type": "gemnasium", + "url": "https://gitlab.com/gitlab-org/security-products/gemnasium-db/-/blob/master/gem/rexml/CVE-2024-41123.yml" + }, + { + "name": "CVE-2024-41123", + "external_id": "CVE-2024-41123", + "external_type": "cve", + "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-41123" + } + ], + "issues": [ + { + "title": "REXML ReDoS vulnerability", + "url": "https://example.com/flightjs/Flight/-/issues/1", + "created_at": "2025-01-08T00:46:14.429Z", + "updated_at": "2025-01-08T00:46:14.429Z" + } + ], + "report_type": "dependency_scanning", + "confidence": "unknown", + "confidence_overridden": false, + "confirmed_at": "2025-01-08T00:46:14.413Z", + "confirmed_by_id": 1, + "dismissed_at": null, + "dismissed_by_id": null, + "resolved_at": null, + "resolved_by_id": null, + "auto_resolved": false, + "resolved_on_default_branch": false, + "created_at": "2025-01-08T00:46:14.413Z", + "updated_at": "2025-01-08T00:46:14.413Z" + } + }, + "timestamp": "2026-02-12T20:40:00.000000000Z", + "type": "gitlab.vulnerability" +} diff --git a/pkg/integrations/gitlab/gitlab.go b/pkg/integrations/gitlab/gitlab.go index 6b7f64d989..f54d2e94c1 100644 --- a/pkg/integrations/gitlab/gitlab.go +++ 
b/pkg/integrations/gitlab/gitlab.go @@ -175,6 +175,11 @@ func (g *GitLab) Components() []core.Component { func (g *GitLab) Triggers() []core.Trigger { return []core.Trigger{ &OnIssue{}, + &OnMergeRequest{}, + &OnMilestone{}, + &OnRelease{}, + &OnTag{}, + &OnVulnerability{}, } } diff --git a/pkg/integrations/gitlab/gitlab_test.go b/pkg/integrations/gitlab/gitlab_test.go index 6a5cc6e36a..9fc96fe415 100644 --- a/pkg/integrations/gitlab/gitlab_test.go +++ b/pkg/integrations/gitlab/gitlab_test.go @@ -387,3 +387,16 @@ func Test__GitLab__BaseURLNormalization(t *testing.T) { }) } } + +func gitlabHeaders(event, token string) http.Header { + headers := http.Header{} + if event != "" { + headers.Set("X-Gitlab-Event", event) + } + + if token != "" { + headers.Set("X-Gitlab-Token", token) + } + + return headers +} diff --git a/pkg/integrations/gitlab/hooks.go b/pkg/integrations/gitlab/hooks.go index e060f15131..887681221a 100644 --- a/pkg/integrations/gitlab/hooks.go +++ b/pkg/integrations/gitlab/hooks.go @@ -28,6 +28,8 @@ type Hook struct { WikiPageEvents bool `json:"wiki_page_events"` DeploymentEvents bool `json:"deployment_events"` ReleasesEvents bool `json:"releases_events"` + MilestoneEvents bool `json:"milestone_events"` + VulnerabilityEvents bool `json:"vulnerability_events"` } type HookEvents struct { @@ -41,6 +43,8 @@ type HookEvents struct { WikiPageEvents bool DeploymentEvents bool ReleasesEvents bool + MilestoneEvents bool + VulnerabilityEvents bool } func NewHooksClient(httpClient core.HTTPContext, ctx core.IntegrationContext) (*HooksClient, error) { @@ -84,6 +88,8 @@ func (c *HooksClient) CreateHook(projectID string, webhookURL string, secret str "wiki_page_events": events.WikiPageEvents, "deployment_events": events.DeploymentEvents, "releases_events": events.ReleasesEvents, + "milestone_events": events.MilestoneEvents, + "vulnerability_events": events.VulnerabilityEvents, } body, err := json.Marshal(payload) diff --git 
a/pkg/integrations/gitlab/on_merge_request.go b/pkg/integrations/gitlab/on_merge_request.go new file mode 100644 index 0000000000..c347f2ed2c --- /dev/null +++ b/pkg/integrations/gitlab/on_merge_request.go @@ -0,0 +1,174 @@ +package gitlab + +import ( + "encoding/json" + "fmt" + "net/http" + "slices" + + "github.com/mitchellh/mapstructure" + log "github.com/sirupsen/logrus" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type OnMergeRequest struct{} + +type OnMergeRequestConfiguration struct { + Project string `json:"project" mapstructure:"project"` + Actions []string `json:"actions" mapstructure:"actions"` +} + +func (m *OnMergeRequest) Name() string { + return "gitlab.onMergeRequest" +} + +func (m *OnMergeRequest) Label() string { + return "On Merge Request" +} + +func (m *OnMergeRequest) Description() string { + return "Listen to merge request events from GitLab" +} + +func (m *OnMergeRequest) Documentation() string { + return `The On Merge Request trigger starts a workflow execution when merge request events occur in a GitLab project. + +## Configuration + +- **Project** (required): GitLab project to monitor +- **Actions** (required): Select which merge request actions to listen for (open, close, merge, etc.). Default: open. 
+ +## Outputs + +- **Default channel**: Emits merge request payload data with action, project, and object attributes` +} + +func (m *OnMergeRequest) Icon() string { + return "gitlab" +} + +func (m *OnMergeRequest) Color() string { + return "orange" +} + +func (m *OnMergeRequest) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "project", + Label: "Project", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeProject, + }, + }, + }, + { + Name: "actions", + Label: "Actions", + Type: configuration.FieldTypeMultiSelect, + Required: true, + Default: []string{"open"}, + TypeOptions: &configuration.TypeOptions{ + MultiSelect: &configuration.MultiSelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "Opened", Value: "open"}, + {Label: "Closed", Value: "close"}, + {Label: "Reopened", Value: "reopen"}, + {Label: "Updated", Value: "update"}, + {Label: "Approval Added", Value: "approval"}, + {Label: "Approved", Value: "approved"}, + {Label: "Approval Removed", Value: "unapproval"}, + {Label: "Unapproved", Value: "unapproved"}, + {Label: "Merged", Value: "merge"}, + }, + }, + }, + }, + } +} + +func (m *OnMergeRequest) Setup(ctx core.TriggerContext) error { + var config OnMergeRequestConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if err := ensureProjectInMetadata(ctx.Metadata, ctx.Integration, config.Project); err != nil { + return err + } + + return ctx.Integration.RequestWebhook(WebhookConfiguration{ + EventType: "merge_requests", + ProjectID: config.Project, + }) +} + +func (m *OnMergeRequest) Actions() []core.Action { + return []core.Action{} +} + +func (m *OnMergeRequest) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + return nil, nil +} + +func (m 
*OnMergeRequest) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + var config OnMergeRequestConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode configuration: %w", err) + } + + eventType := ctx.Headers.Get("X-Gitlab-Event") + if eventType == "" { + return http.StatusBadRequest, fmt.Errorf("missing X-Gitlab-Event header") + } + + if eventType != "Merge Request Hook" { + return http.StatusOK, nil + } + + code, err := verifyWebhookToken(ctx) + if err != nil { + return code, err + } + + data := map[string]any{} + if err := json.Unmarshal(ctx.Body, &data); err != nil { + return http.StatusBadRequest, fmt.Errorf("error parsing request body: %v", err) + } + + if len(config.Actions) > 0 && !m.whitelistedAction(ctx.Logger, data, config.Actions) { + return http.StatusOK, nil + } + + if err := ctx.Events.Emit("gitlab.mergeRequest", data); err != nil { + return http.StatusInternalServerError, fmt.Errorf("error emitting event: %v", err) + } + + return http.StatusOK, nil +} + +func (m *OnMergeRequest) Cleanup(ctx core.TriggerContext) error { + return nil +} + +func (m *OnMergeRequest) whitelistedAction(logger *log.Entry, data map[string]any, allowedActions []string) bool { + attrs, ok := data["object_attributes"].(map[string]any) + if !ok { + return false + } + + action, ok := attrs["action"].(string) + if !ok { + return false + } + + if !slices.Contains(allowedActions, action) { + logger.Infof("Action %s is not in the allowed list: %v", action, allowedActions) + return false + } + + return true +} diff --git a/pkg/integrations/gitlab/on_merge_request_test.go b/pkg/integrations/gitlab/on_merge_request_test.go new file mode 100644 index 0000000000..981192fb14 --- /dev/null +++ b/pkg/integrations/gitlab/on_merge_request_test.go @@ -0,0 +1,111 @@ +package gitlab + +import ( + "encoding/json" + "net/http" + "testing" + + log "github.com/sirupsen/logrus" + 
"github.com/stretchr/testify/assert" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__OnMergeRequest__HandleWebhook__MissingEventHeader(t *testing.T) { + trigger := &OnMergeRequest{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: http.Header{}, + Body: []byte(`{}`), + Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusBadRequest, code) + assert.ErrorContains(t, err, "X-Gitlab-Event") +} + +func Test__OnMergeRequest__HandleWebhook__WrongEventType(t *testing.T) { + trigger := &OnMergeRequest{} + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Issue Hook", "token"), + Body: []byte(`{}`), + Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Zero(t, events.Count()) +} + +func Test__OnMergeRequest__HandleWebhook__InvalidToken(t *testing.T) { + trigger := &OnMergeRequest{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Merge Request Hook", "wrong"), + Body: []byte(`{}`), + Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusForbidden, code) + assert.ErrorContains(t, err, "invalid webhook token") +} + +func Test__OnMergeRequest__HandleWebhook__ActionMatch(t *testing.T) { + trigger := &OnMergeRequest{} + + body, _ := json.Marshal(map[string]any{ + "object_attributes": map[string]any{ + "action": "open", + "title": "New MR", + }, + }) + + events := &contexts.EventContext{} + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + 
Headers: gitlabHeaders("Merge Request Hook", "token"), + Body: body, + Configuration: map[string]any{ + "project": "123", + "actions": []string{"open"}, + }, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Equal(t, 1, events.Count()) + assert.Equal(t, "gitlab.mergeRequest", events.Payloads[0].Type) +} + +func Test__OnMergeRequest__HandleWebhook__ActionMismatch(t *testing.T) { + trigger := &OnMergeRequest{} + + body, _ := json.Marshal(map[string]any{ + "object_attributes": map[string]any{ + "action": "merge", + }, + }) + + events := &contexts.EventContext{} + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Merge Request Hook", "token"), + Body: body, + Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Zero(t, events.Count()) +} diff --git a/pkg/integrations/gitlab/on_milestone.go b/pkg/integrations/gitlab/on_milestone.go new file mode 100644 index 0000000000..42f433770e --- /dev/null +++ b/pkg/integrations/gitlab/on_milestone.go @@ -0,0 +1,183 @@ +package gitlab + +import ( + "encoding/json" + "fmt" + "net/http" + "slices" + + "github.com/mitchellh/mapstructure" + log "github.com/sirupsen/logrus" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type OnMilestone struct{} + +type OnMilestoneConfiguration struct { + Project string `json:"project" mapstructure:"project"` + Actions []string `json:"actions" mapstructure:"actions"` +} + +func (m *OnMilestone) Name() string { + return "gitlab.onMilestone" +} + +func (m *OnMilestone) Label() string { + return "On Milestone" +} + +func (m *OnMilestone) Description() string { + 
return "Listen to milestone events from GitLab" +} + +func (m *OnMilestone) Documentation() string { + return `The On Milestone trigger starts a workflow execution when milestone events occur in a GitLab project. + +## Configuration + +- **Project** (required): GitLab project to monitor +- **Actions** (required): Select which milestone actions to listen for. Default: create. + +## Outputs + +- **Default channel**: Emits milestone payload data with action, project, and object attributes` +} + +func (m *OnMilestone) Icon() string { + return "gitlab" +} + +func (m *OnMilestone) Color() string { + return "orange" +} + +func (m *OnMilestone) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "project", + Label: "Project", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeProject, + }, + }, + }, + { + Name: "actions", + Label: "Actions", + Type: configuration.FieldTypeMultiSelect, + Required: true, + Default: []string{"create"}, + TypeOptions: &configuration.TypeOptions{ + MultiSelect: &configuration.MultiSelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "Created", Value: "create"}, + {Label: "Closed", Value: "close"}, + {Label: "Reopened", Value: "reopen"}, + {Label: "Deleted", Value: "delete"}, + }, + }, + }, + }, + } +} + +func (m *OnMilestone) Setup(ctx core.TriggerContext) error { + var config OnMilestoneConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if err := ensureProjectInMetadata(ctx.Metadata, ctx.Integration, config.Project); err != nil { + return err + } + + return ctx.Integration.RequestWebhook(WebhookConfiguration{ + EventType: "milestone", + ProjectID: config.Project, + }) +} + +func (m *OnMilestone) Actions() []core.Action { + return []core.Action{} +} + +func (m 
*OnMilestone) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + return nil, nil +} + +func (m *OnMilestone) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + var config OnMilestoneConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode configuration: %w", err) + } + + eventType := ctx.Headers.Get("X-Gitlab-Event") + if eventType == "" { + return http.StatusBadRequest, fmt.Errorf("missing X-Gitlab-Event header") + } + + if eventType != "Milestone Hook" { + return http.StatusOK, nil + } + + code, err := verifyWebhookToken(ctx) + if err != nil { + return code, err + } + + data := map[string]any{} + if err := json.Unmarshal(ctx.Body, &data); err != nil { + return http.StatusBadRequest, fmt.Errorf("error parsing request body: %v", err) + } + + if len(config.Actions) > 0 && !m.whitelistedAction(ctx.Logger, data, config.Actions) { + return http.StatusOK, nil + } + + if err := ctx.Events.Emit("gitlab.milestone", data); err != nil { + return http.StatusInternalServerError, fmt.Errorf("error emitting event: %v", err) + } + + return http.StatusOK, nil +} + +func (m *OnMilestone) Cleanup(ctx core.TriggerContext) error { + return nil +} + +func (m *OnMilestone) extractAction(data map[string]any) (string, bool) { + action, ok := data["action"].(string) + if ok { + return action, true + } + + attrs, ok := data["object_attributes"].(map[string]any) + if !ok { + return "", false + } + + action, ok = attrs["action"].(string) + if !ok { + return "", false + } + + return action, true +} + +func (m *OnMilestone) whitelistedAction(logger *log.Entry, data map[string]any, allowedActions []string) bool { + action, ok := m.extractAction(data) + if !ok { + return false + } + + if !slices.Contains(allowedActions, action) { + logger.Infof("Action %s is not in the allowed list: %v", action, allowedActions) + return false + } + + return true +} diff --git 
a/pkg/integrations/gitlab/on_milestone_test.go b/pkg/integrations/gitlab/on_milestone_test.go new file mode 100644 index 0000000000..8ee96be1f9 --- /dev/null +++ b/pkg/integrations/gitlab/on_milestone_test.go @@ -0,0 +1,70 @@ +package gitlab + +import ( + "net/http" + "testing" + + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__OnMilestone__HandleWebhook__TopLevelAction(t *testing.T) { + trigger := &OnMilestone{} + body := []byte(`{"action":"create","object_attributes":{"title":"v1.0"}}`) + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Milestone Hook", "token"), + Body: body, + Configuration: map[string]any{"project": "123", "actions": []string{"create"}}, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Equal(t, 1, events.Count()) + assert.Equal(t, "gitlab.milestone", events.Payloads[0].Type) +} + +func Test__OnMilestone__HandleWebhook__ObjectAttributesAction(t *testing.T) { + trigger := &OnMilestone{} + body := []byte(`{"object_attributes":{"action":"reopen","title":"v1.0"}}`) + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Milestone Hook", "token"), + Body: body, + Configuration: map[string]any{"project": "123", "actions": []string{"reopen"}}, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Equal(t, 1, events.Count()) + assert.Equal(t, "gitlab.milestone", events.Payloads[0].Type) +} + +func Test__OnMilestone__HandleWebhook__NonWhitelistedAction(t *testing.T) { + trigger := 
&OnMilestone{} + body := []byte(`{"action":"close"}`) + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Milestone Hook", "token"), + Body: body, + Configuration: map[string]any{"project": "123", "actions": []string{"create"}}, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Zero(t, events.Count()) +} diff --git a/pkg/integrations/gitlab/on_release.go b/pkg/integrations/gitlab/on_release.go new file mode 100644 index 0000000000..8c55c494cf --- /dev/null +++ b/pkg/integrations/gitlab/on_release.go @@ -0,0 +1,182 @@ +package gitlab + +import ( + "encoding/json" + "fmt" + "net/http" + "slices" + + "github.com/mitchellh/mapstructure" + log "github.com/sirupsen/logrus" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type OnRelease struct{} + +type OnReleaseConfiguration struct { + Project string `json:"project" mapstructure:"project"` + Actions []string `json:"actions" mapstructure:"actions"` +} + +func (r *OnRelease) Name() string { + return "gitlab.onRelease" +} + +func (r *OnRelease) Label() string { + return "On Release" +} + +func (r *OnRelease) Description() string { + return "Listen to release events from GitLab" +} + +func (r *OnRelease) Documentation() string { + return `The On Release trigger starts a workflow execution when release events occur in a GitLab project. + +## Configuration + +- **Project** (required): GitLab project to monitor +- **Actions** (required): Select which release actions to listen for. Default: create. 
+ +## Outputs + +- **Default channel**: Emits release payload data with action and release metadata` +} + +func (r *OnRelease) Icon() string { + return "gitlab" +} + +func (r *OnRelease) Color() string { + return "orange" +} + +func (r *OnRelease) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "project", + Label: "Project", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeProject, + }, + }, + }, + { + Name: "actions", + Label: "Actions", + Type: configuration.FieldTypeMultiSelect, + Required: true, + Default: []string{"create"}, + TypeOptions: &configuration.TypeOptions{ + MultiSelect: &configuration.MultiSelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "Created", Value: "create"}, + {Label: "Updated", Value: "update"}, + {Label: "Deleted", Value: "delete"}, + }, + }, + }, + }, + } +} + +func (r *OnRelease) Setup(ctx core.TriggerContext) error { + var config OnReleaseConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if err := ensureProjectInMetadata(ctx.Metadata, ctx.Integration, config.Project); err != nil { + return err + } + + return ctx.Integration.RequestWebhook(WebhookConfiguration{ + EventType: "releases", + ProjectID: config.Project, + }) +} + +func (r *OnRelease) Actions() []core.Action { + return []core.Action{} +} + +func (r *OnRelease) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + return nil, nil +} + +func (r *OnRelease) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + var config OnReleaseConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode configuration: %w", err) + } + + eventType := 
ctx.Headers.Get("X-Gitlab-Event") + if eventType == "" { + return http.StatusBadRequest, fmt.Errorf("missing X-Gitlab-Event header") + } + + if eventType != "Release Hook" { + return http.StatusOK, nil + } + + code, err := verifyWebhookToken(ctx) + if err != nil { + return code, err + } + + data := map[string]any{} + if err := json.Unmarshal(ctx.Body, &data); err != nil { + return http.StatusBadRequest, fmt.Errorf("error parsing request body: %v", err) + } + + if len(config.Actions) > 0 && !r.whitelistedAction(ctx.Logger, data, config.Actions) { + return http.StatusOK, nil + } + + if err := ctx.Events.Emit("gitlab.release", data); err != nil { + return http.StatusInternalServerError, fmt.Errorf("error emitting event: %v", err) + } + + return http.StatusOK, nil +} + +func (r *OnRelease) Cleanup(ctx core.TriggerContext) error { + return nil +} + +func (r *OnRelease) extractAction(data map[string]any) (string, bool) { + action, ok := data["action"].(string) + if ok { + return action, true + } + + attrs, ok := data["object_attributes"].(map[string]any) + if !ok { + return "", false + } + + action, ok = attrs["action"].(string) + if !ok { + return "", false + } + + return action, true +} + +func (r *OnRelease) whitelistedAction(logger *log.Entry, data map[string]any, allowedActions []string) bool { + action, ok := r.extractAction(data) + if !ok { + return false + } + + if !slices.Contains(allowedActions, action) { + logger.Infof("Action %s is not in the allowed list: %v", action, allowedActions) + return false + } + + return true +} diff --git a/pkg/integrations/gitlab/on_release_test.go b/pkg/integrations/gitlab/on_release_test.go new file mode 100644 index 0000000000..cbf2f00f84 --- /dev/null +++ b/pkg/integrations/gitlab/on_release_test.go @@ -0,0 +1,50 @@ +package gitlab + +import ( + "net/http" + "testing" + + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/superplanehq/superplane/pkg/core" + 
"github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__OnRelease__HandleWebhook__ActionMatch(t *testing.T) { + trigger := &OnRelease{} + body := []byte(`{"action":"create","name":"v1.2.0"}`) + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Release Hook", "token"), + Body: body, + Configuration: map[string]any{"project": "123", "actions": []string{"create"}}, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Equal(t, 1, events.Count()) + assert.Equal(t, "gitlab.release", events.Payloads[0].Type) +} + +func Test__OnRelease__HandleWebhook__ActionMismatch(t *testing.T) { + trigger := &OnRelease{} + body := []byte(`{"action":"delete","name":"v1.2.0"}`) + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Release Hook", "token"), + Body: body, + Configuration: map[string]any{"project": "123", "actions": []string{"create"}}, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Zero(t, events.Count()) +} diff --git a/pkg/integrations/gitlab/on_tag.go b/pkg/integrations/gitlab/on_tag.go new file mode 100644 index 0000000000..772c143e0e --- /dev/null +++ b/pkg/integrations/gitlab/on_tag.go @@ -0,0 +1,169 @@ +package gitlab + +import ( + "encoding/json" + "fmt" + "net/http" + "strings" + + "github.com/mitchellh/mapstructure" + log "github.com/sirupsen/logrus" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type OnTag struct{} + +type OnTagConfiguration struct { + Project string `json:"project" mapstructure:"project"` + Tags []configuration.Predicate `json:"tags" 
mapstructure:"tags"` +} + +func (t *OnTag) Name() string { + return "gitlab.onTag" +} + +func (t *OnTag) Label() string { + return "On Tag" +} + +func (t *OnTag) Description() string { + return "Listen to tag events from GitLab" +} + +func (t *OnTag) Documentation() string { + return `The On Tag trigger starts a workflow execution when tag push events occur in a GitLab project. + +## Configuration + +- **Project** (required): GitLab project to monitor +- **Tags** (required): Configure tag filters using predicates. You can match full refs (refs/tags/v1.0.0) or tag names (v1.0.0). + +## Outputs + +- **Default channel**: Emits tag push payload data including ref, before/after SHA, and project information` +} + +func (t *OnTag) Icon() string { + return "gitlab" +} + +func (t *OnTag) Color() string { + return "orange" +} + +func (t *OnTag) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "project", + Label: "Project", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeProject, + }, + }, + }, + { + Name: "tags", + Label: "Tags", + Type: configuration.FieldTypeAnyPredicateList, + Required: true, + Default: []map[string]any{ + { + "type": configuration.PredicateTypeMatches, + "value": ".*", + }, + }, + TypeOptions: &configuration.TypeOptions{ + AnyPredicateList: &configuration.AnyPredicateListTypeOptions{ + Operators: configuration.AllPredicateOperators, + }, + }, + }, + } +} + +func (t *OnTag) Setup(ctx core.TriggerContext) error { + var config OnTagConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if err := ensureProjectInMetadata(ctx.Metadata, ctx.Integration, config.Project); err != nil { + return err + } + + return ctx.Integration.RequestWebhook(WebhookConfiguration{ + EventType: "tag_push", + 
ProjectID: config.Project, + }) +} + +func (t *OnTag) Actions() []core.Action { + return []core.Action{} +} + +func (t *OnTag) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + return nil, nil +} + +func (t *OnTag) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + var config OnTagConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode configuration: %w", err) + } + + eventType := ctx.Headers.Get("X-Gitlab-Event") + if eventType == "" { + return http.StatusBadRequest, fmt.Errorf("missing X-Gitlab-Event header") + } + + if eventType != "Tag Push Hook" { + return http.StatusOK, nil + } + + code, err := verifyWebhookToken(ctx) + if err != nil { + return code, err + } + + data := map[string]any{} + if err := json.Unmarshal(ctx.Body, &data); err != nil { + return http.StatusBadRequest, fmt.Errorf("error parsing request body: %v", err) + } + + if len(config.Tags) > 0 && !t.matchesTag(ctx.Logger, data, config.Tags) { + return http.StatusOK, nil + } + + if err := ctx.Events.Emit("gitlab.tag", data); err != nil { + return http.StatusInternalServerError, fmt.Errorf("error emitting event: %v", err) + } + + return http.StatusOK, nil +} + +func (t *OnTag) Cleanup(ctx core.TriggerContext) error { + return nil +} + +func (t *OnTag) matchesTag(logger *log.Entry, data map[string]any, predicates []configuration.Predicate) bool { + ref, ok := data["ref"].(string) + if !ok { + return false + } + + if configuration.MatchesAnyPredicate(predicates, ref) { + return true + } + + tag := strings.TrimPrefix(ref, "refs/tags/") + if tag != ref && configuration.MatchesAnyPredicate(predicates, tag) { + return true + } + + logger.Infof("Tag %s does not match the allowed predicates: %v", ref, predicates) + return false +} diff --git a/pkg/integrations/gitlab/on_tag_test.go b/pkg/integrations/gitlab/on_tag_test.go new file mode 100644 index 0000000000..25f8ac3708 
--- /dev/null +++ b/pkg/integrations/gitlab/on_tag_test.go @@ -0,0 +1,86 @@ +package gitlab + +import ( + "net/http" + "testing" + + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__OnTag__HandleWebhook__FullRefMatch(t *testing.T) { + trigger := &OnTag{} + body := []byte(`{"ref":"refs/tags/v1.0.0","event_name":"tag_push"}`) + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Tag Push Hook", "token"), + Body: body, + Configuration: map[string]any{ + "project": "123", + "tags": []configuration.Predicate{ + {Type: configuration.PredicateTypeEquals, Value: "refs/tags/v1.0.0"}, + }, + }, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Equal(t, 1, events.Count()) + assert.Equal(t, "gitlab.tag", events.Payloads[0].Type) +} + +func Test__OnTag__HandleWebhook__TagNameMatch(t *testing.T) { + trigger := &OnTag{} + body := []byte(`{"ref":"refs/tags/v1.0.0","event_name":"tag_push"}`) + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Tag Push Hook", "token"), + Body: body, + Configuration: map[string]any{ + "project": "123", + "tags": []configuration.Predicate{ + {Type: configuration.PredicateTypeEquals, Value: "v1.0.0"}, + }, + }, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Equal(t, 1, events.Count()) + assert.Equal(t, "gitlab.tag", events.Payloads[0].Type) +} + +func Test__OnTag__HandleWebhook__TagMismatch(t *testing.T) { + trigger := &OnTag{} 
+ body := []byte(`{"ref":"refs/tags/v2.0.0","event_name":"tag_push"}`) + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Tag Push Hook", "token"), + Body: body, + Configuration: map[string]any{ + "project": "123", + "tags": []configuration.Predicate{ + {Type: configuration.PredicateTypeEquals, Value: "v1.0.0"}, + }, + }, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Zero(t, events.Count()) +} diff --git a/pkg/integrations/gitlab/on_vulnerability.go b/pkg/integrations/gitlab/on_vulnerability.go new file mode 100644 index 0000000000..2db1c71100 --- /dev/null +++ b/pkg/integrations/gitlab/on_vulnerability.go @@ -0,0 +1,125 @@ +package gitlab + +import ( + "encoding/json" + "fmt" + "net/http" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type OnVulnerability struct{} + +type OnVulnerabilityConfiguration struct { + Project string `json:"project" mapstructure:"project"` +} + +func (v *OnVulnerability) Name() string { + return "gitlab.onVulnerability" +} + +func (v *OnVulnerability) Label() string { + return "On Vulnerability" +} + +func (v *OnVulnerability) Description() string { + return "Listen to vulnerability events from GitLab" +} + +func (v *OnVulnerability) Documentation() string { + return `The On Vulnerability trigger starts a workflow execution when vulnerability events occur in a GitLab project. 
+ +## Configuration + +- **Project** (required): GitLab project to monitor + +## Outputs + +- **Default channel**: Emits vulnerability payload data including severity, state, location, and linked issues` +} + +func (v *OnVulnerability) Icon() string { + return "gitlab" +} + +func (v *OnVulnerability) Color() string { + return "orange" +} + +func (v *OnVulnerability) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "project", + Label: "Project", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeProject, + }, + }, + }, + } +} + +func (v *OnVulnerability) Setup(ctx core.TriggerContext) error { + var config OnVulnerabilityConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if err := ensureProjectInMetadata(ctx.Metadata, ctx.Integration, config.Project); err != nil { + return err + } + + return ctx.Integration.RequestWebhook(WebhookConfiguration{ + EventType: "vulnerability", + ProjectID: config.Project, + }) +} + +func (v *OnVulnerability) Actions() []core.Action { + return []core.Action{} +} + +func (v *OnVulnerability) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + return nil, nil +} + +func (v *OnVulnerability) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + var config OnVulnerabilityConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode configuration: %w", err) + } + + eventType := ctx.Headers.Get("X-Gitlab-Event") + if eventType == "" { + return http.StatusBadRequest, fmt.Errorf("missing X-Gitlab-Event header") + } + + if eventType != "Vulnerability Hook" { + return http.StatusOK, nil + } + + code, err := verifyWebhookToken(ctx) + if err != nil { + return 
code, err + } + + data := map[string]any{} + if err := json.Unmarshal(ctx.Body, &data); err != nil { + return http.StatusBadRequest, fmt.Errorf("error parsing request body: %v", err) + } + + if err := ctx.Events.Emit("gitlab.vulnerability", data); err != nil { + return http.StatusInternalServerError, fmt.Errorf("error emitting event: %v", err) + } + + return http.StatusOK, nil +} + +func (v *OnVulnerability) Cleanup(ctx core.TriggerContext) error { + return nil +} diff --git a/pkg/integrations/gitlab/on_vulnerability_test.go b/pkg/integrations/gitlab/on_vulnerability_test.go new file mode 100644 index 0000000000..60bca2b7e4 --- /dev/null +++ b/pkg/integrations/gitlab/on_vulnerability_test.go @@ -0,0 +1,48 @@ +package gitlab + +import ( + "net/http" + "testing" + + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__OnVulnerability__HandleWebhook__WrongEventType(t *testing.T) { + trigger := &OnVulnerability{} + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Issue Hook", "token"), + Body: []byte(`{}`), + Configuration: map[string]any{"project": "123"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Zero(t, events.Count()) +} + +func Test__OnVulnerability__HandleWebhook__EmitsEvent(t *testing.T) { + trigger := &OnVulnerability{} + body := []byte(`{"object_kind":"vulnerability","object_attributes":{"severity":"high"}}`) + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Vulnerability Hook", "token"), + Body: body, + Configuration: map[string]any{"project": "123"}, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + 
assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Equal(t, 1, events.Count()) + assert.Equal(t, "gitlab.vulnerability", events.Payloads[0].Type) +} diff --git a/pkg/integrations/gitlab/webhook_handler.go b/pkg/integrations/gitlab/webhook_handler.go index a5055f2ec1..65c140aa29 100644 --- a/pkg/integrations/gitlab/webhook_handler.go +++ b/pkg/integrations/gitlab/webhook_handler.go @@ -68,6 +68,10 @@ func (h *GitLabWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error events.PipelineEvents = true case "releases": events.ReleasesEvents = true + case "milestone": + events.MilestoneEvents = true + case "vulnerability": + events.VulnerabilityEvents = true } hook, err := hooksClient.CreateHook(config.ProjectID, ctx.Webhook.GetURL(), string(secret), events) diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/index.ts b/web_src/src/pages/workflowv2/mappers/gitlab/index.ts index 6d78e8e9a4..59c3c9c17d 100644 --- a/web_src/src/pages/workflowv2/mappers/gitlab/index.ts +++ b/web_src/src/pages/workflowv2/mappers/gitlab/index.ts @@ -2,6 +2,11 @@ import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../typ import { buildActionStateRegistry } from "../utils"; import { createIssueMapper } from "./create_issue"; import { onIssueTriggerRenderer } from "./on_issue"; +import { onMergeRequestTriggerRenderer } from "./on_merge_request"; +import { onMilestoneTriggerRenderer } from "./on_milestone"; +import { onReleaseTriggerRenderer } from "./on_release"; +import { onTagTriggerRenderer } from "./on_tag"; +import { onVulnerabilityTriggerRenderer } from "./on_vulnerability"; export const eventStateRegistry: Record = { createIssue: buildActionStateRegistry("created"), @@ -13,4 +18,9 @@ export const componentMappers: Record = { export const triggerRenderers: Record = { onIssue: onIssueTriggerRenderer, + onMergeRequest: onMergeRequestTriggerRenderer, + onMilestone: onMilestoneTriggerRenderer, + onRelease: onReleaseTriggerRenderer, + 
onTag: onTagTriggerRenderer, + onVulnerability: onVulnerabilityTriggerRenderer, }; diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/on_merge_request.ts b/web_src/src/pages/workflowv2/mappers/gitlab/on_merge_request.ts new file mode 100644 index 0000000000..091000be85 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/on_merge_request.ts @@ -0,0 +1,115 @@ +import { getBackgroundColorClass, getColorClass } from "@/utils/colors"; +import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; +import { TriggerProps } from "@/ui/trigger"; +import { TriggerEventContext, TriggerRenderer, TriggerRendererContext } from "../types"; +import { buildGitlabSubtitle } from "./utils"; +import { GitLabNodeMetadata } from "./types"; + +interface OnMergeRequestConfiguration { + actions: string[]; +} + +interface MergeRequestObjectAttributes { + id?: number; + iid?: number; + title?: string; + description?: string; + state?: string; + action?: string; + url?: string; +} + +interface OnMergeRequestEventData { + object_kind?: string; + event_type?: string; + object_attributes?: MergeRequestObjectAttributes; + user?: { + id: number; + name: string; + username: string; + }; + project?: { + id: number; + name: string; + path_with_namespace: string; + web_url: string; + }; +} + +export const onMergeRequestTriggerRenderer: TriggerRenderer = { + getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { + const eventData = context.event?.data as OnMergeRequestEventData; + const mr = eventData?.object_attributes; + + return { + title: `#${mr?.iid ?? 
""} - ${mr?.title || "Merge Request"}`, + subtitle: buildGitlabSubtitle(mr?.action || "", context.event?.createdAt), + }; + }, + + getRootEventValues: (context: TriggerEventContext): Record => { + const eventData = context.event?.data as OnMergeRequestEventData; + const mr = eventData?.object_attributes; + const values: Record = { + URL: mr?.url || "", + Title: mr?.title || "", + Action: mr?.action || "", + State: mr?.state || "", + IID: mr?.iid?.toString() || "", + }; + + if (eventData?.user?.username) { + values.Author = eventData.user.username; + } + + if (eventData?.project?.path_with_namespace) { + values.Project = eventData.project.path_with_namespace; + } + + return values; + }, + + getTriggerProps: (context: TriggerRendererContext): TriggerProps => { + const { node, definition, lastEvent } = context; + const metadata = node.metadata as unknown as GitLabNodeMetadata; + const configuration = node.configuration as unknown as OnMergeRequestConfiguration; + const metadataItems = []; + + if (metadata?.project?.name) { + metadataItems.push({ + icon: "book", + label: metadata.project.name, + }); + } + + if (configuration?.actions) { + metadataItems.push({ + icon: "funnel", + label: configuration.actions.join(", "), + }); + } + + const props: TriggerProps = { + title: node.name || definition.label || "Unnamed trigger", + iconSrc: gitlabIcon, + iconColor: getColorClass(definition.color), + collapsedBackground: getBackgroundColorClass(definition.color), + metadata: metadataItems, + }; + + if (lastEvent) { + const eventData = lastEvent.data as OnMergeRequestEventData; + const mr = eventData?.object_attributes; + + props.lastEventData = { + title: `#${mr?.iid ?? 
""} - ${mr?.title || "Merge Request"}`, + subtitle: buildGitlabSubtitle(mr?.action || "", lastEvent.createdAt), + receivedAt: new Date(lastEvent.createdAt!), + state: "triggered", + eventId: lastEvent.id!, + }; + } + + return props; + }, +}; diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/on_milestone.ts b/web_src/src/pages/workflowv2/mappers/gitlab/on_milestone.ts new file mode 100644 index 0000000000..c305cb16bd --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/on_milestone.ts @@ -0,0 +1,109 @@ +import { getBackgroundColorClass, getColorClass } from "@/utils/colors"; +import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; +import { TriggerProps } from "@/ui/trigger"; +import { TriggerEventContext, TriggerRenderer, TriggerRendererContext } from "../types"; +import { buildGitlabSubtitle } from "./utils"; +import { GitLabNodeMetadata } from "./types"; +import { stringOrDash } from "../utils"; + +interface OnMilestoneConfiguration { + actions: string[]; +} + +interface MilestoneObjectAttributes { + id?: number; + iid?: number; + title?: string; + description?: string; + state?: string; + due_date?: string; + start_date?: string; +} + +interface OnMilestoneEventData { + object_kind?: string; + event_type?: string; + action?: string; + object_attributes?: MilestoneObjectAttributes; + project?: { + id: number; + name: string; + path_with_namespace: string; + web_url: string; + }; +} + +export const onMilestoneTriggerRenderer: TriggerRenderer = { + getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { + const eventData = context.event?.data as OnMilestoneEventData; + const milestone = eventData?.object_attributes; + + return { + title: milestone?.title ? 
milestone.title : "Milestone", + subtitle: buildGitlabSubtitle(eventData?.action || "", context.event?.createdAt), + }; + }, + + getRootEventValues: (context: TriggerEventContext): Record => { + const eventData = context.event?.data as OnMilestoneEventData; + const milestone = eventData?.object_attributes; + const values: Record = { + Title: stringOrDash(milestone?.title), + Action: stringOrDash(eventData?.action), + State: stringOrDash(milestone?.state), + IID: stringOrDash(milestone?.iid?.toString()), + "Start Date": stringOrDash(milestone?.start_date), + "Due Date": stringOrDash(milestone?.due_date), + }; + + if (eventData?.project?.path_with_namespace) { + values.Project = eventData.project.path_with_namespace; + } + + return values; + }, + + getTriggerProps: (context: TriggerRendererContext): TriggerProps => { + const { node, definition, lastEvent } = context; + const metadata = node.metadata as unknown as GitLabNodeMetadata; + const configuration = node.configuration as unknown as OnMilestoneConfiguration; + const metadataItems = []; + + if (metadata?.project?.name) { + metadataItems.push({ + icon: "book", + label: metadata.project.name, + }); + } + + if (configuration?.actions) { + metadataItems.push({ + icon: "funnel", + label: configuration.actions.join(", "), + }); + } + + const props: TriggerProps = { + title: node.name || definition.label || "Unnamed trigger", + iconSrc: gitlabIcon, + iconColor: getColorClass(definition.color), + collapsedBackground: getBackgroundColorClass(definition.color), + metadata: metadataItems, + }; + + if (lastEvent) { + const eventData = lastEvent.data as OnMilestoneEventData; + const milestone = eventData?.object_attributes; + + props.lastEventData = { + title: milestone?.title ? 
milestone.title : "Milestone", + subtitle: buildGitlabSubtitle(stringOrDash(eventData?.action), lastEvent.createdAt), + receivedAt: new Date(lastEvent.createdAt!), + state: "triggered", + eventId: lastEvent.id!, + }; + } + + return props; + }, +}; diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/on_release.ts b/web_src/src/pages/workflowv2/mappers/gitlab/on_release.ts new file mode 100644 index 0000000000..53e79a30ab --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/on_release.ts @@ -0,0 +1,104 @@ +import { getBackgroundColorClass, getColorClass } from "@/utils/colors"; +import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; +import { TriggerProps } from "@/ui/trigger"; +import { TriggerEventContext, TriggerRenderer, TriggerRendererContext } from "../types"; +import { buildGitlabSubtitle } from "./utils"; +import { GitLabNodeMetadata } from "./types"; + +interface OnReleaseConfiguration { + actions: string[]; +} + +interface OnReleaseEventData { + id?: number; + object_kind?: string; + action?: string; + name?: string; + tag?: string; + url?: string; + project?: { + id: number; + name: string; + path_with_namespace: string; + web_url: string; + }; +} + +function getReleaseTitle(eventData: OnReleaseEventData): string { + const releaseName = eventData?.name || eventData?.tag || "Release"; + if (eventData?.tag) { + return `${releaseName} (${eventData.tag})`; + } + + return releaseName; +} + +export const onReleaseTriggerRenderer: TriggerRenderer = { + getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { + const eventData = context.event?.data as OnReleaseEventData; + + return { + title: getReleaseTitle(eventData), + subtitle: buildGitlabSubtitle(eventData?.action || "", context.event?.createdAt), + }; + }, + + getRootEventValues: (context: TriggerEventContext): Record => { + const eventData = context.event?.data as OnReleaseEventData; + const values: Record = { + Name: eventData?.name || "", + 
Tag: eventData?.tag || "", + Action: eventData?.action || "", + URL: eventData?.url || "", + }; + + if (eventData?.project?.path_with_namespace) { + values.Project = eventData.project.path_with_namespace; + } + + return values; + }, + + getTriggerProps: (context: TriggerRendererContext): TriggerProps => { + const { node, definition, lastEvent } = context; + const metadata = node.metadata as unknown as GitLabNodeMetadata; + const configuration = node.configuration as unknown as OnReleaseConfiguration; + const metadataItems = []; + + if (metadata?.project?.name) { + metadataItems.push({ + icon: "book", + label: metadata.project.name, + }); + } + + if (configuration?.actions) { + metadataItems.push({ + icon: "funnel", + label: configuration.actions.join(", "), + }); + } + + const props: TriggerProps = { + title: node.name || definition.label || "Unnamed trigger", + iconSrc: gitlabIcon, + iconColor: getColorClass(definition.color), + collapsedBackground: getBackgroundColorClass(definition.color), + metadata: metadataItems, + }; + + if (lastEvent) { + const eventData = lastEvent.data as OnReleaseEventData; + + props.lastEventData = { + title: getReleaseTitle(eventData), + subtitle: buildGitlabSubtitle(eventData?.action || "", lastEvent.createdAt), + receivedAt: new Date(lastEvent.createdAt!), + state: "triggered", + eventId: lastEvent.id!, + }; + } + + return props; + }, +}; diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/on_tag.ts b/web_src/src/pages/workflowv2/mappers/gitlab/on_tag.ts new file mode 100644 index 0000000000..28e5200cfe --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/on_tag.ts @@ -0,0 +1,99 @@ +import { getBackgroundColorClass, getColorClass } from "@/utils/colors"; +import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; +import { TriggerProps } from "@/ui/trigger"; +import { TriggerEventContext, TriggerRenderer, TriggerRendererContext } from "../types"; +import { Predicate, formatPredicate, stringOrDash } from 
"../utils"; +import { buildGitlabSubtitle } from "./utils"; +import { GitLabNodeMetadata } from "./types"; + +interface OnTagConfiguration { + tags: Predicate[]; +} + +interface OnTagEventData { + object_kind?: string; + event_name?: string; + ref?: string; + before?: string; + after?: string; + user_name?: string; + project?: { + id: number; + name: string; + path_with_namespace: string; + web_url: string; + }; +} + +export const onTagTriggerRenderer: TriggerRenderer = { + getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { + const eventData = context.event?.data as OnTagEventData; + + return { + title: eventData?.ref ? eventData.ref : "Tag Push", + subtitle: buildGitlabSubtitle(eventData?.event_name || "", context.event?.createdAt), + }; + }, + + getRootEventValues: (context: TriggerEventContext): Record => { + const eventData = context.event?.data as OnTagEventData; + const values: Record = { + Ref: stringOrDash(eventData?.ref), + Before: stringOrDash(eventData?.before), + After: stringOrDash(eventData?.after), + }; + + if (eventData?.user_name) { + values.Author = eventData.user_name; + } + + if (eventData?.project?.path_with_namespace) { + values.Project = eventData.project.path_with_namespace; + } + + return values; + }, + + getTriggerProps: (context: TriggerRendererContext): TriggerProps => { + const { node, definition, lastEvent } = context; + const metadata = node.metadata as unknown as GitLabNodeMetadata; + const configuration = node.configuration as unknown as OnTagConfiguration; + const metadataItems = []; + + if (metadata?.project?.name) { + metadataItems.push({ + icon: "book", + label: metadata.project.name, + }); + } + + if (configuration?.tags?.length > 0) { + metadataItems.push({ + icon: "funnel", + label: configuration.tags.map((tag) => formatPredicate(tag)).join(", "), + }); + } + + const props: TriggerProps = { + title: node.name || definition.label || "Unnamed trigger", + iconSrc: gitlabIcon, + 
iconColor: getColorClass(definition.color), + collapsedBackground: getBackgroundColorClass(definition.color), + metadata: metadataItems, + }; + + if (lastEvent) { + const eventData = lastEvent.data as OnTagEventData; + + props.lastEventData = { + title: eventData?.ref ? eventData.ref : "Tag Push", + subtitle: buildGitlabSubtitle(eventData?.event_name || "", lastEvent.createdAt), + receivedAt: new Date(lastEvent.createdAt!), + state: "triggered", + eventId: lastEvent.id!, + }; + } + + return props; + }, +}; diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/on_vulnerability.ts b/web_src/src/pages/workflowv2/mappers/gitlab/on_vulnerability.ts new file mode 100644 index 0000000000..81faafc564 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/on_vulnerability.ts @@ -0,0 +1,90 @@ +import { getBackgroundColorClass, getColorClass } from "@/utils/colors"; +import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; +import { TriggerProps } from "@/ui/trigger"; +import { TriggerEventContext, TriggerRenderer, TriggerRendererContext } from "../types"; +import { buildGitlabSubtitle } from "./utils"; +import { GitLabNodeMetadata } from "./types"; + +interface VulnerabilityObjectAttributes { + title?: string; + severity?: string; + state?: string; + report_type?: string; + url?: string; + project_id?: number; +} + +interface OnVulnerabilityEventData { + object_kind?: string; + object_attributes?: VulnerabilityObjectAttributes; +} + +function getVulnerabilityTitle(attributes: VulnerabilityObjectAttributes | undefined): string { + return attributes?.title || "Vulnerability"; +} + +function getVulnerabilitySubtitle(attributes: VulnerabilityObjectAttributes | undefined): string { + return attributes?.severity || attributes?.state || ""; +} + +export const onVulnerabilityTriggerRenderer: TriggerRenderer = { + getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { + const eventData = context.event?.data as 
OnVulnerabilityEventData; + const attrs = eventData?.object_attributes; + + return { + title: getVulnerabilityTitle(attrs), + subtitle: buildGitlabSubtitle(getVulnerabilitySubtitle(attrs), context.event?.createdAt), + }; + }, + + getRootEventValues: (context: TriggerEventContext): Record => { + const eventData = context.event?.data as OnVulnerabilityEventData; + const attrs = eventData?.object_attributes; + + return { + URL: attrs?.url || "", + Title: attrs?.title || "", + Severity: attrs?.severity || "", + State: attrs?.state || "", + "Report Type": attrs?.report_type || "", + "Project ID": attrs?.project_id?.toString() || "", + }; + }, + + getTriggerProps: (context: TriggerRendererContext): TriggerProps => { + const { node, definition, lastEvent } = context; + const metadata = node.metadata as unknown as GitLabNodeMetadata; + const metadataItems = []; + + if (metadata?.project?.name) { + metadataItems.push({ + icon: "book", + label: metadata.project.name, + }); + } + + const props: TriggerProps = { + title: node.name || definition.label || "Unnamed trigger", + iconSrc: gitlabIcon, + iconColor: getColorClass(definition.color), + collapsedBackground: getBackgroundColorClass(definition.color), + metadata: metadataItems, + }; + + if (lastEvent) { + const eventData = lastEvent.data as OnVulnerabilityEventData; + const attrs = eventData?.object_attributes; + + props.lastEventData = { + title: getVulnerabilityTitle(attrs), + subtitle: buildGitlabSubtitle(getVulnerabilitySubtitle(attrs), lastEvent.createdAt), + receivedAt: new Date(lastEvent.createdAt!), + state: "triggered", + eventId: lastEvent.id!, + }; + } + + return props; + }, +}; From e87d8cdcd3d82c5fc9bb25009cf6766f4aada0e0 Mon Sep 17 00:00:00 2001 From: Muhammad Fuzail Zubari Date: Fri, 13 Feb 2026 11:09:52 +0500 Subject: [PATCH 066/160] fix for Non-constant-time comparison for bearer token authentication Signed-off-by: Muhammad Fuzail Zubari --- pkg/integrations/grafana/on_alert_firing.go | 3 ++- 
pkg/workers/contexts/integration_context.go | 15 +++++++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/pkg/integrations/grafana/on_alert_firing.go b/pkg/integrations/grafana/on_alert_firing.go index b5afdbf85f..6e05464821 100644 --- a/pkg/integrations/grafana/on_alert_firing.go +++ b/pkg/integrations/grafana/on_alert_firing.go @@ -1,6 +1,7 @@ package grafana import ( + "crypto/subtle" "encoding/json" "fmt" "net/http" @@ -109,7 +110,7 @@ func (t *OnAlertFiring) HandleWebhook(ctx core.WebhookRequestContext) (int, erro } token := strings.TrimSpace(strings.TrimPrefix(authHeader, "Bearer ")) - if token != sharedSecret { + if subtle.ConstantTimeCompare([]byte(token), []byte(sharedSecret)) != 1 { return http.StatusUnauthorized, fmt.Errorf("invalid Authorization token") } } diff --git a/pkg/workers/contexts/integration_context.go b/pkg/workers/contexts/integration_context.go index 1579bbeb50..7256d63ae2 100644 --- a/pkg/workers/contexts/integration_context.go +++ b/pkg/workers/contexts/integration_context.go @@ -196,12 +196,6 @@ func (c *IntegrationContext) completeCurrentRequestForInstallation() error { } func (c *IntegrationContext) GetConfig(name string) ([]byte, error) { - config := c.integration.Configuration.Data() - v, ok := config[name] - if !ok { - return nil, fmt.Errorf("config %s not found", name) - } - impl, err := c.registry.GetIntegration(c.integration.AppName) if err != nil { return nil, fmt.Errorf("failed to get integration %s: %w", c.integration.AppName, err) @@ -212,6 +206,15 @@ func (c *IntegrationContext) GetConfig(name string) ([]byte, error) { return nil, fmt.Errorf("failed to find config %s: %w", name, err) } + config := c.integration.Configuration.Data() + v, ok := config[name] + if !ok { + if !configDef.Required { + return nil, nil + } + return nil, fmt.Errorf("config %s not found", name) + } + if configDef.Type != configuration.FieldTypeString && configDef.Type != configuration.FieldTypeSelect { return nil, 
fmt.Errorf("config %s is not of type: [string, select]", name) } From 195435333149c23be6dc0f5ba10b1fd0df697906 Mon Sep 17 00:00:00 2001 From: Muhammad Fuzail Zubari Date: Fri, 13 Feb 2026 11:19:02 +0500 Subject: [PATCH 067/160] GetURl unused method removed (#1945) Signed-off-by: Muhammad Fuzail Zubari --- pkg/core/trigger.go | 1 - pkg/integrations/sendgrid/on_email_event_test.go | 4 ---- pkg/workers/contexts/node_webhook_context.go | 8 -------- test/support/contexts/contexts.go | 6 ------ 4 files changed, 19 deletions(-) diff --git a/pkg/core/trigger.go b/pkg/core/trigger.go index b5c3fad91a..0e86340fa0 100644 --- a/pkg/core/trigger.go +++ b/pkg/core/trigger.go @@ -134,7 +134,6 @@ type WebhookRequestContext struct { type NodeWebhookContext interface { Setup() (string, error) - GetURL() (string, error) GetSecret() ([]byte, error) ResetSecret() ([]byte, []byte, error) GetBaseURL() string diff --git a/pkg/integrations/sendgrid/on_email_event_test.go b/pkg/integrations/sendgrid/on_email_event_test.go index a18f7aad48..7e0282d9c2 100644 --- a/pkg/integrations/sendgrid/on_email_event_test.go +++ b/pkg/integrations/sendgrid/on_email_event_test.go @@ -171,10 +171,6 @@ func (t *testNodeWebhookContext) Setup() (string, error) { return "", nil } -func (t *testNodeWebhookContext) GetURL() (string, error) { - return "", nil -} - func (t *testNodeWebhookContext) GetSecret() ([]byte, error) { return t.secret, nil } diff --git a/pkg/workers/contexts/node_webhook_context.go b/pkg/workers/contexts/node_webhook_context.go index a1c0e487be..a0b1f78b69 100644 --- a/pkg/workers/contexts/node_webhook_context.go +++ b/pkg/workers/contexts/node_webhook_context.go @@ -66,14 +66,6 @@ func (c *NodeWebhookContext) ResetSecret() ([]byte, []byte, error) { return []byte(plainKey), encryptedKey, nil } -func (c *NodeWebhookContext) GetURL() (string, error) { - if c.node.WebhookID == nil { - return "", fmt.Errorf("node does not have a webhook") - } - - return fmt.Sprintf("%s/webhooks/%s", 
c.GetBaseURL(), c.node.WebhookID.String()), nil -} - func (c *NodeWebhookContext) Setup() (string, error) { webhook, err := c.findOrCreateWebhook() if err != nil { diff --git a/test/support/contexts/contexts.go b/test/support/contexts/contexts.go index 45d86eb63c..f3e78e48ec 100644 --- a/test/support/contexts/contexts.go +++ b/test/support/contexts/contexts.go @@ -1,7 +1,6 @@ package contexts import ( - "fmt" "net/http" "time" @@ -49,11 +48,6 @@ func (w *WebhookContext) Setup() (string, error) { return id.String(), nil } -func (w *WebhookContext) GetURL() (string, error) { - id := uuid.New() - return fmt.Sprintf("%s/webhooks/%s", w.GetBaseURL(), id.String()), nil -} - func (w *WebhookContext) GetBaseURL() string { return "http://localhost:3000/api/v1" } From 13ca3bc812d41ae0a6bf73b7af1d8f34da8b9b69 Mon Sep 17 00:00:00 2001 From: Muhammad Fuzail Zubari Date: Fri, 13 Feb 2026 15:52:13 +0500 Subject: [PATCH 068/160] jfix(grafana): wrap On Alert Firing example payload (#1945) Signed-off-by: Muhammad Fuzail Zubari --- .../grafana/example_data_on_alert_firing.json | 38 ++++++++++--------- 1 file changed, 21 insertions(+), 17 deletions(-) diff --git a/pkg/integrations/grafana/example_data_on_alert_firing.json b/pkg/integrations/grafana/example_data_on_alert_firing.json index c64a1dcae8..9befa78cef 100644 --- a/pkg/integrations/grafana/example_data_on_alert_firing.json +++ b/pkg/integrations/grafana/example_data_on_alert_firing.json @@ -1,21 +1,25 @@ { - "status": "firing", - "title": "High error rate", - "ruleUid": "alert_rule_uid", - "alerts": [ - { - "status": "firing", - "labels": { - "alertname": "HighErrorRate", - "service": "api" - }, - "annotations": { - "summary": "Error rate above threshold" + "data": { + "status": "firing", + "title": "High error rate", + "ruleUid": "alert_rule_uid", + "alerts": [ + { + "status": "firing", + "labels": { + "alertname": "HighErrorRate", + "service": "api" + }, + "annotations": { + "summary": "Error rate above threshold" + } } - } 
- ], - "commonLabels": { - "alertname": "HighErrorRate" + ], + "commonLabels": { + "alertname": "HighErrorRate" + }, + "externalURL": "http://grafana.local" }, - "externalURL": "http://grafana.local" + "timestamp": "2026-02-12T16:18:03.362582388Z", + "type": "grafana.alert.firing" } From fa6517125238f919a5f4502416863959794f81d8 Mon Sep 17 00:00:00 2001 From: Petar Perovic Date: Fri, 13 Feb 2026 13:03:35 +0100 Subject: [PATCH 069/160] chore: Multi instance integrations UI (#3094) Signed-off-by: Muhammad Fuzail Zubari --- .../assets/icons/integrations/circleci.svg | 14 +- .../src/components/OrganizationMenuButton.tsx | 4 +- web_src/src/components/ui/button.tsx | 2 +- .../settings/IntegrationDetails.tsx | 48 +-- .../organization/settings/Integrations.tsx | 374 +++++++++++------- .../src/pages/organization/settings/index.tsx | 4 +- web_src/src/ui/alert/index.tsx | 2 +- web_src/src/ui/componentHeader/index.tsx | 2 +- .../src/ui/componentSidebar/SettingsTab.tsx | 19 +- 9 files changed, 282 insertions(+), 187 deletions(-) diff --git a/web_src/src/assets/icons/integrations/circleci.svg b/web_src/src/assets/icons/integrations/circleci.svg index c08ac7ffa4..6c5d6cd8c9 100644 --- a/web_src/src/assets/icons/integrations/circleci.svg +++ b/web_src/src/assets/icons/integrations/circleci.svg @@ -1,4 +1,12 @@ - - - + + + + + diff --git a/web_src/src/components/OrganizationMenuButton.tsx b/web_src/src/components/OrganizationMenuButton.tsx index fdcafbfb20..1698006393 100644 --- a/web_src/src/components/OrganizationMenuButton.tsx +++ b/web_src/src/components/OrganizationMenuButton.tsx @@ -3,13 +3,13 @@ import { useAccount } from "@/contexts/AccountContext"; import { useOrganization } from "@/hooks/useOrganizationData"; import { cn } from "@/lib/utils"; import { - AppWindow, ArrowRightLeft, ChevronDown, CircleUser, Key, Lock, LogOut, + Plug, Settings, Shield, User as UserIcon, @@ -112,7 +112,7 @@ export function OrganizationMenuButton({ organizationId, onLogoClick, className { label: 
"Integrations", href: organizationId ? `/${organizationId}/settings/integrations` : "#", - Icon: AppWindow, + Icon: Plug, permission: { resource: "integrations", action: "read" }, }, { diff --git a/web_src/src/components/ui/button.tsx b/web_src/src/components/ui/button.tsx index 711b4027d4..d319b720d1 100644 --- a/web_src/src/components/ui/button.tsx +++ b/web_src/src/components/ui/button.tsx @@ -20,7 +20,7 @@ const buttonVariants = cva( }, size: { default: "h-8 px-3 py-1.5 has-[>svg]:px-2", - sm: "h-7 rounded-sm gap-1.5 px-3 has-[>svg]:px-2.5", + sm: "h-6 rounded-sm gap-1 px-2 py-1 text-[13px] has-[>svg]:px-2", lg: "h-10 rounded-md px-6 has-[>svg]:px-4", icon: "size-9", "icon-sm": "size-8", diff --git a/web_src/src/pages/organization/settings/IntegrationDetails.tsx b/web_src/src/pages/organization/settings/IntegrationDetails.tsx index a5784e56c0..ecf459ac6e 100644 --- a/web_src/src/pages/organization/settings/IntegrationDetails.tsx +++ b/web_src/src/pages/organization/settings/IntegrationDetails.tsx @@ -1,4 +1,4 @@ -import { ArrowLeft, CircleCheckBig, CircleDashed, CircleX, ExternalLink, Loader2, Trash2 } from "lucide-react"; +import { ArrowLeft, CircleX, ExternalLink, Loader2, Plug, Trash2 } from "lucide-react"; import { useNavigate, useParams } from "react-router-dom"; import { useState, useEffect, useMemo } from "react"; import { @@ -19,6 +19,7 @@ import { IntegrationIcon } from "@/ui/componentSidebar/integrationIcons"; import { IntegrationInstructions } from "@/ui/IntegrationInstructions"; import { PermissionTooltip } from "@/components/PermissionGate"; import { usePermissions } from "@/contexts/PermissionsContext"; +import { Alert, AlertDescription } from "@/ui/alert"; interface IntegrationDetailsProps { organizationId: string; @@ -195,34 +196,29 @@ export function IntegrationDetails({ organizationId }: IntegrationDetailsProps) className="w-6 h-6" />
    -

    +

    {integration.metadata?.name || getIntegrationTypeDisplayName(undefined, integration.spec?.integrationName) || integration.spec?.integrationName}

    - {integration.spec?.integrationName && integration.metadata?.name !== integration.spec?.integrationName && ( -

    - Integration:{" "} - {getIntegrationTypeDisplayName(undefined, integration.spec?.integrationName) || - integration.spec?.integrationName} -

    - )}
    - {integration.status?.state === "ready" ? ( - - ) : integration.status?.state === "error" ? ( - - ) : ( - - )} + {(integration.status?.state || "unknown").charAt(0).toUpperCase() + @@ -233,14 +229,10 @@ export function IntegrationDetails({ organizationId }: IntegrationDetailsProps)
    {integration.status?.state === "error" && integration.status?.stateDescription && ( -
    -
    - -
    -

    {integration.status.stateDescription}

    -
    -
    -
    + + + {integration.status.stateDescription} + )} {integration?.status?.browserAction && ( diff --git a/web_src/src/pages/organization/settings/Integrations.tsx b/web_src/src/pages/organization/settings/Integrations.tsx index 3814a1320f..cc95e14ab7 100644 --- a/web_src/src/pages/organization/settings/Integrations.tsx +++ b/web_src/src/pages/organization/settings/Integrations.tsx @@ -1,4 +1,4 @@ -import { AppWindow, Loader2 } from "lucide-react"; +import { Loader2, Plug, Search, X } from "lucide-react"; import { useMemo, useState } from "react"; import { useNavigate } from "react-router-dom"; import { @@ -31,6 +31,7 @@ export function Integrations({ organizationId }: IntegrationsProps) { const [integrationName, setIntegrationName] = useState(""); const [configuration, setConfiguration] = useState>({}); const [isModalOpen, setIsModalOpen] = useState(false); + const [filterQuery, setFilterQuery] = useState(""); const canCreateIntegrations = canAct("integrations", "create"); const canUpdateIntegrations = canAct("integrations", "update"); @@ -44,6 +45,90 @@ export function Integrations({ organizationId }: IntegrationsProps) { organizationIntegrations.map((integration) => integration.metadata?.name?.trim()).filter(Boolean) as string[], ); }, [organizationIntegrations]); + const connectedInstancesByProvider = useMemo(() => { + const groups = new Map(); + + organizationIntegrations.forEach((integration) => { + const provider = integration.spec?.integrationName; + if (!provider) return; + const current = groups.get(provider) || []; + current.push(integration); + groups.set(provider, current); + }); + + return groups; + }, [organizationIntegrations]); + const integrationCatalog = useMemo(() => { + const catalogByProvider = new Map< + string, + { + providerName: string; + providerLabel: string; + integrationDef: IntegrationsIntegrationDefinition | null; + instances: typeof organizationIntegrations; + } + >(); + + availableIntegrations.forEach((integrationDef) => { + const 
providerName = integrationDef.name || ""; + const providerLabel = + integrationDef.label || + getIntegrationTypeDisplayName(undefined, integrationDef.name) || + integrationDef.name || + "Integration"; + const instances = [...(connectedInstancesByProvider.get(providerName) || [])].sort((a, b) => + (a.metadata?.name || providerLabel).localeCompare(b.metadata?.name || providerLabel), + ); + + catalogByProvider.set(providerName, { + providerName, + providerLabel, + integrationDef, + instances, + }); + }); + + connectedInstancesByProvider.forEach((instances, providerName) => { + if (catalogByProvider.has(providerName)) { + return; + } + + const providerLabel = getIntegrationTypeDisplayName(undefined, providerName) || providerName || "Integration"; + const sortedInstances = [...instances].sort((a, b) => + (a.metadata?.name || providerLabel).localeCompare(b.metadata?.name || providerLabel), + ); + + catalogByProvider.set(providerName, { + providerName, + providerLabel, + integrationDef: null, + instances: sortedInstances, + }); + }); + + return [...catalogByProvider.values()].sort((a, b) => a.providerLabel.localeCompare(b.providerLabel)); + }, [availableIntegrations, connectedInstancesByProvider]); + const filteredIntegrationCatalog = useMemo(() => { + const normalizedQuery = filterQuery.trim().toLowerCase(); + if (!normalizedQuery) { + return integrationCatalog; + } + + return integrationCatalog.filter((item) => { + const providerText = [item.providerLabel, item.providerName, item.integrationDef?.description] + .filter(Boolean) + .join(" ") + .toLowerCase(); + + if (providerText.includes(normalizedQuery)) { + return true; + } + + return item.instances.some((instance) => + (instance.metadata?.name || instance.spec?.integrationName || "").toLowerCase().includes(normalizedQuery), + ); + }); + }, [filterQuery, integrationCatalog]); const selectedInstructions = useMemo(() => { return selectedIntegration?.instructions?.trim(); @@ -116,163 +201,158 @@ export function 
Integrations({ organizationId }: IntegrationsProps) { return (
    - {/* Integrations */} - {organizationIntegrations.length > 0 && ( -
    -

    Connected

    -
    - {[...organizationIntegrations] - .sort((a, b) => - (a.metadata?.name || a.spec?.integrationName || "").localeCompare( - b.metadata?.name || b.spec?.integrationName || "", - ), - ) - .map((integration) => { - const integrationDefinition = availableIntegrations.find( - (a) => a.name === integration.spec?.integrationName, - ); - const integrationLabel = - integrationDefinition?.label || - getIntegrationTypeDisplayName(undefined, integration.spec?.integrationName) || - integration.spec?.integrationName; - const integrationDisplayName = - integration.metadata?.name || - getIntegrationTypeDisplayName(undefined, integration.spec?.integrationName) || - integration.spec?.integrationName; - const integrationName = integrationDefinition?.name || integration.spec?.integrationName; - const statusLabel = integration.status?.state - ? integration.status.state.charAt(0).toUpperCase() + integration.status.state.slice(1) - : "Unknown"; - - return ( -
    -
    -
    - -
    -
    -

    - {integrationDisplayName} -

    - {integrationLabel && integrationDisplayName !== integrationLabel ? ( -

    Type: {integrationLabel}

    - ) : null} - {integrationDefinition?.description ? ( -

    - {integrationDefinition.description} -

    - ) : null} -
    +
    + + setFilterQuery(e.target.value)} + placeholder="Filter integrations..." + className="pl-9 pr-9" + /> + {filterQuery.length > 0 ? ( + + ) : null} +
    + {filteredIntegrationCatalog.length === 0 ? ( +
    + +

    + {integrationCatalog.length === 0 ? "No integrations available." : "No integrations match your filter."} +

    +
    + ) : ( +
    + {filteredIntegrationCatalog.map((item) => { + const connectedCount = item.instances.length; + + return ( +
    +
    +
    +
    +
    -
    - - {statusLabel} - - - - +
    +

    {item.providerLabel}

    + {item.integrationDef?.description ? ( +

    + {item.integrationDef?.description} +

    + ) : null}
    - ); - })} -
    -
    - )} - - {/* Available Integrations */} -
    -

    Available

    -
    - {availableIntegrations.length === 0 ? ( -
    - -

    No integrations available.

    -
    - ) : ( -
    - {[...availableIntegrations] - .sort((a, b) => (a.label || a.name || "").localeCompare(b.label || b.name || "")) - .map((app) => { - const appName = app.name; - return ( -
    + + +
    + {item.instances.length > 0 ? ( +
    +

    + {connectedCount} connected instance{connectedCount === 1 ? "" : "s"} +

    + {item.instances.map((integration, index) => { + const integrationDisplayName = + integration.metadata?.name || + getIntegrationTypeDisplayName(undefined, integration.spec?.integrationName) || + integration.spec?.integrationName; + const statusLabel = integration.status?.state + ? integration.status.state.charAt(0).toUpperCase() + integration.status.state.slice(1) + : "Unknown"; - - - -
    - ); - })} -
    - )} + + + {statusLabel} + +

    + {integrationDisplayName} +

    +
    + + + +
    +
    + ); + })} +
    + ) : null} +
    + ); + })}
    -
    + )} {/* Connect Modal */} {isModalOpen && selectedIntegration && (() => { - const integrationName = selectedIntegration.name; + const integrationTypeName = selectedIntegration.name; return (
    @@ -280,7 +360,7 @@ export function Integrations({ organizationId }: IntegrationsProps) {
    diff --git a/web_src/src/pages/organization/settings/index.tsx b/web_src/src/pages/organization/settings/index.tsx index 76cbfa842d..9f018c3c2c 100644 --- a/web_src/src/pages/organization/settings/index.tsx +++ b/web_src/src/pages/organization/settings/index.tsx @@ -18,13 +18,13 @@ import { SecretDetail } from "./SecretDetail"; import SuperplaneLogo from "@/assets/superplane.svg"; import { cn } from "@/lib/utils"; import { - AppWindow, ArrowRightLeft, CircleUser, Home, Key, Lock, LogOut, + Plug, Settings, Shield, User as UserIcon, @@ -144,7 +144,7 @@ export function OrganizationSettings() { id: "integrations", label: "Integrations", href: `/${organizationId}/settings/integrations`, - Icon: AppWindow, + Icon: Plug, permission: { resource: "integrations", action: "read" }, }, { diff --git a/web_src/src/ui/alert/index.tsx b/web_src/src/ui/alert/index.tsx index 88375b290a..8d8a0b43b8 100644 --- a/web_src/src/ui/alert/index.tsx +++ b/web_src/src/ui/alert/index.tsx @@ -9,7 +9,7 @@ const alertVariants = cva( variants: { variant: { default: "bg-background text-foreground", - destructive: "border-destructive/50 text-destructive dark:border-destructive [&>svg]:text-destructive", + destructive: "bg-white border-red-600 text-red-600 [&>svg]:text-red-600", }, }, defaultVariants: { diff --git a/web_src/src/ui/componentHeader/index.tsx b/web_src/src/ui/componentHeader/index.tsx index a3893e0f57..9921bdbfa3 100644 --- a/web_src/src/ui/componentHeader/index.tsx +++ b/web_src/src/ui/componentHeader/index.tsx @@ -39,7 +39,7 @@ export const ComponentHeader: React.FC = ({
    {iconSrc ? ( - {title} + {title} ) : ( )} diff --git a/web_src/src/ui/componentSidebar/SettingsTab.tsx b/web_src/src/ui/componentSidebar/SettingsTab.tsx index 54d3738c87..aa16fb1adc 100644 --- a/web_src/src/ui/componentSidebar/SettingsTab.tsx +++ b/web_src/src/ui/componentSidebar/SettingsTab.tsx @@ -10,7 +10,7 @@ import { Input } from "@/components/ui/input"; import { Label } from "@/components/ui/label"; import { IntegrationIcon } from "@/ui/componentSidebar/integrationIcons"; import { getIntegrationTypeDisplayName } from "@/utils/integrationDisplayName"; -import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select"; +import { Select, SelectContent, SelectItem, SelectSeparator, SelectTrigger, SelectValue } from "@/components/ui/select"; import { ConfigurationFieldRenderer } from "@/ui/configurationFieldRenderer"; import { isFieldRequired, isFieldVisible, parseDefaultValues, validateFieldForSubmission } from "@/utils/components"; import { useRealtimeValidation } from "@/hooks/useRealtimeValidation"; @@ -68,6 +68,7 @@ export function SettingsTab({ canCreateIntegrations, canUpdateIntegrations, }: SettingsTabProps) { + const CONNECT_ANOTHER_INSTANCE_VALUE = "__connect_another_instance__"; const isReadOnly = readOnly ?? false; const allowIntegrations = canReadIntegrations ?? true; const allowCreateIntegrations = canCreateIntegrations ?? true; @@ -321,9 +322,16 @@ export function SettingsTab({ Required )} +

    Instance

    {selectedIntegrationFull && ( <> +

    Connection

    {(() => { const hasIntegrationError = selectedIntegrationFull.status?.state === "error" && @@ -363,7 +378,7 @@ export function SettingsTab({ const integrationStatusCard = (
    Date: Fri, 13 Feb 2026 10:18:39 -0300 Subject: [PATCH 070/160] fix: ignore integration with serialization issues when listing them (#3111) Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- pkg/grpc/actions/organizations/list_integrations.go | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/pkg/grpc/actions/organizations/list_integrations.go b/pkg/grpc/actions/organizations/list_integrations.go index e579d78bc2..2ccfedbd10 100644 --- a/pkg/grpc/actions/organizations/list_integrations.go +++ b/pkg/grpc/actions/organizations/list_integrations.go @@ -4,6 +4,7 @@ import ( "context" "github.com/google/uuid" + log "github.com/sirupsen/logrus" "github.com/superplanehq/superplane/pkg/models" pb "github.com/superplanehq/superplane/pkg/protos/organizations" "github.com/superplanehq/superplane/pkg/registry" @@ -18,8 +19,14 @@ func ListIntegrations(ctx context.Context, registry *registry.Registry, orgID st protos := []*pb.Integration{} for _, integration := range integrations { proto, err := serializeIntegration(registry, &integration, []models.CanvasNodeReference{}) + + // + // If we have an issue serializing an integration, + // we log the error and continue, to avoid failing the entire request. 
+ // if err != nil { - return nil, err + log.Errorf("failed to serialize integration %s: %v", integration.AppName, err) + continue } protos = append(protos, proto) From fa89ddd0169d58d839e32732c0d444faeeae0d7c Mon Sep 17 00:00:00 2001 From: Muhammad Fuzail Zubari Date: Fri, 13 Feb 2026 20:46:55 +0500 Subject: [PATCH 071/160] fix(grafana): restore integration icons and unblock dev startup (#1945) Signed-off-by: Muhammad Fuzail Zubari --- pkg/integrations/grafana/grafana.go | 12 ------------ test/support/contexts/contexts.go | 1 + web_src/src/ui/BuildingBlocksSidebar/index.tsx | 3 +++ web_src/src/ui/componentSidebar/integrationIcons.tsx | 3 +++ 4 files changed, 7 insertions(+), 12 deletions(-) diff --git a/pkg/integrations/grafana/grafana.go b/pkg/integrations/grafana/grafana.go index 33d56fa12f..ca2e301564 100644 --- a/pkg/integrations/grafana/grafana.go +++ b/pkg/integrations/grafana/grafana.go @@ -119,18 +119,6 @@ func (g *Grafana) HandleRequest(ctx core.HTTPRequestContext) { ctx.Response.WriteHeader(404) } -func (g *Grafana) CompareWebhookConfig(a, b any) (bool, error) { - return true, nil -} - func (g *Grafana) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { return []core.IntegrationResource{}, nil } - -func (g *Grafana) SetupWebhook(ctx core.SetupWebhookContext) (any, error) { - return nil, nil -} - -func (g *Grafana) CleanupWebhook(ctx core.CleanupWebhookContext) error { - return nil -} diff --git a/test/support/contexts/contexts.go b/test/support/contexts/contexts.go index f3e78e48ec..5fe47703d1 100644 --- a/test/support/contexts/contexts.go +++ b/test/support/contexts/contexts.go @@ -1,6 +1,7 @@ package contexts import ( + "fmt" "net/http" "time" diff --git a/web_src/src/ui/BuildingBlocksSidebar/index.tsx b/web_src/src/ui/BuildingBlocksSidebar/index.tsx index 38d42dd310..a8a7c2bc6a 100644 --- a/web_src/src/ui/BuildingBlocksSidebar/index.tsx +++ b/web_src/src/ui/BuildingBlocksSidebar/index.tsx @@ -22,6 
+22,7 @@ import discordIcon from "@/assets/icons/integrations/discord.svg"; import githubIcon from "@/assets/icons/integrations/github.svg"; import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; import jiraIcon from "@/assets/icons/integrations/jira.svg"; +import grafanaIcon from "@/assets/icons/integrations/grafana.svg"; import openAiIcon from "@/assets/icons/integrations/openai.svg"; import claudeIcon from "@/assets/icons/integrations/claude.svg"; import cursorIcon from "@/assets/icons/integrations/cursor.svg"; @@ -405,6 +406,7 @@ function CategorySection({ discord: discordIcon, github: githubIcon, gitlab: gitlabIcon, + grafana: grafanaIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, @@ -483,6 +485,7 @@ function CategorySection({ discord: discordIcon, github: githubIcon, gitlab: gitlabIcon, + grafana: grafanaIcon, openai: openAiIcon, "open-ai": openAiIcon, claude: claudeIcon, diff --git a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index fbfa0360e4..b1e8df8445 100644 --- a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -11,6 +11,7 @@ import daytonaIcon from "@/assets/icons/integrations/daytona.svg"; import discordIcon from "@/assets/icons/integrations/discord.svg"; import githubIcon from "@/assets/icons/integrations/github.svg"; import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; +import grafanaIcon from "@/assets/icons/integrations/grafana.svg"; import jiraIcon from "@/assets/icons/integrations/jira.svg"; import openAiIcon from "@/assets/icons/integrations/openai.svg"; import claudeIcon from "@/assets/icons/integrations/claude.svg"; @@ -36,6 +37,7 @@ export const INTEGRATION_APP_LOGO_MAP: Record = { discord: discordIcon, github: githubIcon, gitlab: gitlabIcon, + grafana: grafanaIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, @@ -62,6 +64,7 @@ export const APP_LOGO_MAP: Record> 
= { discord: discordIcon, github: githubIcon, gitlab: gitlabIcon, + grafana: grafanaIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, From cc4a0f8c0c1cd039fd603af1c5cd1cdc0f3d1d2f Mon Sep 17 00:00:00 2001 From: Muhammad Fuzail Zubari Date: Fri, 13 Feb 2026 20:57:00 +0500 Subject: [PATCH 072/160] fix(grafana): allow responses up to max size Signed-off-by: Muhammad Fuzail Zubari --- pkg/integrations/grafana/client.go | 5 ++- pkg/integrations/grafana/client_test.go | 53 +++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 2 deletions(-) create mode 100644 pkg/integrations/grafana/client_test.go diff --git a/pkg/integrations/grafana/client.go b/pkg/integrations/grafana/client.go index 47d6190a87..14da8760db 100644 --- a/pkg/integrations/grafana/client.go +++ b/pkg/integrations/grafana/client.go @@ -101,13 +101,14 @@ func (c *Client) execRequest(method, path string, body io.Reader, contentType st } defer res.Body.Close() - limitedReader := io.LimitReader(res.Body, maxResponseSize) + // Read one byte beyond the max to detect overflow without rejecting an exact-limit response. 
+ limitedReader := io.LimitReader(res.Body, int64(maxResponseSize)+1) responseBody, err := io.ReadAll(limitedReader) if err != nil { return nil, res.StatusCode, fmt.Errorf("error reading body: %v", err) } - if len(responseBody) >= maxResponseSize { + if len(responseBody) > maxResponseSize { return nil, res.StatusCode, fmt.Errorf("response too large: exceeds maximum size of %d bytes", maxResponseSize) } diff --git a/pkg/integrations/grafana/client_test.go b/pkg/integrations/grafana/client_test.go new file mode 100644 index 0000000000..790e2f0ab6 --- /dev/null +++ b/pkg/integrations/grafana/client_test.go @@ -0,0 +1,53 @@ +package grafana + +import ( + "bytes" + "io" + "net/http" + "testing" + + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__Client__ExecRequest__AllowsExactMaxSize(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(bytes.NewReader(bytes.Repeat([]byte("a"), maxResponseSize))), + }, + }, + } + + client := &Client{ + BaseURL: "https://grafana.example.com", + http: httpContext, + } + + body, status, err := client.execRequest(http.MethodGet, "/api/health", nil, "") + require.NoError(t, err) + require.Equal(t, http.StatusOK, status) + require.Len(t, body, maxResponseSize) +} + +func Test__Client__ExecRequest__RejectsOverMaxSize(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(bytes.NewReader(bytes.Repeat([]byte("a"), maxResponseSize+1))), + }, + }, + } + + client := &Client{ + BaseURL: "https://grafana.example.com", + http: httpContext, + } + + _, status, err := client.execRequest(http.MethodGet, "/api/health", nil, "") + require.ErrorContains(t, err, "response too large") + require.Equal(t, http.StatusOK, status) +} + From e1b7cd87e133fd530cf6dbf389e02136743224d1 Mon Sep 17 00:00:00 2001 From: Muhammad Fuzail 
Zubari Date: Fri, 13 Feb 2026 21:05:22 +0500 Subject: [PATCH 073/160] fix(grafana): enforce absolute baseURL and prevent runtime request failures (#1945) Signed-off-by: Muhammad Fuzail Zubari --- pkg/integrations/grafana/client.go | 17 ++++++++++---- pkg/integrations/grafana/client_test.go | 31 +++++++++++++++++++++++++ pkg/integrations/grafana/grafana.go | 17 ++++++++++++-- 3 files changed, 59 insertions(+), 6 deletions(-) diff --git a/pkg/integrations/grafana/client.go b/pkg/integrations/grafana/client.go index 14da8760db..89025224d9 100644 --- a/pkg/integrations/grafana/client.go +++ b/pkg/integrations/grafana/client.go @@ -52,16 +52,25 @@ func readBaseURL(ctx core.IntegrationContext) (string, error) { return "", fmt.Errorf("baseURL is required") } - baseURL := strings.TrimSpace(string(baseURLConfig)) - if baseURL == "" { + baseURLRaw := strings.TrimSpace(string(baseURLConfig)) + if baseURLRaw == "" { return "", fmt.Errorf("baseURL is required") } - if _, err := url.Parse(baseURL); err != nil { + parsed, err := url.Parse(baseURLRaw) + if err != nil { return "", fmt.Errorf("invalid baseURL: %v", err) } - return strings.TrimSuffix(baseURL, "/"), nil + // url.Parse accepts relative URLs (e.g. "grafana.local"), which will fail later in http.NewRequest. + if parsed.Scheme == "" || parsed.Host == "" { + return "", fmt.Errorf("invalid baseURL: must include scheme and host (e.g. 
https://grafana.example.com)") + } + if parsed.Scheme != "http" && parsed.Scheme != "https" { + return "", fmt.Errorf("invalid baseURL: unsupported scheme %q (expected http or https)", parsed.Scheme) + } + + return strings.TrimSuffix(baseURLRaw, "/"), nil } func readAPIToken(ctx core.IntegrationContext) (string, error) { diff --git a/pkg/integrations/grafana/client_test.go b/pkg/integrations/grafana/client_test.go index 790e2f0ab6..3e43a6cd1d 100644 --- a/pkg/integrations/grafana/client_test.go +++ b/pkg/integrations/grafana/client_test.go @@ -7,9 +7,29 @@ import ( "testing" "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" "github.com/superplanehq/superplane/test/support/contexts" ) +func Test__readBaseURL__RejectsRelativeURL(t *testing.T) { + _, err := readBaseURL(&contexts.IntegrationContext{ + Configuration: map[string]any{ + "baseURL": "grafana.local", + }, + }) + require.ErrorContains(t, err, "must include scheme and host") +} + +func Test__readBaseURL__AcceptsAbsoluteHTTPURL(t *testing.T) { + baseURL, err := readBaseURL(&contexts.IntegrationContext{ + Configuration: map[string]any{ + "baseURL": "https://grafana.example.com/", + }, + }) + require.NoError(t, err) + require.Equal(t, "https://grafana.example.com", baseURL) +} + func Test__Client__ExecRequest__AllowsExactMaxSize(t *testing.T) { httpContext := &contexts.HTTPContext{ Responses: []*http.Response{ @@ -51,3 +71,14 @@ func Test__Client__ExecRequest__RejectsOverMaxSize(t *testing.T) { require.Equal(t, http.StatusOK, status) } +func Test__Grafana__Sync__RejectsRelativeBaseURL(t *testing.T) { + err := (&Grafana{}).Sync(core.SyncContext{ + Integration: &contexts.IntegrationContext{ + Configuration: map[string]any{ + "baseURL": "grafana.local", + }, + Metadata: map[string]any{}, + }, + }) + require.ErrorContains(t, err, "must include scheme and host") +} diff --git a/pkg/integrations/grafana/grafana.go b/pkg/integrations/grafana/grafana.go index ca2e301564..822d43919e 
100644 --- a/pkg/integrations/grafana/grafana.go +++ b/pkg/integrations/grafana/grafana.go @@ -2,6 +2,7 @@ package grafana import ( "fmt" + "net/url" "strings" "github.com/mitchellh/mapstructure" @@ -99,16 +100,28 @@ func (g *Grafana) Sync(ctx core.SyncContext) error { return fmt.Errorf("error reading baseURL: %v", err) } - if baseURL == nil || strings.TrimSpace(string(baseURL)) == "" { + baseURLRaw := strings.TrimSpace(string(baseURL)) + if baseURL == nil || baseURLRaw == "" { return fmt.Errorf("baseURL is required") } + parsed, err := url.Parse(baseURLRaw) + if err != nil { + return fmt.Errorf("invalid baseURL: %v", err) + } + if parsed.Scheme == "" || parsed.Host == "" { + return fmt.Errorf("invalid baseURL: must include scheme and host (e.g. https://grafana.example.com)") + } + if parsed.Scheme != "http" && parsed.Scheme != "https" { + return fmt.Errorf("invalid baseURL: unsupported scheme %q (expected http or https)", parsed.Scheme) + } + metadata := IntegrationMetadata{} if err := mapstructure.Decode(ctx.Integration.GetMetadata(), &metadata); err != nil { return fmt.Errorf("failed to decode metadata: %v", err) } - metadata.BaseURL = strings.TrimSuffix(strings.TrimSpace(string(baseURL)), "/") + metadata.BaseURL = strings.TrimSuffix(baseURLRaw, "/") ctx.Integration.SetMetadata(metadata) ctx.Integration.Ready() From 679425ad166b6602aba33788c6177c2f4b8dbaf0 Mon Sep 17 00:00:00 2001 From: Muhammad Fuzail Zubari Date: Fri, 13 Feb 2026 21:11:46 +0500 Subject: [PATCH 074/160] fix(grafana): align Query Data Source example output with event envelope (#1945) Signed-off-by: Muhammad Fuzail Zubari --- pkg/integrations/grafana/example_output_query_data_source.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkg/integrations/grafana/example_output_query_data_source.json b/pkg/integrations/grafana/example_output_query_data_source.json index b8b8b5bb73..1a39f4389e 100644 --- a/pkg/integrations/grafana/example_output_query_data_source.json +++ 
b/pkg/integrations/grafana/example_output_query_data_source.json @@ -1,4 +1,6 @@ { + "type": "grafana.query.result", + "timestamp": "2026-02-12T16:18:03.362582388Z", "data": { "results": { "A": { From 9c5816202c0b1b7adb0b9abb71b0e525ecc111f5 Mon Sep 17 00:00:00 2001 From: Aleksandar Mitrovic <61409859+AleksandarCole@users.noreply.github.com> Date: Fri, 6 Feb 2026 13:47:25 +0100 Subject: [PATCH 075/160] Cursor can create changelog now (#2912) Added Cursor skill and command to review key changes merged in `main` in selected time range and create a summary in temp .md file. Signed-off-by: Muhammad Fuzail Zubari --- .cursor/commands/changelog.md | 10 +++--- .cursor/skills/superplane-changelog/SKILL.md | 34 ++++++++------------ 2 files changed, 19 insertions(+), 25 deletions(-) diff --git a/.cursor/commands/changelog.md b/.cursor/commands/changelog.md index 6f3da21174..05419034b6 100644 --- a/.cursor/commands/changelog.md +++ b/.cursor/commands/changelog.md @@ -6,17 +6,17 @@ description: Generate a "what's new" changelog from merged commits over a time r Generate a changelog of what was merged to `main` for a given time range. The output is a single markdown file in `tmp/` with new integrations, new components and triggers, improvements, security updates, and bug fixes. -**Use the skill `superplane-changelog`** for the full workflow: time range, classifying commits (new integrations vs new components vs improvements vs security vs bug fixes), format rules (no em dashes, no "We", **Integration:** components, user-focused improvements, dedicated Security section with CVE when available), and output structure. Section titles must include numeric counts for both integrations (e.g. "#### 3 new integrations") and components/triggers (e.g. "#### 12 new components and triggers"). 
+**Use the skill `superplane-changelog`** for the full workflow: time range, classifying commits (new integrations vs new components vs improvements vs security vs bug fixes), format rules (no em dashes, no "We", **Integration:** components, user-focused improvements, dedicated Security section with CVE when available), and output structure. ## Input -- **Time range** (required): e.g. "since Monday", "last 5 days", "since last Friday", "from Feb 3 to now", or "since v0.6.0". If the user does not specify, ask or default to "since Monday (5 days)". +- **Time range** (required): e.g. "since Monday", "last 5 days", "since last Friday", or "from Feb 3 to now". If the user does not specify, ask or default to "since Monday (5 days)". ## Process -1. Determine start and end of the window. When the range is version-based (e.g. "since v0.6.0"), use **date and time**: get the tag's commit timestamp (e.g. `git log -1 --format="%cI" v0.6.0`) so the window is strictly after the tag and same-day commits before the tag are excluded. -2. Run `git log --since="" --format="%h %ad %s" main` with `` as a date (`YYYY-MM-DD`) or as the tag's ISO 8601 timestamp when using a version tag. Use `--date=short` or `--date=iso` as appropriate. Use the result to identify what landed in the window. -3. Classify: new integrations (whole integration new), new components/triggers only (filter by date; for existing integrations list only new components), user-facing improvements (no tech-only items), security fixes (separate section; CVE when available), bug fixes. Do not include or derive entries from `chore:` commits. Omit bug/security fixes that only affect a component or integration introduced in this changelog window. +1. Determine start and end dates from the user's time range. +2. Run `git log --since="" --format="%h %ad %s" --date=short main` and use it to identify what landed in the window. +3. 
Classify: new integrations (whole integration new), new components/triggers only (filter by date; for existing integrations list only new components), user-facing improvements (no tech-only items), security fixes (separate section; CVE when available), bug fixes. 4. Resolve component/trigger names from `pkg/integrations/` and `pkg/components/` (Labels). 5. Write `tmp/changelog__to_.md` following the skill's structure and format rules. diff --git a/.cursor/skills/superplane-changelog/SKILL.md b/.cursor/skills/superplane-changelog/SKILL.md index c55b70e929..83d435012c 100644 --- a/.cursor/skills/superplane-changelog/SKILL.md +++ b/.cursor/skills/superplane-changelog/SKILL.md @@ -11,13 +11,9 @@ Use this skill when the user wants a changelog of what was merged to `main` over ## 1. Determine time range -- **User may say**: "since Monday", "last 5 days", "since last Friday", "Feb 3 to now", "since v0.6.0", or a specific date. -- **Compute**: Start and end of the window. Use **date and time** (not just date) when the start is a version tag so that same-day commits before the tag are excluded. - - **Date-only ranges** (e.g. "since Monday", "Feb 3 to now"): Start = date at midnight, end = today. For "last 5 days" use Monday to Friday; for "since last Friday" use that Friday through today. - - **Version-tag ranges** (e.g. "since v0.6.0"): Start = **exact commit timestamp of the tag** (e.g. `git log -1 --format="%cI" v0.6.0` for ISO 8601). End = now or a chosen end date. This ensures commits that landed the same calendar day but before the tag are not included. -- **Git**: Use `git log --since="" --format="%h %ad %s" main` where `` is: - - For date-only: `YYYY-MM-DD` (e.g. `2026-02-03`). Use `--date=short` in the format. - - For version-tag: the tag's commit timestamp in ISO 8601 (e.g. `2026-02-01T15:30:00+00:00`). Use `--date=iso` if you need to compare times. Only include in the changelog items whose commit/merge date is **strictly after** the start when using a tag. 
+- **User may say**: "since Monday", "last 5 days", "since last Friday", "Feb 3 to now", or a specific date. +- **Compute**: Start date (e.g. last Monday = start of week) and end date (today). For "last 5 days" use Monday to Friday; for "since last Friday" use that Friday through today. +- **Git**: Use `git log --since="YYYY-MM-DD" --format="%h %ad %s" --date=short main` to list commits. Only include in the changelog items whose merge/commit date falls **on or after** the start date. --- @@ -25,12 +21,11 @@ Use this skill when the user wants a changelog of what was merged to `main` over From commit messages and dates: -- **Exclude `chore:` commits (mandatory).** Do not list or derive any changelog entry from commits whose subject starts with `chore:` or `chore(...):`. This applies to every section: do not add an improvement, integration, component, or any other bullet based on a chore commit, even if the change seems user-facing (e.g. "Allow running multiple instances" is still a chore and must be omitted). When classifying what landed, skip chore commits entirely; only use `feat:`, `fix:`, `docs:` (for user-facing doc changes), and similar non-chore prefixes as sources for changelog entries. - **New integrations**: Integrations that were **fully added** in the window (base integration registered + first components). Example: SendGrid, Jira. Do **not** count standalone components (e.g. SSH is a component under `pkg/components/ssh`, not an integration). -- **New components and triggers**: Only components/triggers that **first appeared in the time window**. If an integration already existed, list only the new component(s) (e.g. GitHub: Get Release). If the integration is new, list all its components and triggers. Use commit timestamps (date and time) to exclude anything that landed before the start of the window (e.g. when the window is "since v0.6.0", exclude commits with timestamp on or before the tag's commit time, so same-day commits before the tag are excluded). 
-- **Improvements**: User-facing product changes from non-chore commits only (e.g. RBAC, Secrets, integrations UX). Exclude internal/technical items (e.g. "Component/Trigger Cleanup()", "listing integration resources with additional parameters", Cursor skills). Describe each improvement in user-oriented terms: what the user can do, what problem it solves, or what benefit they get (e.g. "Define roles and permissions and control what each user can do" rather than "Permission guard in the UI"). -- **Security**: Vulnerability fixes and security-related changes from the same commit range. Look for commits that mention "security", "SSRF", "CVE", "vulnerability", "auth", "injection", "XSS", "sanitiz", etc. Include a dedicated **Security** section whenever at least one such fix is present. Do not list a security fix if it only affects a component or integration that was introduced in this changelog window. -- **Bug fixes**: Fixes and reliability improvements from the same commit range (excluding security fixes, which go under Security). Keep in "Bug Fixes" even if somewhat technical. Do not list a fix if it only affects a component or integration that was introduced in this changelog window (e.g. "fix: AWS ECR timestamp" when ECR was added in the same window). +- **New components and triggers**: Only components/triggers that **first appeared in the time window**. If an integration already existed, list only the new component(s) (e.g. GitHub: Get Release). If the integration is new, list all its components and triggers. Use commit dates to exclude anything that landed before the start date (e.g. Cloudflare DNS records merged Feb 1 are excluded if the window is "Monday Feb 3 to now"). +- **Improvements**: User-facing product changes (RBAC, Secrets, Bounty Program, integrations UX, list vs expression, multiple instances). Exclude internal/technical items (e.g. "Component/Trigger Cleanup()", "listing integration resources with additional parameters", Cursor skills). 
+- **Security**: Vulnerability fixes and security-related changes from the same commit range. Look for commits that mention "security", "SSRF", "CVE", "vulnerability", "auth", "injection", "XSS", "sanitiz", etc. Include a dedicated **Security** section whenever at least one such fix is present. +- **Bug fixes**: Fixes and reliability improvements from the same commit range (excluding security fixes, which go under Security). Keep in "Bug Fixes" even if somewhat technical. To resolve component/trigger names and which integration they belong to, use `pkg/integrations/*/` and `pkg/components/*/`: check each integration's `Components()` and `Triggers()` and their `Label()` / `Name()` (e.g. `aws.go` for AWS, `ecr/`, `codeartifact/`). @@ -42,9 +37,9 @@ To resolve component/trigger names and which integration they belong to, use `pk - **No "We" language**. Use direct, neutral phrasing (e.g. "Role-based access control." not "We introduced role-based access control."). - **New integrations section**: List only integration names, one per line (e.g. SendGrid, Jira). - **New components section**: Use **Integration:** Component1, Component2, ... One line per integration or standalone component (e.g. **GitHub:** Get Release; **SSH:** Run commands on remote hosts). -- **Improvements**: Each bullet is **Bold label**: Short, user-oriented description. Write from the user's perspective: what they can do, what problem it solves, or what benefit they get. Avoid implementation jargon (e.g. "permission guard", "payload limit"); prefer outcome and capability (e.g. "Control what each user can do in your organization", "Secrets can be used in the SSH component to store private keys"). No "We". +- **Improvements**: Each bullet is **Bold label**: Short, user-focused description. No implementation details. No "We". - **Security**: Dedicated section (use only when there are security-related commits). Each bullet: include **CVE identifier** when available (e.g. 
CVE-2024-12345), then a short description of the vulnerability or fix. If no CVE, use "Fixed: " plus description (e.g. "Fixed: SSRF protection added to HTTP requests"). Same tone as rest of changelog; no em dashes. -- **Bug fixes**: Each bullet starts with "Fixed: " then a short description. Do not list security fixes here; they go under Security. Omit fixes that only apply to components or integrations that are new in this changelog. +- **Bug fixes**: Each bullet starts with "Fixed: " then a short description. Do not list security fixes here; they go under Security. --- @@ -52,8 +47,6 @@ To resolve component/trigger names and which integration they belong to, use `pk Write a single file to `tmp/changelog_YYYY-MM-DD_to_YYYY-MM-DD.md` (or similar) with this structure: -- **Section titles must include the numeric count** for both integrations and components (e.g. "#### 3 new integrations", "#### 12 new components and triggers"). Count each integration as 1. For components and triggers, count each component or trigger as 1 (e.g. one line "**GitHub:** Get Release, On Release" is 2). - ```markdown # SuperPlane Changelog (Feb X-Y, YYYY) @@ -74,6 +67,7 @@ Write a single file to `tmp/changelog_YYYY-MM-DD_to_YYYY-MM-DD.md` (or similar) - **RBAC**: Role-based access control. Define roles and permissions... - **Secrets**: Create, update, and delete organization secrets... + - **Bounty Program**: Get paid for building integrations. See [link]... - (etc.) #### Security @@ -89,15 +83,15 @@ Write a single file to `tmp/changelog_YYYY-MM-DD_to_YYYY-MM-DD.md` (or similar) ``` - Use three spaces before list bullets for indentation under each #### heading. -- Replace N and M with the actual counts. N = number of integrations listed. M = total number of components and triggers (each component or trigger counts as 1, even when several are on one line). Counts must match the listed items and the chosen time window. 
+- Counts (N new integrations, M new components and triggers) must match the listed items and the chosen time window. --- ## 5. Workflow summary -1. Ask for or infer time range (e.g. "Monday to now" = 5 days; "since v0.6.0" = after the tag's commit timestamp). -2. Run `git log --since="" --format="%h %ad %s" main` with `` as date (`YYYY-MM-DD`) or as the tag's commit timestamp in ISO 8601 when the range is version-based. Use `--date=short` or `--date=iso` as needed. Optionally inspect merge dates for key PRs. -3. Identify new integrations (whole new integration only), new components/triggers (per integration, only in window), improvements (user-facing only; never derived from chore commits), security fixes (dedicated section; separate from bug fixes), and bug fixes. Do not include or derive any entry from `chore:` or `chore(...):` commits in any section. +1. Ask for or infer time range (e.g. "Monday to now" = 5 days). +2. Run `git log --since="" --format="%h %ad %s" --date=short main` and optionally inspect merge dates for key PRs. +3. Identify new integrations (whole new integration only), new components/triggers (per integration, only in window), improvements (user-facing only), security fixes (dedicated section; separate from bug fixes), and bug fixes. 4. Resolve labels from code: `pkg/integrations//` and `pkg/components/` for component/trigger names. 5. Write `tmp/changelog_.md` following the structure and format rules above. 6. Tell the user the file path and that they can review or edit it. From 669f514e5b74188481bdfb900c05f152c93843a6 Mon Sep 17 00:00:00 2001 From: harxhist Date: Fri, 6 Feb 2026 22:13:22 +0530 Subject: [PATCH 076/160] feat: Add Claude integration and Create Message component (#2909) ### Summary This PR introduces the **Anthropic Claude** integration to SuperPlane. It establishes the base connection using an API Key and adds the `createMessage` component. 
This allows workflows to generate text, summaries, and structured responses using Claude models (e.g., for incident summaries, PR analysis, or documentation). Resolves #2623 Resolves #2624 ### Features - **Base Integration:** Connects to Anthropic via API Key (header authentication). - **Create Message Component:** - Support for user `Prompt` and optional `System message`. - Configurable `Model` selection (e.g., `claude-3-5-sonnet`, `claude-3-5-haiku`). - Adjustable `Max tokens` and `Temperature`. - **UI/UX:** Added integration icon and frontend mappers for the workflow builder. ### Output The `createMessage` component returns a single assistant reply to the output channel containing: - `id`: The unique message identifier. - `content`: The text content of the response. - `usage`: Token statistics (`input_tokens`, `output_tokens`). - `stop_reason`: The reason the generation stopped. ### Implementation Details - **Backend:** Implemented package `claude` to handle requests to `https://api.anthropic.com/v1/`. - **Structure:** Followed standard integration patterns for connection and separate files for components. - **Testing:** Added comprehensive unit tests for execution logic. - **Frontend:** Registered the integration in the sidebar and added necessary mappers in `web_src`. ### Files Changed In total 29 files are added or updated. 
| **Changes** | **Change Description** | | ----------------------------------------------- | ------------------------------------------------------------ | | `pkg/integrations/claude/claude.go` | Base integration setup and definition | | `pkg/integrations/claude/create_message.go` | `createMessage` component logic and execution | | `pkg/integrations/claude/client.go` | HTTP client for Anthropic API interaction | | `pkg/integrations/claude/*_test.go` | Unit tests for base, client, and component | | `pkg/integrations/claude/example*` | Example output JSON and embedding logic | | `docs/components/` | Added `Claude.mdx` & updated ordering for other components | | `web_src/src/pages/workflowv2/mappers/claude/*` | Frontend mappers for UI configuration | | `web_src/src/ui/...` & `web_src/src/utils/...` | UI registration, sidebar updates, and display name utilities | | `web_src/src/assets/...` | Claude integration SVG icon | | `pkg/server/server.go` | Registered new Claude integration | ### Test Plan - [x] All existing tests pass - [x] New component tests pass (`go test ./pkg/integrations/claude/...`) - [x] Code compiles without errors - [x] Verified API connection with valid Anthropic API Key - [x] Verified UI rendering of the Claude component in the Workflow Builder - [x] Used Claude in the Workflow and recorded video: [Watch Video (Loom)](https://www.loom.com/share/6b7af4edfca749f89a98417a96d58a80) ### Checklist - [x] Signed-off commits - [x] Unit tests - [x] Example output JSON - [x] Documentation in component --------- Signed-off-by: Harsh Signed-off-by: Muhammad Fuzail Zubari --- pkg/integrations/claude/create_message.go | 262 ++++++++++++++ .../claude/create_message_test.go | 321 ++++++++++++++++++ .../claude/example_output_create_message.json | 31 ++ 3 files changed, 614 insertions(+) create mode 100644 pkg/integrations/claude/create_message.go create mode 100644 pkg/integrations/claude/create_message_test.go create mode 100644 
pkg/integrations/claude/example_output_create_message.json diff --git a/pkg/integrations/claude/create_message.go b/pkg/integrations/claude/create_message.go new file mode 100644 index 0000000000..fa2f74004e --- /dev/null +++ b/pkg/integrations/claude/create_message.go @@ -0,0 +1,262 @@ +package claude + +import ( + "fmt" + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "net/http" + "strings" +) + +const MessagePayloadType = "claude.message" + +type CreateMessage struct{} + +type CreateMessageSpec struct { + Model string `json:"model"` + Prompt string `json:"prompt"` + SystemMessage string `json:"systemMessage"` + MaxTokens int `json:"maxTokens"` + Temperature *float64 `json:"temperature"` +} + +type MessagePayload struct { + ID string `json:"id"` + Model string `json:"model"` + Text string `json:"text"` + Usage *MessageUsage `json:"usage,omitempty"` + StopReason string `json:"stopReason,omitempty"` + Response *CreateMessageResponse `json:"response"` +} + +func (c *CreateMessage) Name() string { + return "claude.createMessage" +} + +func (c *CreateMessage) Label() string { + return "Create Message" +} + +func (c *CreateMessage) Description() string { + return "Generate a response using Anthropic's Claude models via the Messages API" +} + +func (c *CreateMessage) Documentation() string { + return `The Create Message component uses Anthropic's Claude models to generate text responses. + +## Use Cases + +- **Summarization**: Generate summaries of incidents or deployments. +- **Code Analysis**: specific code review or PR comments. +- **Content Generation**: Create documentation or drafting communications. + +## Configuration + +- **Model**: The Claude model to use (e.g., claude-3-5-sonnet-latest). +- **Prompt**: The main user message/instruction. +- **System Message**: (Optional) Context to define the assistant's behavior or persona. 
+- **Max Tokens**: (Optional) Limit the length of the generated response. +- **Temperature**: (Optional) Control randomness (0.0 to 1.0). + +## Output + +Returns a payload containing: +- **text**: The content generated by Claude. +- **usage**: Input and output token counts. +- **stopReason**: Why the generation ended (e.g., "end_turn", "max_tokens"). +- **model**: The specific model version used. + +## Notes + +- Requires a valid Claude API key configured in integration +- Response quality and speed depend on the selected model +- Token usage is tracked and may incur costs based on your Claude plan +` +} + +func (c *CreateMessage) Icon() string { + return "message-square" +} + +func (c *CreateMessage) Color() string { + return "orange" +} + +func (c *CreateMessage) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *CreateMessage) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "model", + Label: "Model", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + Default: "claude-opus-4-6", + Placeholder: "Select a Claude model", + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "model", + }, + }, + }, + { + Name: "prompt", + Label: "Prompt", + Type: configuration.FieldTypeText, + Required: true, + Placeholder: "Enter the user prompt", + Description: "The main instruction or question for Claude", + }, + { + Name: "systemMessage", + Label: "System Message", + Type: configuration.FieldTypeText, + Required: false, + Placeholder: "e.g. You are a concise DevOps assistant", + Description: "Optional context to set behavior or persona", + }, + { + Name: "maxTokens", + Label: "Max Tokens", + Type: configuration.FieldTypeNumber, + Required: false, + Default: "4096", + Description: "Maximum number of tokens to generate e.g. 
Defaults to 4096.", + }, + { + Name: "temperature", + Label: "Temperature", + Type: configuration.FieldTypeNumber, + Required: false, + Default: "1.0", + Description: "Amount of randomness injected into the response (0.0 to 1.0)", + }, + } +} + +func (c *CreateMessage) Setup(ctx core.SetupContext) error { + spec := CreateMessageSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %v", err) + } + + if spec.Model == "" { + return fmt.Errorf("model is required") + } + + if spec.Prompt == "" { + return fmt.Errorf("prompt is required") + } + + return nil +} + +func (c *CreateMessage) Execute(ctx core.ExecutionContext) error { + spec := CreateMessageSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %v", err) + } + + if spec.Model == "" { + return fmt.Errorf("model is required") + } + if spec.Prompt == "" { + return fmt.Errorf("prompt is required") + } + + if spec.MaxTokens == 0 { + spec.MaxTokens = 4096 + } + + if spec.MaxTokens < 1 { + return fmt.Errorf("maxTokens must be at least 1") + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + req := CreateMessageRequest{ + Model: spec.Model, + MaxTokens: spec.MaxTokens, + Messages: []Message{ + { + Role: "user", + Content: spec.Prompt, + }, + }, + Temperature: spec.Temperature, + } + + if spec.SystemMessage != "" { + req.System = spec.SystemMessage + } + + response, err := client.CreateMessage(req) + if err != nil { + return err + } + + text := extractMessageText(response) + + payload := MessagePayload{ + ID: response.ID, + Model: response.Model, + Text: text, + Usage: &response.Usage, + StopReason: response.StopReason, + Response: response, + } + + return ctx.ExecutionState.Emit( + core.DefaultOutputChannel.Name, + MessagePayloadType, + []any{payload}, + ) +} + +func (c *CreateMessage) Cancel(ctx core.ExecutionContext) 
error { + return nil +} + +func (c *CreateMessage) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *CreateMessage) Actions() []core.Action { + return []core.Action{} +} + +func (c *CreateMessage) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *CreateMessage) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *CreateMessage) Cleanup(ctx core.SetupContext) error { + return nil +} + +func extractMessageText(response *CreateMessageResponse) string { + if response == nil || len(response.Content) == 0 { + return "" + } + + var builder strings.Builder + for _, block := range response.Content { + if block.Type == "text" { + if builder.Len() > 0 { + builder.WriteString("\n") + } + builder.WriteString(block.Text) + } + } + return builder.String() +} diff --git a/pkg/integrations/claude/create_message_test.go b/pkg/integrations/claude/create_message_test.go new file mode 100644 index 0000000000..5c78764850 --- /dev/null +++ b/pkg/integrations/claude/create_message_test.go @@ -0,0 +1,321 @@ +package claude + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "testing" + + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +// --- Mocks --- + +// mockExecutionState implements core.ExecutionStateContext +type mockExecutionState struct { + EmittedChannel string + EmittedType string + EmittedPayloads []any + Finished bool + Failed bool + FailReason, FailMsg string +} + +func (m *mockExecutionState) IsFinished() bool { return m.Finished } +func (m *mockExecutionState) SetKV(key, value string) error { return nil } + +func (m *mockExecutionState) Emit(channel, payloadType string, payloads []any) error { + m.EmittedChannel = channel + m.EmittedType = payloadType + m.EmittedPayloads = payloads + return nil +} + +func (m *mockExecutionState) Pass() error { + m.Finished = true + 
return nil +} + +func (m *mockExecutionState) Fail(reason, message string) error { + m.Finished = true + m.Failed = true + m.FailReason = reason + m.FailMsg = message + return nil +} + +// --- Tests --- + +func TestCreateMessage_Configuration(t *testing.T) { + c := &CreateMessage{} + config := c.Configuration() + + expectedFields := map[string]struct { + Required bool + Type string + }{ + "model": {true, string(configuration.FieldTypeIntegrationResource)}, + "prompt": {true, string(configuration.FieldTypeText)}, + "systemMessage": {false, string(configuration.FieldTypeText)}, + "maxTokens": {false, string(configuration.FieldTypeNumber)}, + "temperature": {false, string(configuration.FieldTypeNumber)}, + } + + for _, field := range config { + expected, ok := expectedFields[field.Name] + if !ok { + t.Errorf("unexpected field: %s", field.Name) + continue + } + if field.Required != expected.Required { + t.Errorf("field %s: expected required %v, got %v", field.Name, expected.Required, field.Required) + } + if string(field.Type) != expected.Type { + t.Errorf("field %s: expected type %s, got %s", field.Name, expected.Type, field.Type) + } + } +} + +func TestCreateMessage_Setup(t *testing.T) { + c := &CreateMessage{} + + tests := []struct { + name string + config map[string]interface{} + expectError bool + }{ + { + name: "Valid Config", + config: map[string]interface{}{ + "model": "claude-3-opus", + "prompt": "Hello", + }, + expectError: false, + }, + { + name: "Missing Model", + config: map[string]interface{}{ + "prompt": "Hello", + }, + expectError: true, + }, + { + name: "Missing Prompt", + config: map[string]interface{}{ + "model": "claude-3-opus", + }, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ctx := core.SetupContext{ + Configuration: tt.config, + } + err := c.Setup(ctx) + if tt.expectError && err == nil { + t.Error("expected error, got nil") + } + if !tt.expectError && err != nil { + t.Errorf("unexpected 
error: %v", err) + } + }) + } +} + +func TestCreateMessage_Execute(t *testing.T) { + c := &CreateMessage{} + + // Helper to create a valid response JSON + validResponseJSON := `{ + "id": "msg_01", + "type": "message", + "role": "assistant", + "model": "claude-3-test", + "content": [ + {"type": "text", "text": "Hello world"} + ], + "stop_reason": "end_turn", + "usage": {"input_tokens": 10, "output_tokens": 5} + }` + + tests := []struct { + name string + config map[string]interface{} + mockResponse func(*http.Request) *http.Response + expectError bool + expectEmission bool + validatePayload func(*testing.T, MessagePayload) + }{ + { + name: "Success", + config: map[string]interface{}{ + "model": "claude-3-test", + "prompt": "Say hello", + "maxTokens": 500, + "systemMessage": "You are a bot", + "temperature": 0.7, + }, + mockResponse: func(req *http.Request) *http.Response { + // Verify request body + body, _ := io.ReadAll(req.Body) + var sent CreateMessageRequest + json.Unmarshal(body, &sent) + + if sent.Model != "claude-3-test" || sent.MaxTokens != 500 || sent.System != "You are a bot" { + return &http.Response{StatusCode: 400, Body: io.NopCloser(bytes.NewBufferString("bad request body"))} + } + + return &http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewBufferString(validResponseJSON)), + } + }, + expectError: false, + expectEmission: true, + validatePayload: func(t *testing.T, p MessagePayload) { + if p.Text != "Hello world" { + t.Errorf("expected text 'Hello world', got '%s'", p.Text) + } + if p.ID != "msg_01" { + t.Errorf("expected ID 'msg_01', got '%s'", p.ID) + } + if p.Usage.InputTokens != 10 { + t.Errorf("expected usage 10, got %d", p.Usage.InputTokens) + } + }, + }, + { + name: "Missing Configuration in Execute", + config: map[string]interface{}{ + "model": "", // Invalid + }, + expectError: true, + }, + { + name: "API Error", + config: map[string]interface{}{ + "model": "claude-3-test", + "prompt": "fail me", + }, + mockResponse: func(req 
*http.Request) *http.Response { + return &http.Response{ + StatusCode: 500, + Body: io.NopCloser(bytes.NewBufferString(`{"error": {"message": "internal error"}}`)), + } + }, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Setup Mocks + mockState := &mockExecutionState{} + mockHTTP := &mockHTTPContext{RoundTripFunc: tt.mockResponse} + mockInt := &mockIntegrationContext{ + config: map[string][]byte{ + "apiKey": []byte("test-key"), + }, + } + + ctx := core.ExecutionContext{ + Configuration: tt.config, + ExecutionState: mockState, + HTTP: mockHTTP, + Integration: mockInt, + } + + err := c.Execute(ctx) + + if tt.expectError { + if err == nil { + t.Error("expected error, got nil") + } + return + } + + if err != nil { + t.Errorf("unexpected error: %v", err) + return + } + + if tt.expectEmission { + if mockState.EmittedType != MessagePayloadType { + t.Errorf("expected emitted type %s, got %s", MessagePayloadType, mockState.EmittedType) + } + if len(mockState.EmittedPayloads) != 1 { + t.Errorf("expected 1 payload, got %d", len(mockState.EmittedPayloads)) + } else if tt.validatePayload != nil { + // Convert payload back to struct for validation + // In real execution this is passed as any, here we cast it + payload, ok := mockState.EmittedPayloads[0].(MessagePayload) + if !ok { + t.Error("emitted payload is not MessagePayload") + } else { + tt.validatePayload(t, payload) + } + } + } + }) + } +} + +func TestExtractMessageText(t *testing.T) { + // This function is unexported, but accessible within the package_test if package is same + // If the test file is package claude_test, we can't access it. + // Assuming package claude per user instruction. 
+ + tests := []struct { + name string + response *CreateMessageResponse + expected string + }{ + { + name: "Nil Response", + response: nil, + expected: "", + }, + { + name: "Single Text Block", + response: &CreateMessageResponse{ + Content: []MessageContent{ + {Type: "text", Text: "Hello"}, + }, + }, + expected: "Hello", + }, + { + name: "Multiple Text Blocks", + response: &CreateMessageResponse{ + Content: []MessageContent{ + {Type: "text", Text: "Hello"}, + {Type: "text", Text: "World"}, + }, + }, + expected: "Hello\nWorld", + }, + { + name: "Mixed Blocks (ignore non-text if any)", + response: &CreateMessageResponse{ + Content: []MessageContent{ + {Type: "image", Text: ""}, // hypothetical non-text + {Type: "text", Text: "Real Text"}, + }, + }, + expected: "Real Text", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := extractMessageText(tt.response) + if got != tt.expected { + t.Errorf("expected '%s', got '%s'", tt.expected, got) + } + }) + } +} diff --git a/pkg/integrations/claude/example_output_create_message.json b/pkg/integrations/claude/example_output_create_message.json new file mode 100644 index 0000000000..2fd3c0e729 --- /dev/null +++ b/pkg/integrations/claude/example_output_create_message.json @@ -0,0 +1,31 @@ +{ + "type": "claude.message", + "data": { + "id": "msg_01X9JGt5...123456", + "model": "claude-3-5-sonnet-latest", + "text": "Here is the summary of the deployment logs you requested...", + "stopReason": "end_turn", + "usage": { + "input_tokens": 45, + "output_tokens": 120 + }, + "response": { + "id": "msg_01X9JGt5...123456", + "type": "message", + "role": "assistant", + "model": "claude-3-5-sonnet-latest", + "content": [ + { + "type": "text", + "text": "Here is the summary of the deployment logs you requested..." 
+ } + ], + "stop_reason": "end_turn", + "usage": { + "input_tokens": 45, + "output_tokens": 120 + } + } + }, + "timestamp": "2026-02-06T12:00:00Z" +} \ No newline at end of file From ed74ad38fa18fd4c6391667149c2cca6ee5436bc Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Fri, 6 Feb 2026 17:46:51 -0300 Subject: [PATCH 077/160] docs: update component-review.rules.md (#2917) Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- .cursor/commands/component-review.rules.md | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/.cursor/commands/component-review.rules.md b/.cursor/commands/component-review.rules.md index 5e66cc0965..172daafc3a 100644 --- a/.cursor/commands/component-review.rules.md +++ b/.cursor/commands/component-review.rules.md @@ -72,8 +72,8 @@ The output channels shown in the UI include at least one channel (or rely on def ### Webhooks -- If the webhook is not configured through the integration, use `ctx.Webhook.Setup()`. If the webhook is configured through the integration, use `ctx.Integration.RequestWebhook()` and implement a `core.WebhookHandler` with `Setup`, `Cleanup`, and `CompareConfig`, registered via `registry.RegisterIntegrationWithWebhookHandler`. -- We should always aim to share webhooks between components if they use the same underlying event configuration. Use `CompareConfig` for that. For example, if we have two `github.onPush` triggers, one for main branch, and one for release branches, both of those triggers use the same webhook in GitHub. +- If the webhook is not configured through the integration, use `ctx.Webhook.Setup()`. If the webhook is configured through the integration, `ctx.Integration.RequestWebhook()` and implement the integration's `SetupWebhook`, `CleanupWebhook` +- We should always aim to share webhooks between components, if they use the same underlying event configuration. Use `CompareWebhookConfig` for that. 
For example, if we have two `github.onPush` triggers, one for main branch, and one for release branches, both of those triggers use the same webhook in GitHub. ### Triggers @@ -82,13 +82,6 @@ The output channels shown in the UI include at least one channel (or rely on def - `github.onPush`: we select the repository we want to listen to - `pagerduty.onIncident`: we select the service -### Security - -- Components should always execute HTTP requests using the `HTTPContext` available to them, and never use `net/http` to do so -- Components should never import `pkg/models` and interact with database directly, only through methods provided through core interfaces -- HandleWebhook() implementations in components/triggers should always verify that the requests are authenticated using the secret in the webhook -- HandleRequest() implementations in integrations should always verify that the requests are authenticated using the secret in the webhook - ## Code Quality ### Unit testing From 10fa4de474dfa1c6ecc718ad2e5e5dd67019276d Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Fri, 6 Feb 2026 19:01:57 -0300 Subject: [PATCH 078/160] docs: include security considerations on component review rules (#2919) Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- .cursor/commands/component-review.rules.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.cursor/commands/component-review.rules.md b/.cursor/commands/component-review.rules.md index 172daafc3a..233993223b 100644 --- a/.cursor/commands/component-review.rules.md +++ b/.cursor/commands/component-review.rules.md @@ -82,6 +82,13 @@ The output channels shown in the UI include at least one channel (or rely on def - `github.onPush`: we select the repository we want to listen to - `pagerduty.onIncident`: we select the service +### Security + +- Components should always execute HTTP requests using the `HTTPContext` available to them, and never use `net/http` to do so +- Components should never import 
`pkg/models` and interact with database directly, only through methods provided through core interfaces +- HandleWebhook() implementations in components/triggers should always verify that the requests are authenticated using the secret in the webhook +- HandleRequest() implementations in integrations should always verify that the requests are authenticated using the secret in the webhook + ## Code Quality ### Unit testing From 82fc46ed690ba0fe8f289e8bed927e245ed6350c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Sun, 8 Feb 2026 12:18:00 +0100 Subject: [PATCH 079/160] chore: Rename claude.createMessage -> claude.textPrompt (#2948) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit For consistency with openai. No one is using this component yet, no need to write data migrations. --------- Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- pkg/integrations/claude/create_message.go | 262 -------------- .../claude/create_message_test.go | 321 ------------------ .../claude/example_output_create_message.json | 31 -- pkg/integrations/claude/text_prompt_test.go | 128 ++++--- 4 files changed, 80 insertions(+), 662 deletions(-) delete mode 100644 pkg/integrations/claude/create_message.go delete mode 100644 pkg/integrations/claude/create_message_test.go delete mode 100644 pkg/integrations/claude/example_output_create_message.json diff --git a/pkg/integrations/claude/create_message.go b/pkg/integrations/claude/create_message.go deleted file mode 100644 index fa2f74004e..0000000000 --- a/pkg/integrations/claude/create_message.go +++ /dev/null @@ -1,262 +0,0 @@ -package claude - -import ( - "fmt" - "github.com/google/uuid" - "github.com/mitchellh/mapstructure" - "github.com/superplanehq/superplane/pkg/configuration" - "github.com/superplanehq/superplane/pkg/core" - "net/http" - "strings" -) - -const MessagePayloadType = "claude.message" - -type CreateMessage struct{} - -type 
CreateMessageSpec struct { - Model string `json:"model"` - Prompt string `json:"prompt"` - SystemMessage string `json:"systemMessage"` - MaxTokens int `json:"maxTokens"` - Temperature *float64 `json:"temperature"` -} - -type MessagePayload struct { - ID string `json:"id"` - Model string `json:"model"` - Text string `json:"text"` - Usage *MessageUsage `json:"usage,omitempty"` - StopReason string `json:"stopReason,omitempty"` - Response *CreateMessageResponse `json:"response"` -} - -func (c *CreateMessage) Name() string { - return "claude.createMessage" -} - -func (c *CreateMessage) Label() string { - return "Create Message" -} - -func (c *CreateMessage) Description() string { - return "Generate a response using Anthropic's Claude models via the Messages API" -} - -func (c *CreateMessage) Documentation() string { - return `The Create Message component uses Anthropic's Claude models to generate text responses. - -## Use Cases - -- **Summarization**: Generate summaries of incidents or deployments. -- **Code Analysis**: specific code review or PR comments. -- **Content Generation**: Create documentation or drafting communications. - -## Configuration - -- **Model**: The Claude model to use (e.g., claude-3-5-sonnet-latest). -- **Prompt**: The main user message/instruction. -- **System Message**: (Optional) Context to define the assistant's behavior or persona. -- **Max Tokens**: (Optional) Limit the length of the generated response. -- **Temperature**: (Optional) Control randomness (0.0 to 1.0). - -## Output - -Returns a payload containing: -- **text**: The content generated by Claude. -- **usage**: Input and output token counts. -- **stopReason**: Why the generation ended (e.g., "end_turn", "max_tokens"). -- **model**: The specific model version used. 
- -## Notes - -- Requires a valid Claude API key configured in integration -- Response quality and speed depend on the selected model -- Token usage is tracked and may incur costs based on your Claude plan -` -} - -func (c *CreateMessage) Icon() string { - return "message-square" -} - -func (c *CreateMessage) Color() string { - return "orange" -} - -func (c *CreateMessage) OutputChannels(configuration any) []core.OutputChannel { - return []core.OutputChannel{core.DefaultOutputChannel} -} - -func (c *CreateMessage) Configuration() []configuration.Field { - return []configuration.Field{ - { - Name: "model", - Label: "Model", - Type: configuration.FieldTypeIntegrationResource, - Required: true, - Default: "claude-opus-4-6", - Placeholder: "Select a Claude model", - TypeOptions: &configuration.TypeOptions{ - Resource: &configuration.ResourceTypeOptions{ - Type: "model", - }, - }, - }, - { - Name: "prompt", - Label: "Prompt", - Type: configuration.FieldTypeText, - Required: true, - Placeholder: "Enter the user prompt", - Description: "The main instruction or question for Claude", - }, - { - Name: "systemMessage", - Label: "System Message", - Type: configuration.FieldTypeText, - Required: false, - Placeholder: "e.g. You are a concise DevOps assistant", - Description: "Optional context to set behavior or persona", - }, - { - Name: "maxTokens", - Label: "Max Tokens", - Type: configuration.FieldTypeNumber, - Required: false, - Default: "4096", - Description: "Maximum number of tokens to generate e.g. 
Defaults to 4096.", - }, - { - Name: "temperature", - Label: "Temperature", - Type: configuration.FieldTypeNumber, - Required: false, - Default: "1.0", - Description: "Amount of randomness injected into the response (0.0 to 1.0)", - }, - } -} - -func (c *CreateMessage) Setup(ctx core.SetupContext) error { - spec := CreateMessageSpec{} - if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { - return fmt.Errorf("failed to decode configuration: %v", err) - } - - if spec.Model == "" { - return fmt.Errorf("model is required") - } - - if spec.Prompt == "" { - return fmt.Errorf("prompt is required") - } - - return nil -} - -func (c *CreateMessage) Execute(ctx core.ExecutionContext) error { - spec := CreateMessageSpec{} - if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { - return fmt.Errorf("failed to decode configuration: %v", err) - } - - if spec.Model == "" { - return fmt.Errorf("model is required") - } - if spec.Prompt == "" { - return fmt.Errorf("prompt is required") - } - - if spec.MaxTokens == 0 { - spec.MaxTokens = 4096 - } - - if spec.MaxTokens < 1 { - return fmt.Errorf("maxTokens must be at least 1") - } - - client, err := NewClient(ctx.HTTP, ctx.Integration) - if err != nil { - return err - } - - req := CreateMessageRequest{ - Model: spec.Model, - MaxTokens: spec.MaxTokens, - Messages: []Message{ - { - Role: "user", - Content: spec.Prompt, - }, - }, - Temperature: spec.Temperature, - } - - if spec.SystemMessage != "" { - req.System = spec.SystemMessage - } - - response, err := client.CreateMessage(req) - if err != nil { - return err - } - - text := extractMessageText(response) - - payload := MessagePayload{ - ID: response.ID, - Model: response.Model, - Text: text, - Usage: &response.Usage, - StopReason: response.StopReason, - Response: response, - } - - return ctx.ExecutionState.Emit( - core.DefaultOutputChannel.Name, - MessagePayloadType, - []any{payload}, - ) -} - -func (c *CreateMessage) Cancel(ctx core.ExecutionContext) 
error { - return nil -} - -func (c *CreateMessage) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { - return ctx.DefaultProcessing() -} - -func (c *CreateMessage) Actions() []core.Action { - return []core.Action{} -} - -func (c *CreateMessage) HandleAction(ctx core.ActionContext) error { - return nil -} - -func (c *CreateMessage) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { - return http.StatusOK, nil -} - -func (c *CreateMessage) Cleanup(ctx core.SetupContext) error { - return nil -} - -func extractMessageText(response *CreateMessageResponse) string { - if response == nil || len(response.Content) == 0 { - return "" - } - - var builder strings.Builder - for _, block := range response.Content { - if block.Type == "text" { - if builder.Len() > 0 { - builder.WriteString("\n") - } - builder.WriteString(block.Text) - } - } - return builder.String() -} diff --git a/pkg/integrations/claude/create_message_test.go b/pkg/integrations/claude/create_message_test.go deleted file mode 100644 index 5c78764850..0000000000 --- a/pkg/integrations/claude/create_message_test.go +++ /dev/null @@ -1,321 +0,0 @@ -package claude - -import ( - "bytes" - "encoding/json" - "io" - "net/http" - "testing" - - "github.com/superplanehq/superplane/pkg/configuration" - "github.com/superplanehq/superplane/pkg/core" -) - -// --- Mocks --- - -// mockExecutionState implements core.ExecutionStateContext -type mockExecutionState struct { - EmittedChannel string - EmittedType string - EmittedPayloads []any - Finished bool - Failed bool - FailReason, FailMsg string -} - -func (m *mockExecutionState) IsFinished() bool { return m.Finished } -func (m *mockExecutionState) SetKV(key, value string) error { return nil } - -func (m *mockExecutionState) Emit(channel, payloadType string, payloads []any) error { - m.EmittedChannel = channel - m.EmittedType = payloadType - m.EmittedPayloads = payloads - return nil -} - -func (m *mockExecutionState) Pass() error { - m.Finished = true 
- return nil -} - -func (m *mockExecutionState) Fail(reason, message string) error { - m.Finished = true - m.Failed = true - m.FailReason = reason - m.FailMsg = message - return nil -} - -// --- Tests --- - -func TestCreateMessage_Configuration(t *testing.T) { - c := &CreateMessage{} - config := c.Configuration() - - expectedFields := map[string]struct { - Required bool - Type string - }{ - "model": {true, string(configuration.FieldTypeIntegrationResource)}, - "prompt": {true, string(configuration.FieldTypeText)}, - "systemMessage": {false, string(configuration.FieldTypeText)}, - "maxTokens": {false, string(configuration.FieldTypeNumber)}, - "temperature": {false, string(configuration.FieldTypeNumber)}, - } - - for _, field := range config { - expected, ok := expectedFields[field.Name] - if !ok { - t.Errorf("unexpected field: %s", field.Name) - continue - } - if field.Required != expected.Required { - t.Errorf("field %s: expected required %v, got %v", field.Name, expected.Required, field.Required) - } - if string(field.Type) != expected.Type { - t.Errorf("field %s: expected type %s, got %s", field.Name, expected.Type, field.Type) - } - } -} - -func TestCreateMessage_Setup(t *testing.T) { - c := &CreateMessage{} - - tests := []struct { - name string - config map[string]interface{} - expectError bool - }{ - { - name: "Valid Config", - config: map[string]interface{}{ - "model": "claude-3-opus", - "prompt": "Hello", - }, - expectError: false, - }, - { - name: "Missing Model", - config: map[string]interface{}{ - "prompt": "Hello", - }, - expectError: true, - }, - { - name: "Missing Prompt", - config: map[string]interface{}{ - "model": "claude-3-opus", - }, - expectError: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - ctx := core.SetupContext{ - Configuration: tt.config, - } - err := c.Setup(ctx) - if tt.expectError && err == nil { - t.Error("expected error, got nil") - } - if !tt.expectError && err != nil { - t.Errorf("unexpected 
error: %v", err) - } - }) - } -} - -func TestCreateMessage_Execute(t *testing.T) { - c := &CreateMessage{} - - // Helper to create a valid response JSON - validResponseJSON := `{ - "id": "msg_01", - "type": "message", - "role": "assistant", - "model": "claude-3-test", - "content": [ - {"type": "text", "text": "Hello world"} - ], - "stop_reason": "end_turn", - "usage": {"input_tokens": 10, "output_tokens": 5} - }` - - tests := []struct { - name string - config map[string]interface{} - mockResponse func(*http.Request) *http.Response - expectError bool - expectEmission bool - validatePayload func(*testing.T, MessagePayload) - }{ - { - name: "Success", - config: map[string]interface{}{ - "model": "claude-3-test", - "prompt": "Say hello", - "maxTokens": 500, - "systemMessage": "You are a bot", - "temperature": 0.7, - }, - mockResponse: func(req *http.Request) *http.Response { - // Verify request body - body, _ := io.ReadAll(req.Body) - var sent CreateMessageRequest - json.Unmarshal(body, &sent) - - if sent.Model != "claude-3-test" || sent.MaxTokens != 500 || sent.System != "You are a bot" { - return &http.Response{StatusCode: 400, Body: io.NopCloser(bytes.NewBufferString("bad request body"))} - } - - return &http.Response{ - StatusCode: 200, - Body: io.NopCloser(bytes.NewBufferString(validResponseJSON)), - } - }, - expectError: false, - expectEmission: true, - validatePayload: func(t *testing.T, p MessagePayload) { - if p.Text != "Hello world" { - t.Errorf("expected text 'Hello world', got '%s'", p.Text) - } - if p.ID != "msg_01" { - t.Errorf("expected ID 'msg_01', got '%s'", p.ID) - } - if p.Usage.InputTokens != 10 { - t.Errorf("expected usage 10, got %d", p.Usage.InputTokens) - } - }, - }, - { - name: "Missing Configuration in Execute", - config: map[string]interface{}{ - "model": "", // Invalid - }, - expectError: true, - }, - { - name: "API Error", - config: map[string]interface{}{ - "model": "claude-3-test", - "prompt": "fail me", - }, - mockResponse: func(req 
*http.Request) *http.Response { - return &http.Response{ - StatusCode: 500, - Body: io.NopCloser(bytes.NewBufferString(`{"error": {"message": "internal error"}}`)), - } - }, - expectError: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Setup Mocks - mockState := &mockExecutionState{} - mockHTTP := &mockHTTPContext{RoundTripFunc: tt.mockResponse} - mockInt := &mockIntegrationContext{ - config: map[string][]byte{ - "apiKey": []byte("test-key"), - }, - } - - ctx := core.ExecutionContext{ - Configuration: tt.config, - ExecutionState: mockState, - HTTP: mockHTTP, - Integration: mockInt, - } - - err := c.Execute(ctx) - - if tt.expectError { - if err == nil { - t.Error("expected error, got nil") - } - return - } - - if err != nil { - t.Errorf("unexpected error: %v", err) - return - } - - if tt.expectEmission { - if mockState.EmittedType != MessagePayloadType { - t.Errorf("expected emitted type %s, got %s", MessagePayloadType, mockState.EmittedType) - } - if len(mockState.EmittedPayloads) != 1 { - t.Errorf("expected 1 payload, got %d", len(mockState.EmittedPayloads)) - } else if tt.validatePayload != nil { - // Convert payload back to struct for validation - // In real execution this is passed as any, here we cast it - payload, ok := mockState.EmittedPayloads[0].(MessagePayload) - if !ok { - t.Error("emitted payload is not MessagePayload") - } else { - tt.validatePayload(t, payload) - } - } - } - }) - } -} - -func TestExtractMessageText(t *testing.T) { - // This function is unexported, but accessible within the package_test if package is same - // If the test file is package claude_test, we can't access it. - // Assuming package claude per user instruction. 
- - tests := []struct { - name string - response *CreateMessageResponse - expected string - }{ - { - name: "Nil Response", - response: nil, - expected: "", - }, - { - name: "Single Text Block", - response: &CreateMessageResponse{ - Content: []MessageContent{ - {Type: "text", Text: "Hello"}, - }, - }, - expected: "Hello", - }, - { - name: "Multiple Text Blocks", - response: &CreateMessageResponse{ - Content: []MessageContent{ - {Type: "text", Text: "Hello"}, - {Type: "text", Text: "World"}, - }, - }, - expected: "Hello\nWorld", - }, - { - name: "Mixed Blocks (ignore non-text if any)", - response: &CreateMessageResponse{ - Content: []MessageContent{ - {Type: "image", Text: ""}, // hypothetical non-text - {Type: "text", Text: "Real Text"}, - }, - }, - expected: "Real Text", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got := extractMessageText(tt.response) - if got != tt.expected { - t.Errorf("expected '%s', got '%s'", tt.expected, got) - } - }) - } -} diff --git a/pkg/integrations/claude/example_output_create_message.json b/pkg/integrations/claude/example_output_create_message.json deleted file mode 100644 index 2fd3c0e729..0000000000 --- a/pkg/integrations/claude/example_output_create_message.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "type": "claude.message", - "data": { - "id": "msg_01X9JGt5...123456", - "model": "claude-3-5-sonnet-latest", - "text": "Here is the summary of the deployment logs you requested...", - "stopReason": "end_turn", - "usage": { - "input_tokens": 45, - "output_tokens": 120 - }, - "response": { - "id": "msg_01X9JGt5...123456", - "type": "message", - "role": "assistant", - "model": "claude-3-5-sonnet-latest", - "content": [ - { - "type": "text", - "text": "Here is the summary of the deployment logs you requested..." 
- } - ], - "stop_reason": "end_turn", - "usage": { - "input_tokens": 45, - "output_tokens": 120 - } - } - }, - "timestamp": "2026-02-06T12:00:00Z" -} \ No newline at end of file diff --git a/pkg/integrations/claude/text_prompt_test.go b/pkg/integrations/claude/text_prompt_test.go index bfa48e5c0c..d8263b22ea 100644 --- a/pkg/integrations/claude/text_prompt_test.go +++ b/pkg/integrations/claude/text_prompt_test.go @@ -9,9 +9,45 @@ import ( "github.com/superplanehq/superplane/pkg/configuration" "github.com/superplanehq/superplane/pkg/core" - "github.com/superplanehq/superplane/test/support/contexts" ) +// --- Mocks --- + +// mockExecutionState implements core.ExecutionStateContext +type mockExecutionState struct { + EmittedChannel string + EmittedType string + EmittedPayloads []any + Finished bool + Failed bool + FailReason, FailMsg string +} + +func (m *mockExecutionState) IsFinished() bool { return m.Finished } +func (m *mockExecutionState) SetKV(key, value string) error { return nil } + +func (m *mockExecutionState) Emit(channel, payloadType string, payloads []any) error { + m.EmittedChannel = channel + m.EmittedType = payloadType + m.EmittedPayloads = payloads + return nil +} + +func (m *mockExecutionState) Pass() error { + m.Finished = true + return nil +} + +func (m *mockExecutionState) Fail(reason, message string) error { + m.Finished = true + m.Failed = true + m.FailReason = reason + m.FailMsg = message + return nil +} + +// --- Tests --- + func TestTextPrompt_Configuration(t *testing.T) { c := &TextPrompt{} config := c.Configuration() @@ -109,8 +145,7 @@ func TestTextPrompt_Execute(t *testing.T) { tests := []struct { name string config map[string]interface{} - responseStatus int - responseBody string + mockResponse func(*http.Request) *http.Response expectError bool expectEmission bool validatePayload func(*testing.T, MessagePayload) @@ -124,8 +159,21 @@ func TestTextPrompt_Execute(t *testing.T) { "systemMessage": "You are a bot", "temperature": 0.7, }, - 
responseStatus: 200, - responseBody: validResponseJSON, + mockResponse: func(req *http.Request) *http.Response { + // Verify request body + body, _ := io.ReadAll(req.Body) + var sent CreateMessageRequest + json.Unmarshal(body, &sent) + + if sent.Model != "claude-3-test" || sent.MaxTokens != 500 || sent.System != "You are a bot" { + return &http.Response{StatusCode: 400, Body: io.NopCloser(bytes.NewBufferString("bad request body"))} + } + + return &http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewBufferString(validResponseJSON)), + } + }, expectError: false, expectEmission: true, validatePayload: func(t *testing.T, p MessagePayload) { @@ -153,35 +201,32 @@ func TestTextPrompt_Execute(t *testing.T) { "model": "claude-3-test", "prompt": "fail me", }, - responseStatus: 500, - responseBody: `{"error": {"message": "internal error"}}`, - expectError: true, + mockResponse: func(req *http.Request) *http.Response { + return &http.Response{ + StatusCode: 500, + Body: io.NopCloser(bytes.NewBufferString(`{"error": {"message": "internal error"}}`)), + } + }, + expectError: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - execState := &contexts.ExecutionStateContext{KVs: map[string]string{}} - integrationCtx := &contexts.IntegrationContext{ - Configuration: map[string]any{"apiKey": "test-key"}, - } - - var responses []*http.Response - if tt.responseStatus != 0 { - responses = []*http.Response{ - { - StatusCode: tt.responseStatus, - Body: io.NopCloser(bytes.NewBufferString(tt.responseBody)), - }, - } + // Setup Mocks + mockState := &mockExecutionState{} + mockHTTP := &mockHTTPContext{RoundTripFunc: tt.mockResponse} + mockInt := &mockIntegrationContext{ + config: map[string][]byte{ + "apiKey": []byte("test-key"), + }, } - httpCtx := &contexts.HTTPContext{Responses: responses} ctx := core.ExecutionContext{ Configuration: tt.config, - ExecutionState: execState, - HTTP: httpCtx, - Integration: integrationCtx, + ExecutionState: mockState, + 
HTTP: mockHTTP, + Integration: mockInt, } err := c.Execute(ctx) @@ -199,32 +244,19 @@ func TestTextPrompt_Execute(t *testing.T) { } if tt.expectEmission { - if execState.Type != MessagePayloadType { - t.Errorf("expected emitted type %s, got %s", MessagePayloadType, execState.Type) + if mockState.EmittedType != MessagePayloadType { + t.Errorf("expected emitted type %s, got %s", MessagePayloadType, mockState.EmittedType) } - if len(execState.Payloads) != 1 { - t.Errorf("expected 1 payload, got %d", len(execState.Payloads)) + if len(mockState.EmittedPayloads) != 1 { + t.Errorf("expected 1 payload, got %d", len(mockState.EmittedPayloads)) } else if tt.validatePayload != nil { - wrapped, ok := execState.Payloads[0].(map[string]any) - if !ok { - t.Error("emitted payload wrapper is not map[string]any") - return - } - data, ok := wrapped["data"].(MessagePayload) + // Convert payload back to struct for validation + // In real execution this is passed as any, here we cast it + payload, ok := mockState.EmittedPayloads[0].(MessagePayload) if !ok { - t.Error("emitted payload data is not MessagePayload") - return - } - tt.validatePayload(t, data) - } - // Verify request body was sent correctly (e.g. 
Success case) - if len(httpCtx.Requests) == 1 && tt.validatePayload != nil { - bodyBytes, _ := io.ReadAll(httpCtx.Requests[0].Body) - var sent CreateMessageRequest - if err := json.Unmarshal(bodyBytes, &sent); err != nil { - t.Errorf("failed to unmarshal sent body: %v", err) - } else if sent.Model != "claude-3-test" || sent.MaxTokens != 500 || sent.System != "You are a bot" { - t.Errorf("request body mismatch: model=%s max_tokens=%d system=%s", sent.Model, sent.MaxTokens, sent.System) + t.Error("emitted payload is not MessagePayload") + } else { + tt.validatePayload(t, payload) } } } From 4a0df12a3f11e078cc45b5ea5c5a6d9f8243e388 Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Sun, 8 Feb 2026 13:38:08 -0300 Subject: [PATCH 080/160] chore: add WebhookHandler interface (#2937) The `Integration` interface was starting to get full of webhook-related methods, but not all integrations make use of those. In fact, from our current integrations, 11/16 do not use it, so more than half do not use it. Due to that, we move that logic into a more specialized `WebhookHandler` interface. That keeps the Integration interface clean, and makes it a lot more explicit when an integration makes use of it. --------- Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- .cursor/commands/component-review.rules.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.cursor/commands/component-review.rules.md b/.cursor/commands/component-review.rules.md index 233993223b..5e66cc0965 100644 --- a/.cursor/commands/component-review.rules.md +++ b/.cursor/commands/component-review.rules.md @@ -72,8 +72,8 @@ The output channels shown in the UI include at least one channel (or rely on def ### Webhooks -- If the webhook is not configured through the integration, use `ctx.Webhook.Setup()`. 
If the webhook is configured through the integration, `ctx.Integration.RequestWebhook()` and implement the integration's `SetupWebhook`, `CleanupWebhook` -- We should always aim to share webhooks between components, if they use the same underlying event configuration. Use `CompareWebhookConfig` for that. For example, if we have two `github.onPush` triggers, one for main branch, and one for release branches, both of those triggers use the same webhook in GitHub. +- If the webhook is not configured through the integration, use `ctx.Webhook.Setup()`. If the webhook is configured through the integration, use `ctx.Integration.RequestWebhook()` and implement a `core.WebhookHandler` with `Setup`, `Cleanup`, and `CompareConfig`, registered via `registry.RegisterIntegrationWithWebhookHandler`. +- We should always aim to share webhooks between components if they use the same underlying event configuration. Use `CompareConfig` for that. For example, if we have two `github.onPush` triggers, one for main branch, and one for release branches, both of those triggers use the same webhook in GitHub. 
### Triggers From 0af36775fbfaccab0250d30490e1c4c3f4ef9580 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Sun, 8 Feb 2026 18:44:24 +0100 Subject: [PATCH 081/160] chore: Use standard unit tests without explicit mocks (#2968) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- pkg/integrations/claude/text_prompt_test.go | 128 ++++++++------------ 1 file changed, 48 insertions(+), 80 deletions(-) diff --git a/pkg/integrations/claude/text_prompt_test.go b/pkg/integrations/claude/text_prompt_test.go index d8263b22ea..bfa48e5c0c 100644 --- a/pkg/integrations/claude/text_prompt_test.go +++ b/pkg/integrations/claude/text_prompt_test.go @@ -9,45 +9,9 @@ import ( "github.com/superplanehq/superplane/pkg/configuration" "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" ) -// --- Mocks --- - -// mockExecutionState implements core.ExecutionStateContext -type mockExecutionState struct { - EmittedChannel string - EmittedType string - EmittedPayloads []any - Finished bool - Failed bool - FailReason, FailMsg string -} - -func (m *mockExecutionState) IsFinished() bool { return m.Finished } -func (m *mockExecutionState) SetKV(key, value string) error { return nil } - -func (m *mockExecutionState) Emit(channel, payloadType string, payloads []any) error { - m.EmittedChannel = channel - m.EmittedType = payloadType - m.EmittedPayloads = payloads - return nil -} - -func (m *mockExecutionState) Pass() error { - m.Finished = true - return nil -} - -func (m *mockExecutionState) Fail(reason, message string) error { - m.Finished = true - m.Failed = true - m.FailReason = reason - m.FailMsg = message - return nil -} - -// --- Tests --- - func TestTextPrompt_Configuration(t *testing.T) { c := &TextPrompt{} config := c.Configuration() @@ -145,7 +109,8 @@ func TestTextPrompt_Execute(t *testing.T) { tests 
:= []struct { name string config map[string]interface{} - mockResponse func(*http.Request) *http.Response + responseStatus int + responseBody string expectError bool expectEmission bool validatePayload func(*testing.T, MessagePayload) @@ -159,21 +124,8 @@ func TestTextPrompt_Execute(t *testing.T) { "systemMessage": "You are a bot", "temperature": 0.7, }, - mockResponse: func(req *http.Request) *http.Response { - // Verify request body - body, _ := io.ReadAll(req.Body) - var sent CreateMessageRequest - json.Unmarshal(body, &sent) - - if sent.Model != "claude-3-test" || sent.MaxTokens != 500 || sent.System != "You are a bot" { - return &http.Response{StatusCode: 400, Body: io.NopCloser(bytes.NewBufferString("bad request body"))} - } - - return &http.Response{ - StatusCode: 200, - Body: io.NopCloser(bytes.NewBufferString(validResponseJSON)), - } - }, + responseStatus: 200, + responseBody: validResponseJSON, expectError: false, expectEmission: true, validatePayload: func(t *testing.T, p MessagePayload) { @@ -201,32 +153,35 @@ func TestTextPrompt_Execute(t *testing.T) { "model": "claude-3-test", "prompt": "fail me", }, - mockResponse: func(req *http.Request) *http.Response { - return &http.Response{ - StatusCode: 500, - Body: io.NopCloser(bytes.NewBufferString(`{"error": {"message": "internal error"}}`)), - } - }, - expectError: true, + responseStatus: 500, + responseBody: `{"error": {"message": "internal error"}}`, + expectError: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - // Setup Mocks - mockState := &mockExecutionState{} - mockHTTP := &mockHTTPContext{RoundTripFunc: tt.mockResponse} - mockInt := &mockIntegrationContext{ - config: map[string][]byte{ - "apiKey": []byte("test-key"), - }, + execState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + integrationCtx := &contexts.IntegrationContext{ + Configuration: map[string]any{"apiKey": "test-key"}, } + var responses []*http.Response + if tt.responseStatus != 0 { + responses 
= []*http.Response{ + { + StatusCode: tt.responseStatus, + Body: io.NopCloser(bytes.NewBufferString(tt.responseBody)), + }, + } + } + httpCtx := &contexts.HTTPContext{Responses: responses} + ctx := core.ExecutionContext{ Configuration: tt.config, - ExecutionState: mockState, - HTTP: mockHTTP, - Integration: mockInt, + ExecutionState: execState, + HTTP: httpCtx, + Integration: integrationCtx, } err := c.Execute(ctx) @@ -244,19 +199,32 @@ func TestTextPrompt_Execute(t *testing.T) { } if tt.expectEmission { - if mockState.EmittedType != MessagePayloadType { - t.Errorf("expected emitted type %s, got %s", MessagePayloadType, mockState.EmittedType) + if execState.Type != MessagePayloadType { + t.Errorf("expected emitted type %s, got %s", MessagePayloadType, execState.Type) } - if len(mockState.EmittedPayloads) != 1 { - t.Errorf("expected 1 payload, got %d", len(mockState.EmittedPayloads)) + if len(execState.Payloads) != 1 { + t.Errorf("expected 1 payload, got %d", len(execState.Payloads)) } else if tt.validatePayload != nil { - // Convert payload back to struct for validation - // In real execution this is passed as any, here we cast it - payload, ok := mockState.EmittedPayloads[0].(MessagePayload) + wrapped, ok := execState.Payloads[0].(map[string]any) + if !ok { + t.Error("emitted payload wrapper is not map[string]any") + return + } + data, ok := wrapped["data"].(MessagePayload) if !ok { - t.Error("emitted payload is not MessagePayload") - } else { - tt.validatePayload(t, payload) + t.Error("emitted payload data is not MessagePayload") + return + } + tt.validatePayload(t, data) + } + // Verify request body was sent correctly (e.g. 
Success case) + if len(httpCtx.Requests) == 1 && tt.validatePayload != nil { + bodyBytes, _ := io.ReadAll(httpCtx.Requests[0].Body) + var sent CreateMessageRequest + if err := json.Unmarshal(bodyBytes, &sent); err != nil { + t.Errorf("failed to unmarshal sent body: %v", err) + } else if sent.Model != "claude-3-test" || sent.MaxTokens != 500 || sent.System != "You are a bot" { + t.Errorf("request body mismatch: model=%s max_tokens=%d system=%s", sent.Model, sent.MaxTokens, sent.System) } } } From 4bce08c035a2382a07705f4404201aa34ace12a9 Mon Sep 17 00:00:00 2001 From: dimbaja Date: Mon, 9 Feb 2026 22:18:45 +0100 Subject: [PATCH 082/160] feat: Implement initial GitLab integration (OAuth + PAT) (#2696) The GitLab integration is implemented using OAuth and Personal Access Tokens (PATs). The client credentials grant is not supported by GitLab, and Service Accounts and Group Access Tokens are Premium-tier features both on GitLab.com and self-hosted GitLab instances. OAuth and PATs were chosen as the initial supported authentication mechanisms. 
Closes #1912 #1914 #1936 registering-integration-1 registering-integration-2 registering-integration-3 oauth-setup-1 oauth-setup-2 create-issue-component Create-issue-config on-issue-component on-issue-config --------- Signed-off-by: dimbaja Signed-off-by: Lucas Pinheiro Co-authored-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- docs/components/GitLab.mdx | 391 ------------------ pkg/integrations/gitlab/example.go | 50 --- pkg/integrations/gitlab/gitlab.go | 5 - pkg/integrations/gitlab/gitlab_test.go | 13 - pkg/integrations/gitlab/hooks.go | 6 - pkg/integrations/gitlab/on_issue.go | 31 +- pkg/integrations/gitlab/on_issue_test.go | 14 +- pkg/integrations/gitlab/webhook_handler.go | 4 - .../pages/workflowv2/mappers/gitlab/index.ts | 10 - web_src/src/pages/workflowv2/mappers/index.ts | 4 - .../src/ui/BuildingBlocksSidebar/index.tsx | 2 - web_src/src/ui/IntegrationInstructions.tsx | 2 +- .../ui/componentSidebar/integrationIcons.tsx | 3 - 13 files changed, 11 insertions(+), 524 deletions(-) diff --git a/docs/components/GitLab.mdx b/docs/components/GitLab.mdx index 4264d7f665..3e56b7ae24 100644 --- a/docs/components/GitLab.mdx +++ b/docs/components/GitLab.mdx @@ -8,11 +8,6 @@ Manage and react to changes in your GitLab repositories - - - - - import { CardGrid, LinkCard } from "@astrojs/starlight/components"; @@ -131,392 +126,6 @@ This trigger automatically sets up a GitLab webhook when configured. The webhook } ``` - - -## On Merge Request - -The On Merge Request trigger starts a workflow execution when merge request events occur in a GitLab project. - -### Configuration - -- **Project** (required): GitLab project to monitor -- **Actions** (required): Select which merge request actions to listen for (open, close, merge, etc.). Default: open. 
- -### Outputs - -- **Default channel**: Emits merge request payload data with action, project, and object attributes - -### Example Data - -```json -{ - "data": { - "assignees": [ - { - "avatar_url": "https://www.gravatar.com/avatar/ab12cd34?s=80\u0026d=identicon", - "email": "jrivera@example.com", - "id": 4, - "name": "Jamie Rivera", - "username": "jrivera" - } - ], - "changes": { - "title": { - "current": "Add merge request trigger", - "previous": "Add trigger" - } - }, - "event_type": "merge_request", - "labels": [ - { - "id": 101, - "title": "backend" - } - ], - "object_attributes": { - "action": "open", - "description": "Adds support for additional GitLab webhook trigger types.", - "id": 93, - "iid": 12, - "state": "opened", - "title": "Add merge request trigger" - }, - "object_kind": "merge_request", - "project": { - "avatar_url": null, - "ci_config_path": null, - "default_branch": "main", - "description": "Project used to demonstrate merge request webhook payloads.", - "git_http_url": "https://gitlab.example.com/group/example.git", - "git_ssh_url": "ssh://git@gitlab.example.com:group/example.git", - "id": 1, - "name": "Example Project", - "namespace": "group", - "path_with_namespace": "group/example", - "visibility_level": 20, - "web_url": "https://gitlab.example.com/group/example" - }, - "repository": { - "description": "Project used to demonstrate merge request webhook payloads.", - "git_http_url": "https://gitlab.example.com/group/example.git", - "git_ssh_url": "ssh://git@gitlab.example.com:group/example.git", - "homepage": "https://gitlab.example.com/group/example", - "name": "Example Project", - "url": "ssh://git@gitlab.example.com/group/example.git", - "visibility_level": 20 - }, - "reviewers": [ - { - "avatar_url": "https://www.gravatar.com/avatar/ef56gh78?s=80\u0026d=identicon", - "email": "mlee@example.com", - "id": 6, - "name": "Morgan Lee", - "state": "unreviewed", - "username": "mlee" - } - ], - "user": { - "avatar_url": 
"https://www.gravatar.com/avatar/1a29da0ccd099482194440fac762f5ccb4ec53227761d1859979367644a889a5?s=80\u0026d=identicon", - "email": "agarcia@example.com", - "id": 1, - "name": "Alex Garcia", - "username": "agarcia" - } - }, - "timestamp": "2026-02-12T20:40:00.000000000Z", - "type": "gitlab.mergeRequest" -} -``` - - - -## On Milestone - -The On Milestone trigger starts a workflow execution when milestone events occur in a GitLab project. - -### Configuration - -- **Project** (required): GitLab project to monitor -- **Actions** (required): Select which milestone actions to listen for. Default: create. - -### Outputs - -- **Default channel**: Emits milestone payload data with action, project, and object attributes - -### Example Data - -```json -{ - "data": { - "action": "create", - "event_type": "milestone", - "object_attributes": { - "created_at": "2025-06-16 14:10:57 UTC", - "description": "First stable release", - "due_date": "2025-06-30", - "group_id": null, - "id": 61, - "iid": 10, - "project_id": 1, - "start_date": "2025-06-16", - "state": "active", - "title": "v1.0", - "updated_at": "2025-06-16 14:10:57 UTC" - }, - "object_kind": "milestone", - "project": { - "avatar_url": null, - "ci_config_path": null, - "default_branch": "master", - "description": "Aut reprehenderit ut est.", - "git_http_url": "http://example.com/gitlabhq/gitlab-test.git", - "git_ssh_url": "git@example.com:gitlabhq/gitlab-test.git", - "homepage": "http://example.com/gitlabhq/gitlab-test", - "http_url": "http://example.com/gitlabhq/gitlab-test.git", - "id": 1, - "name": "Gitlab Test", - "namespace": "GitlabHQ", - "path_with_namespace": "gitlabhq/gitlab-test", - "ssh_url": "git@example.com:gitlabhq/gitlab-test.git", - "url": "http://example.com/gitlabhq/gitlab-test.git", - "visibility_level": 20, - "web_url": "http://example.com/gitlabhq/gitlab-test" - } - }, - "timestamp": "2026-02-12T20:40:00.000000000Z", - "type": "gitlab.milestone" -} -``` - - - -## On Release - -The On Release trigger 
starts a workflow execution when release events occur in a GitLab project. - -### Configuration - -- **Project** (required): GitLab project to monitor -- **Actions** (required): Select which release actions to listen for. Default: create. - -### Outputs - -- **Default channel**: Emits release payload data with action and release metadata - -### Example Data - -```json -{ - "data": { - "action": "create", - "assets": { - "count": 2, - "links": [ - { - "id": 1, - "link_type": "other", - "name": "Changelog", - "url": "https://example.net/changelog" - } - ], - "sources": [ - { - "format": "zip", - "url": "https://example.com/gitlab-org/release-webhook-example/-/archive/v1.1/release-webhook-example-v1.1.zip" - }, - { - "format": "tar.gz", - "url": "https://example.com/gitlab-org/release-webhook-example/-/archive/v1.1/release-webhook-example-v1.1.tar.gz" - } - ] - }, - "commit": { - "author": { - "email": "user@example.com", - "name": "Example User" - }, - "id": "ee0a3fb31ac16e11b9dbb596ad16d4af654d08f8", - "message": "Release v1.1", - "timestamp": "2020-10-31T14:58:32+11:00", - "title": "Release v1.1", - "url": "https://example.com/gitlab-org/release-webhook-example/-/commit/ee0a3fb31ac16e11b9dbb596ad16d4af654d08f8" - }, - "created_at": "2020-11-02 12:55:12 UTC", - "description": "v1.1 has been released", - "id": 1, - "name": "v1.1", - "object_kind": "release", - "project": { - "avatar_url": null, - "ci_config_path": null, - "default_branch": "master", - "description": "", - "git_http_url": "https://example.com/gitlab-org/release-webhook-example.git", - "git_ssh_url": "ssh://git@example.com/gitlab-org/release-webhook-example.git", - "id": 1, - "name": "release-webhook-example", - "namespace": "Gitlab", - "path_with_namespace": "gitlab-org/release-webhook-example", - "visibility_level": 0, - "web_url": "https://example.com/gitlab-org/release-webhook-example" - }, - "released_at": "2020-11-02 12:55:12 UTC", - "tag": "v1.1", - "url": 
"https://example.com/gitlab-org/release-webhook-example/-/releases/v1.1" - }, - "timestamp": "2026-02-12T20:40:00.000000000Z", - "type": "gitlab.release" -} -``` - - - -## On Tag - -The On Tag trigger starts a workflow execution when tag push events occur in a GitLab project. - -### Configuration - -- **Project** (required): GitLab project to monitor -- **Tags** (required): Configure tag filters using predicates. You can match full refs (refs/tags/v1.0.0) or tag names (v1.0.0). - -### Outputs - -- **Default channel**: Emits tag push payload data including ref, before/after SHA, and project information - -### Example Data - -```json -{ - "data": { - "after": "82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", - "before": "0000000000000000000000000000000000000000", - "checkout_sha": "82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", - "commits": [], - "event_name": "tag_push", - "message": "Tag message", - "object_kind": "tag_push", - "project": { - "avatar_url": null, - "ci_config_path": null, - "default_branch": "master", - "description": "", - "git_http_url": "http://example.com/jsmith/example.git", - "git_ssh_url": "git@example.com:jsmith/example.git", - "id": 1, - "name": "Example", - "namespace": "Jsmith", - "path_with_namespace": "jsmith/example", - "visibility_level": 0, - "web_url": "http://example.com/jsmith/example" - }, - "push_options": {}, - "ref": "refs/tags/v1.0.0", - "ref_protected": true, - "repository": { - "description": "", - "git_http_url": "http://example.com/jsmith/example.git", - "git_ssh_url": "git@example.com:jsmith/example.git", - "homepage": "http://example.com/jsmith/example", - "name": "Example", - "url": "ssh://git@example.com/jsmith/example.git", - "visibility_level": 0 - }, - "total_commits_count": 0, - "user_email": "john@example.com", - "user_id": 1, - "user_name": "John Smith", - "user_username": "jsmith" - }, - "timestamp": "2026-02-12T20:40:00.000000000Z", - "type": "gitlab.tag" -} -``` - - - -## On Vulnerability - -The On Vulnerability 
trigger starts a workflow execution when vulnerability events occur in a GitLab project. - -### Configuration - -- **Project** (required): GitLab project to monitor - -### Outputs - -- **Default channel**: Emits vulnerability payload data including severity, state, location, and linked issues - -### Example Data - -```json -{ - "data": { - "object_attributes": { - "auto_resolved": false, - "confidence": "unknown", - "confidence_overridden": false, - "confirmed_at": "2025-01-08T00:46:14.413Z", - "confirmed_by_id": 1, - "created_at": "2025-01-08T00:46:14.413Z", - "cvss": [ - { - "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H", - "vendor": "NVD" - } - ], - "dismissed_at": null, - "dismissed_by_id": null, - "identifiers": [ - { - "external_id": "29dce398-220a-4315-8c84-16cd8b6d9b05", - "external_type": "gemnasium", - "name": "Gemnasium-29dce398-220a-4315-8c84-16cd8b6d9b05", - "url": "https://gitlab.com/gitlab-org/security-products/gemnasium-db/-/blob/master/gem/rexml/CVE-2024-41123.yml" - }, - { - "external_id": "CVE-2024-41123", - "external_type": "cve", - "name": "CVE-2024-41123", - "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-41123" - } - ], - "issues": [ - { - "created_at": "2025-01-08T00:46:14.429Z", - "title": "REXML ReDoS vulnerability", - "updated_at": "2025-01-08T00:46:14.429Z", - "url": "https://example.com/flightjs/Flight/-/issues/1" - } - ], - "location": { - "dependency": { - "package": { - "name": "rexml" - }, - "version": "3.3.1" - }, - "file": "Gemfile.lock" - }, - "project_id": 1, - "report_type": "dependency_scanning", - "resolved_at": null, - "resolved_by_id": null, - "resolved_on_default_branch": false, - "severity": "high", - "severity_overridden": false, - "state": "confirmed", - "title": "REXML DoS vulnerability", - "updated_at": "2025-01-08T00:46:14.413Z", - "url": "https://example.com/flightjs/Flight/-/security/vulnerabilities/1" - }, - "object_kind": "vulnerability" - }, - "timestamp": 
"2026-02-12T20:40:00.000000000Z", - "type": "gitlab.vulnerability" -} -``` - ## Create Issue diff --git a/pkg/integrations/gitlab/example.go b/pkg/integrations/gitlab/example.go index 397360118d..f328477b63 100644 --- a/pkg/integrations/gitlab/example.go +++ b/pkg/integrations/gitlab/example.go @@ -10,59 +10,9 @@ import ( //go:embed example_data_on_issue.json var exampleDataOnIssueBytes []byte -//go:embed example_data_on_merge_request.json -var exampleDataOnMergeRequestBytes []byte - -//go:embed example_data_on_milestone.json -var exampleDataOnMilestoneBytes []byte - -//go:embed example_data_on_release.json -var exampleDataOnReleaseBytes []byte - -//go:embed example_data_on_tag.json -var exampleDataOnTagBytes []byte - -//go:embed example_data_on_vulnerability.json -var exampleDataOnVulnerabilityBytes []byte - var exampleDataOnIssueOnce sync.Once var exampleDataOnIssue map[string]any -var exampleDataOnMergeRequestOnce sync.Once -var exampleDataOnMergeRequest map[string]any - -var exampleDataOnMilestoneOnce sync.Once -var exampleDataOnMilestone map[string]any - -var exampleDataOnReleaseOnce sync.Once -var exampleDataOnRelease map[string]any - -var exampleDataOnTagOnce sync.Once -var exampleDataOnTag map[string]any - -var exampleDataOnVulnerabilityOnce sync.Once -var exampleDataOnVulnerability map[string]any - func (i *OnIssue) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleDataOnIssueOnce, exampleDataOnIssueBytes, &exampleDataOnIssue) } - -func (m *OnMergeRequest) ExampleData() map[string]any { - return utils.UnmarshalEmbeddedJSON(&exampleDataOnMergeRequestOnce, exampleDataOnMergeRequestBytes, &exampleDataOnMergeRequest) -} - -func (m *OnMilestone) ExampleData() map[string]any { - return utils.UnmarshalEmbeddedJSON(&exampleDataOnMilestoneOnce, exampleDataOnMilestoneBytes, &exampleDataOnMilestone) -} - -func (r *OnRelease) ExampleData() map[string]any { - return utils.UnmarshalEmbeddedJSON(&exampleDataOnReleaseOnce, 
exampleDataOnReleaseBytes, &exampleDataOnRelease) -} - -func (t *OnTag) ExampleData() map[string]any { - return utils.UnmarshalEmbeddedJSON(&exampleDataOnTagOnce, exampleDataOnTagBytes, &exampleDataOnTag) -} - -func (v *OnVulnerability) ExampleData() map[string]any { - return utils.UnmarshalEmbeddedJSON(&exampleDataOnVulnerabilityOnce, exampleDataOnVulnerabilityBytes, &exampleDataOnVulnerability) -} diff --git a/pkg/integrations/gitlab/gitlab.go b/pkg/integrations/gitlab/gitlab.go index f54d2e94c1..6b7f64d989 100644 --- a/pkg/integrations/gitlab/gitlab.go +++ b/pkg/integrations/gitlab/gitlab.go @@ -175,11 +175,6 @@ func (g *GitLab) Components() []core.Component { func (g *GitLab) Triggers() []core.Trigger { return []core.Trigger{ &OnIssue{}, - &OnMergeRequest{}, - &OnMilestone{}, - &OnRelease{}, - &OnTag{}, - &OnVulnerability{}, } } diff --git a/pkg/integrations/gitlab/gitlab_test.go b/pkg/integrations/gitlab/gitlab_test.go index 9fc96fe415..6a5cc6e36a 100644 --- a/pkg/integrations/gitlab/gitlab_test.go +++ b/pkg/integrations/gitlab/gitlab_test.go @@ -387,16 +387,3 @@ func Test__GitLab__BaseURLNormalization(t *testing.T) { }) } } - -func gitlabHeaders(event, token string) http.Header { - headers := http.Header{} - if event != "" { - headers.Set("X-Gitlab-Event", event) - } - - if token != "" { - headers.Set("X-Gitlab-Token", token) - } - - return headers -} diff --git a/pkg/integrations/gitlab/hooks.go b/pkg/integrations/gitlab/hooks.go index 887681221a..e060f15131 100644 --- a/pkg/integrations/gitlab/hooks.go +++ b/pkg/integrations/gitlab/hooks.go @@ -28,8 +28,6 @@ type Hook struct { WikiPageEvents bool `json:"wiki_page_events"` DeploymentEvents bool `json:"deployment_events"` ReleasesEvents bool `json:"releases_events"` - MilestoneEvents bool `json:"milestone_events"` - VulnerabilityEvents bool `json:"vulnerability_events"` } type HookEvents struct { @@ -43,8 +41,6 @@ type HookEvents struct { WikiPageEvents bool DeploymentEvents bool ReleasesEvents bool - 
MilestoneEvents bool - VulnerabilityEvents bool } func NewHooksClient(httpClient core.HTTPContext, ctx core.IntegrationContext) (*HooksClient, error) { @@ -88,8 +84,6 @@ func (c *HooksClient) CreateHook(projectID string, webhookURL string, secret str "wiki_page_events": events.WikiPageEvents, "deployment_events": events.DeploymentEvents, "releases_events": events.ReleasesEvents, - "milestone_events": events.MilestoneEvents, - "vulnerability_events": events.VulnerabilityEvents, } body, err := json.Marshal(payload) diff --git a/pkg/integrations/gitlab/on_issue.go b/pkg/integrations/gitlab/on_issue.go index d76f646904..fe92ca62a3 100644 --- a/pkg/integrations/gitlab/on_issue.go +++ b/pkg/integrations/gitlab/on_issue.go @@ -7,7 +7,6 @@ import ( "slices" "github.com/mitchellh/mapstructure" - log "github.com/sirupsen/logrus" "github.com/superplanehq/superplane/pkg/configuration" "github.com/superplanehq/superplane/pkg/core" ) @@ -160,14 +159,14 @@ func (i *OnIssue) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { // // Verify that the action is in the allowed list // - if len(config.Actions) > 0 && !i.whitelistedAction(ctx.Logger, data, config.Actions) { + if len(config.Actions) > 0 && !i.whitelistedAction(data, config.Actions) { return http.StatusOK, nil } // // Verify that the labels are in the allowed list // - if len(config.Labels) > 0 && !i.hasWhitelistedLabel(ctx.Logger, data, config.Labels) { + if len(config.Labels) > 0 && !i.hasWhitelistedLabel(data, config.Labels) { return http.StatusOK, nil } @@ -182,7 +181,7 @@ func (i *OnIssue) Cleanup(ctx core.TriggerContext) error { return nil } -func (i *OnIssue) whitelistedAction(logger *log.Entry, data map[string]any, allowedActions []string) bool { +func (i *OnIssue) whitelistedAction(data map[string]any, allowedActions []string) bool { attrs, ok := data["object_attributes"].(map[string]any) if !ok { return false @@ -194,7 +193,6 @@ func (i *OnIssue) whitelistedAction(logger *log.Entry, data map[string]any, 
allo } if !slices.Contains(allowedActions, action) { - logger.Infof("Action %s is not in the allowed list: %v", action, allowedActions) return false } @@ -215,41 +213,26 @@ func (i *OnIssue) whitelistedAction(logger *log.Entry, data map[string]any, allo return false } - if state != "opened" { - logger.Infof("Received update for issue in non-opened state: %s - ignoring", state) - return false - } - - return true + return state == "opened" } -func (i *OnIssue) hasWhitelistedLabel(logger *log.Entry, data map[string]any, allowedLabels []configuration.Predicate) bool { +func (i *OnIssue) hasWhitelistedLabel(data map[string]any, allowedLabels []configuration.Predicate) bool { labels, ok := data["labels"].([]any) if !ok { return false } - labelNames := []string{} for _, label := range labels { labelMap, ok := label.(map[string]any) if !ok { continue } - title, ok := labelMap["title"].(string) - if !ok { - continue - } - - labelNames = append(labelNames, title) - } - - for _, labelName := range labelNames { - if configuration.MatchesAnyPredicate(allowedLabels, labelName) { + title, _ := labelMap["title"].(string) + if configuration.MatchesAnyPredicate(allowedLabels, title) { return true } } - logger.Infof("Labels do not match the allowed list: Received: %v, Allowed: %v", labelNames, allowedLabels) return false } diff --git a/pkg/integrations/gitlab/on_issue_test.go b/pkg/integrations/gitlab/on_issue_test.go index b65b5f8aac..3a9ad0efe4 100644 --- a/pkg/integrations/gitlab/on_issue_test.go +++ b/pkg/integrations/gitlab/on_issue_test.go @@ -5,7 +5,6 @@ import ( "net/http" "testing" - log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" "github.com/superplanehq/superplane/pkg/configuration" "github.com/superplanehq/superplane/pkg/core" @@ -39,7 +38,6 @@ func Test__OnIssue__HandleWebhook__WrongEventType(t *testing.T) { Body: []byte(`{}`), Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, Events: eventsCtx, - Logger: 
log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -62,7 +60,6 @@ func Test__OnIssue__HandleWebhook__InvalidToken(t *testing.T) { Body: []byte(`{}`), Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, Webhook: webhookCtx, - Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -95,7 +92,6 @@ func Test__OnIssue__HandleWebhook__StateNotOpened(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"close"}}, Webhook: webhookCtx, Events: eventsCtx, - Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -131,7 +127,6 @@ func Test__OnIssue__HandleWebhook__Success(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, Webhook: webhookCtx, Events: eventsCtx, - Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -173,7 +168,6 @@ func Test__OnIssue__HandleWebhook__Filters(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"open"}, "labels": []configuration.Predicate{{Type: configuration.PredicateTypeEquals, Value: "backend"}}}, Webhook: webhookCtx, Events: eventsCtx, - Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -200,7 +194,6 @@ func Test__OnIssue__HandleWebhook__Filters(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"open"}, "labels": []configuration.Predicate{{Type: configuration.PredicateTypeEquals, Value: "backend"}}}, Webhook: webhookCtx, Events: eventsCtx, - Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -221,7 +214,7 @@ func Test__WhitelistedAction__ValidAction(t *testing.T) { "action": "open", }, } - result := trigger.whitelistedAction(log.NewEntry(log.New()), data, []string{"open", "close"}) + result := trigger.whitelistedAction(data, []string{"open", "close"}) assert.True(t, result) }) @@ -232,7 +225,7 @@ func 
Test__WhitelistedAction__ValidAction(t *testing.T) { }, } - result := trigger.whitelistedAction(log.NewEntry(log.New()), data, []string{"open", "close"}) + result := trigger.whitelistedAction(data, []string{"open", "close"}) assert.False(t, result) }) @@ -241,7 +234,7 @@ func Test__WhitelistedAction__ValidAction(t *testing.T) { "object_attributes": map[string]any{}, } - result := trigger.whitelistedAction(log.NewEntry(log.New()), data, []string{"open", "close"}) + result := trigger.whitelistedAction(data, []string{"open", "close"}) assert.False(t, result) }) @@ -271,7 +264,6 @@ func Test__OnIssue__HandleWebhook__UpdateOnClosed(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"update"}}, Webhook: webhookCtx, Events: eventsCtx, - Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) diff --git a/pkg/integrations/gitlab/webhook_handler.go b/pkg/integrations/gitlab/webhook_handler.go index 65c140aa29..a5055f2ec1 100644 --- a/pkg/integrations/gitlab/webhook_handler.go +++ b/pkg/integrations/gitlab/webhook_handler.go @@ -68,10 +68,6 @@ func (h *GitLabWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error events.PipelineEvents = true case "releases": events.ReleasesEvents = true - case "milestone": - events.MilestoneEvents = true - case "vulnerability": - events.VulnerabilityEvents = true } hook, err := hooksClient.CreateHook(config.ProjectID, ctx.Webhook.GetURL(), string(secret), events) diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/index.ts b/web_src/src/pages/workflowv2/mappers/gitlab/index.ts index 59c3c9c17d..6d78e8e9a4 100644 --- a/web_src/src/pages/workflowv2/mappers/gitlab/index.ts +++ b/web_src/src/pages/workflowv2/mappers/gitlab/index.ts @@ -2,11 +2,6 @@ import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../typ import { buildActionStateRegistry } from "../utils"; import { createIssueMapper } from "./create_issue"; import { onIssueTriggerRenderer } from 
"./on_issue"; -import { onMergeRequestTriggerRenderer } from "./on_merge_request"; -import { onMilestoneTriggerRenderer } from "./on_milestone"; -import { onReleaseTriggerRenderer } from "./on_release"; -import { onTagTriggerRenderer } from "./on_tag"; -import { onVulnerabilityTriggerRenderer } from "./on_vulnerability"; export const eventStateRegistry: Record = { createIssue: buildActionStateRegistry("created"), @@ -18,9 +13,4 @@ export const componentMappers: Record = { export const triggerRenderers: Record = { onIssue: onIssueTriggerRenderer, - onMergeRequest: onMergeRequestTriggerRenderer, - onMilestone: onMilestoneTriggerRenderer, - onRelease: onReleaseTriggerRenderer, - onTag: onTagTriggerRenderer, - onVulnerability: onVulnerabilityTriggerRenderer, }; diff --git a/web_src/src/pages/workflowv2/mappers/index.ts b/web_src/src/pages/workflowv2/mappers/index.ts index 00a6d89d2c..a80e55fc11 100644 --- a/web_src/src/pages/workflowv2/mappers/index.ts +++ b/web_src/src/pages/workflowv2/mappers/index.ts @@ -227,11 +227,7 @@ const appEventStateRegistries: Record circleci: circleCIEventStateRegistry, claude: claudeEventStateRegistry, aws: awsEventStateRegistry, - prometheus: prometheusEventStateRegistry, - cursor: cursorEventStateRegistry, gitlab: gitlabEventStateRegistry, - dockerhub: dockerhubEventStateRegistry, - grafana: grafanaEventStateRegistry, }; const componentAdditionalDataBuilders: Record = { diff --git a/web_src/src/ui/BuildingBlocksSidebar/index.tsx b/web_src/src/ui/BuildingBlocksSidebar/index.tsx index a8a7c2bc6a..36499a1ac1 100644 --- a/web_src/src/ui/BuildingBlocksSidebar/index.tsx +++ b/web_src/src/ui/BuildingBlocksSidebar/index.tsx @@ -406,7 +406,6 @@ function CategorySection({ discord: discordIcon, github: githubIcon, gitlab: gitlabIcon, - grafana: grafanaIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, @@ -485,7 +484,6 @@ function CategorySection({ discord: discordIcon, github: githubIcon, gitlab: gitlabIcon, - grafana: grafanaIcon, 
openai: openAiIcon, "open-ai": openAiIcon, claude: claudeIcon, diff --git a/web_src/src/ui/IntegrationInstructions.tsx b/web_src/src/ui/IntegrationInstructions.tsx index 0654f44c2a..8ece26cf1c 100644 --- a/web_src/src/ui/IntegrationInstructions.tsx +++ b/web_src/src/ui/IntegrationInstructions.tsx @@ -39,7 +39,7 @@ export function IntegrationInstructions({ description, onContinue, className = " ol: ({ children }) =>
      {children}
    , li: ({ children }) =>
  • {children}
  • , a: ({ children, href }) => ( - + {children} ), diff --git a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index b1e8df8445..fbfa0360e4 100644 --- a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -11,7 +11,6 @@ import daytonaIcon from "@/assets/icons/integrations/daytona.svg"; import discordIcon from "@/assets/icons/integrations/discord.svg"; import githubIcon from "@/assets/icons/integrations/github.svg"; import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; -import grafanaIcon from "@/assets/icons/integrations/grafana.svg"; import jiraIcon from "@/assets/icons/integrations/jira.svg"; import openAiIcon from "@/assets/icons/integrations/openai.svg"; import claudeIcon from "@/assets/icons/integrations/claude.svg"; @@ -37,7 +36,6 @@ export const INTEGRATION_APP_LOGO_MAP: Record = { discord: discordIcon, github: githubIcon, gitlab: gitlabIcon, - grafana: grafanaIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, @@ -64,7 +62,6 @@ export const APP_LOGO_MAP: Record> = { discord: discordIcon, github: githubIcon, gitlab: gitlabIcon, - grafana: grafanaIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, From 6ead750aebbdf6121c5f5937967731e6ecc912bf Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Mon, 9 Feb 2026 19:08:32 -0300 Subject: [PATCH 083/160] chore: add Logger to WebhookRequestContext (#2988) This makes it possible for triggers to log more contextual messages when they have filters configured. 
--------- Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- pkg/core/trigger.go | 1 - pkg/integrations/gitlab/on_issue.go | 31 ++++++++++++++++++------ pkg/integrations/gitlab/on_issue_test.go | 14 ++++++++--- pkg/public/server.go | 2 -- 4 files changed, 35 insertions(+), 13 deletions(-) diff --git a/pkg/core/trigger.go b/pkg/core/trigger.go index 0e86340fa0..523874b102 100644 --- a/pkg/core/trigger.go +++ b/pkg/core/trigger.go @@ -115,7 +115,6 @@ type WebhookRequestContext struct { WorkflowID string NodeID string Configuration any - Metadata MetadataContext Logger *log.Entry Webhook NodeWebhookContext Events EventContext diff --git a/pkg/integrations/gitlab/on_issue.go b/pkg/integrations/gitlab/on_issue.go index fe92ca62a3..d76f646904 100644 --- a/pkg/integrations/gitlab/on_issue.go +++ b/pkg/integrations/gitlab/on_issue.go @@ -7,6 +7,7 @@ import ( "slices" "github.com/mitchellh/mapstructure" + log "github.com/sirupsen/logrus" "github.com/superplanehq/superplane/pkg/configuration" "github.com/superplanehq/superplane/pkg/core" ) @@ -159,14 +160,14 @@ func (i *OnIssue) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { // // Verify that the action is in the allowed list // - if len(config.Actions) > 0 && !i.whitelistedAction(data, config.Actions) { + if len(config.Actions) > 0 && !i.whitelistedAction(ctx.Logger, data, config.Actions) { return http.StatusOK, nil } // // Verify that the labels are in the allowed list // - if len(config.Labels) > 0 && !i.hasWhitelistedLabel(data, config.Labels) { + if len(config.Labels) > 0 && !i.hasWhitelistedLabel(ctx.Logger, data, config.Labels) { return http.StatusOK, nil } @@ -181,7 +182,7 @@ func (i *OnIssue) Cleanup(ctx core.TriggerContext) error { return nil } -func (i *OnIssue) whitelistedAction(data map[string]any, allowedActions []string) bool { +func (i *OnIssue) whitelistedAction(logger *log.Entry, data map[string]any, allowedActions []string) bool { attrs, ok := 
data["object_attributes"].(map[string]any) if !ok { return false @@ -193,6 +194,7 @@ func (i *OnIssue) whitelistedAction(data map[string]any, allowedActions []string } if !slices.Contains(allowedActions, action) { + logger.Infof("Action %s is not in the allowed list: %v", action, allowedActions) return false } @@ -213,26 +215,41 @@ func (i *OnIssue) whitelistedAction(data map[string]any, allowedActions []string return false } - return state == "opened" + if state != "opened" { + logger.Infof("Received update for issue in non-opened state: %s - ignoring", state) + return false + } + + return true } -func (i *OnIssue) hasWhitelistedLabel(data map[string]any, allowedLabels []configuration.Predicate) bool { +func (i *OnIssue) hasWhitelistedLabel(logger *log.Entry, data map[string]any, allowedLabels []configuration.Predicate) bool { labels, ok := data["labels"].([]any) if !ok { return false } + labelNames := []string{} for _, label := range labels { labelMap, ok := label.(map[string]any) if !ok { continue } - title, _ := labelMap["title"].(string) - if configuration.MatchesAnyPredicate(allowedLabels, title) { + title, ok := labelMap["title"].(string) + if !ok { + continue + } + + labelNames = append(labelNames, title) + } + + for _, labelName := range labelNames { + if configuration.MatchesAnyPredicate(allowedLabels, labelName) { return true } } + logger.Infof("Labels do not match the allowed list: Received: %v, Allowed: %v", labelNames, allowedLabels) return false } diff --git a/pkg/integrations/gitlab/on_issue_test.go b/pkg/integrations/gitlab/on_issue_test.go index 3a9ad0efe4..b65b5f8aac 100644 --- a/pkg/integrations/gitlab/on_issue_test.go +++ b/pkg/integrations/gitlab/on_issue_test.go @@ -5,6 +5,7 @@ import ( "net/http" "testing" + log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" "github.com/superplanehq/superplane/pkg/configuration" "github.com/superplanehq/superplane/pkg/core" @@ -38,6 +39,7 @@ func 
Test__OnIssue__HandleWebhook__WrongEventType(t *testing.T) { Body: []byte(`{}`), Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, Events: eventsCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -60,6 +62,7 @@ func Test__OnIssue__HandleWebhook__InvalidToken(t *testing.T) { Body: []byte(`{}`), Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, Webhook: webhookCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -92,6 +95,7 @@ func Test__OnIssue__HandleWebhook__StateNotOpened(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"close"}}, Webhook: webhookCtx, Events: eventsCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -127,6 +131,7 @@ func Test__OnIssue__HandleWebhook__Success(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"open"}}, Webhook: webhookCtx, Events: eventsCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -168,6 +173,7 @@ func Test__OnIssue__HandleWebhook__Filters(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"open"}, "labels": []configuration.Predicate{{Type: configuration.PredicateTypeEquals, Value: "backend"}}}, Webhook: webhookCtx, Events: eventsCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -194,6 +200,7 @@ func Test__OnIssue__HandleWebhook__Filters(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"open"}, "labels": []configuration.Predicate{{Type: configuration.PredicateTypeEquals, Value: "backend"}}}, Webhook: webhookCtx, Events: eventsCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) @@ -214,7 +221,7 @@ func Test__WhitelistedAction__ValidAction(t *testing.T) { "action": "open", }, } - result := trigger.whitelistedAction(data, []string{"open", 
"close"}) + result := trigger.whitelistedAction(log.NewEntry(log.New()), data, []string{"open", "close"}) assert.True(t, result) }) @@ -225,7 +232,7 @@ func Test__WhitelistedAction__ValidAction(t *testing.T) { }, } - result := trigger.whitelistedAction(data, []string{"open", "close"}) + result := trigger.whitelistedAction(log.NewEntry(log.New()), data, []string{"open", "close"}) assert.False(t, result) }) @@ -234,7 +241,7 @@ func Test__WhitelistedAction__ValidAction(t *testing.T) { "object_attributes": map[string]any{}, } - result := trigger.whitelistedAction(data, []string{"open", "close"}) + result := trigger.whitelistedAction(log.NewEntry(log.New()), data, []string{"open", "close"}) assert.False(t, result) }) @@ -264,6 +271,7 @@ func Test__OnIssue__HandleWebhook__UpdateOnClosed(t *testing.T) { Configuration: map[string]any{"project": "123", "actions": []string{"update"}}, Webhook: webhookCtx, Events: eventsCtx, + Logger: log.NewEntry(log.New()), } code, err := trigger.HandleWebhook(ctx) diff --git a/pkg/public/server.go b/pkg/public/server.go index 4ddd80c222..4c8eb55d61 100644 --- a/pkg/public/server.go +++ b/pkg/public/server.go @@ -807,7 +807,6 @@ func (s *Server) executeTriggerNode(ctx context.Context, body []byte, headers ht WorkflowID: node.WorkflowID.String(), NodeID: node.NodeID, Configuration: node.Configuration.Data(), - Metadata: contexts.NewNodeMetadataContext(tx, &node), Logger: logger, HTTP: s.registry.HTTPContext(), Webhook: contexts.NewNodeWebhookContext(ctx, tx, s.encryptor, &node, s.BaseURL+s.BasePath), @@ -842,7 +841,6 @@ func (s *Server) executeComponentNode(ctx context.Context, body []byte, headers WorkflowID: node.WorkflowID.String(), NodeID: node.NodeID, Configuration: node.Configuration.Data(), - Metadata: contexts.NewNodeMetadataContext(tx, &node), Logger: logger, HTTP: s.registry.HTTPContext(), Webhook: contexts.NewNodeWebhookContext(ctx, tx, s.encryptor, &node, s.BaseURL+s.BasePath), From 2a6614ff9e7a571b5ba3ee169a24fb67a31e1f67 Mon 
Sep 17 00:00:00 2001 From: Aleksandar Mitrovic <61409859+AleksandarCole@users.noreply.github.com> Date: Tue, 10 Feb 2026 08:30:01 +0100 Subject: [PATCH 084/160] chore: Update changelog skill for cursor (#2993) Signed-off-by: Muhammad Fuzail Zubari --- .cursor/commands/changelog.md | 10 +++--- .cursor/skills/superplane-changelog/SKILL.md | 34 ++++++++++++-------- 2 files changed, 25 insertions(+), 19 deletions(-) diff --git a/.cursor/commands/changelog.md b/.cursor/commands/changelog.md index 05419034b6..6f3da21174 100644 --- a/.cursor/commands/changelog.md +++ b/.cursor/commands/changelog.md @@ -6,17 +6,17 @@ description: Generate a "what's new" changelog from merged commits over a time r Generate a changelog of what was merged to `main` for a given time range. The output is a single markdown file in `tmp/` with new integrations, new components and triggers, improvements, security updates, and bug fixes. -**Use the skill `superplane-changelog`** for the full workflow: time range, classifying commits (new integrations vs new components vs improvements vs security vs bug fixes), format rules (no em dashes, no "We", **Integration:** components, user-focused improvements, dedicated Security section with CVE when available), and output structure. +**Use the skill `superplane-changelog`** for the full workflow: time range, classifying commits (new integrations vs new components vs improvements vs security vs bug fixes), format rules (no em dashes, no "We", **Integration:** components, user-focused improvements, dedicated Security section with CVE when available), and output structure. Section titles must include numeric counts for both integrations (e.g. "#### 3 new integrations") and components/triggers (e.g. "#### 12 new components and triggers"). ## Input -- **Time range** (required): e.g. "since Monday", "last 5 days", "since last Friday", or "from Feb 3 to now". If the user does not specify, ask or default to "since Monday (5 days)". 
+- **Time range** (required): e.g. "since Monday", "last 5 days", "since last Friday", "from Feb 3 to now", or "since v0.6.0". If the user does not specify, ask or default to "since Monday (5 days)". ## Process -1. Determine start and end dates from the user's time range. -2. Run `git log --since="" --format="%h %ad %s" --date=short main` and use it to identify what landed in the window. -3. Classify: new integrations (whole integration new), new components/triggers only (filter by date; for existing integrations list only new components), user-facing improvements (no tech-only items), security fixes (separate section; CVE when available), bug fixes. +1. Determine start and end of the window. When the range is version-based (e.g. "since v0.6.0"), use **date and time**: get the tag's commit timestamp (e.g. `git log -1 --format="%cI" v0.6.0`) so the window is strictly after the tag and same-day commits before the tag are excluded. +2. Run `git log --since="" --format="%h %ad %s" main` with `` as a date (`YYYY-MM-DD`) or as the tag's ISO 8601 timestamp when using a version tag. Use `--date=short` or `--date=iso` as appropriate. Use the result to identify what landed in the window. +3. Classify: new integrations (whole integration new), new components/triggers only (filter by date; for existing integrations list only new components), user-facing improvements (no tech-only items), security fixes (separate section; CVE when available), bug fixes. Do not include or derive entries from `chore:` commits. Omit bug/security fixes that only affect a component or integration introduced in this changelog window. 4. Resolve component/trigger names from `pkg/integrations/` and `pkg/components/` (Labels). 5. Write `tmp/changelog__to_.md` following the skill's structure and format rules. 
diff --git a/.cursor/skills/superplane-changelog/SKILL.md b/.cursor/skills/superplane-changelog/SKILL.md index 83d435012c..c55b70e929 100644 --- a/.cursor/skills/superplane-changelog/SKILL.md +++ b/.cursor/skills/superplane-changelog/SKILL.md @@ -11,9 +11,13 @@ Use this skill when the user wants a changelog of what was merged to `main` over ## 1. Determine time range -- **User may say**: "since Monday", "last 5 days", "since last Friday", "Feb 3 to now", or a specific date. -- **Compute**: Start date (e.g. last Monday = start of week) and end date (today). For "last 5 days" use Monday to Friday; for "since last Friday" use that Friday through today. -- **Git**: Use `git log --since="YYYY-MM-DD" --format="%h %ad %s" --date=short main` to list commits. Only include in the changelog items whose merge/commit date falls **on or after** the start date. +- **User may say**: "since Monday", "last 5 days", "since last Friday", "Feb 3 to now", "since v0.6.0", or a specific date. +- **Compute**: Start and end of the window. Use **date and time** (not just date) when the start is a version tag so that same-day commits before the tag are excluded. + - **Date-only ranges** (e.g. "since Monday", "Feb 3 to now"): Start = date at midnight, end = today. For "last 5 days" use Monday to Friday; for "since last Friday" use that Friday through today. + - **Version-tag ranges** (e.g. "since v0.6.0"): Start = **exact commit timestamp of the tag** (e.g. `git log -1 --format="%cI" v0.6.0` for ISO 8601). End = now or a chosen end date. This ensures commits that landed the same calendar day but before the tag are not included. +- **Git**: Use `git log --since="" --format="%h %ad %s" main` where `` is: + - For date-only: `YYYY-MM-DD` (e.g. `2026-02-03`). Use `--date=short` in the format. + - For version-tag: the tag's commit timestamp in ISO 8601 (e.g. `2026-02-01T15:30:00+00:00`). Use `--date=iso` if you need to compare times. 
Only include in the changelog items whose commit/merge date is **strictly after** the start when using a tag. --- @@ -21,11 +25,12 @@ Use this skill when the user wants a changelog of what was merged to `main` over From commit messages and dates: +- **Exclude `chore:` commits (mandatory).** Do not list or derive any changelog entry from commits whose subject starts with `chore:` or `chore(...):`. This applies to every section: do not add an improvement, integration, component, or any other bullet based on a chore commit, even if the change seems user-facing (e.g. "Allow running multiple instances" is still a chore and must be omitted). When classifying what landed, skip chore commits entirely; only use `feat:`, `fix:`, `docs:` (for user-facing doc changes), and similar non-chore prefixes as sources for changelog entries. - **New integrations**: Integrations that were **fully added** in the window (base integration registered + first components). Example: SendGrid, Jira. Do **not** count standalone components (e.g. SSH is a component under `pkg/components/ssh`, not an integration). -- **New components and triggers**: Only components/triggers that **first appeared in the time window**. If an integration already existed, list only the new component(s) (e.g. GitHub: Get Release). If the integration is new, list all its components and triggers. Use commit dates to exclude anything that landed before the start date (e.g. Cloudflare DNS records merged Feb 1 are excluded if the window is "Monday Feb 3 to now"). -- **Improvements**: User-facing product changes (RBAC, Secrets, Bounty Program, integrations UX, list vs expression, multiple instances). Exclude internal/technical items (e.g. "Component/Trigger Cleanup()", "listing integration resources with additional parameters", Cursor skills). -- **Security**: Vulnerability fixes and security-related changes from the same commit range. 
Look for commits that mention "security", "SSRF", "CVE", "vulnerability", "auth", "injection", "XSS", "sanitiz", etc. Include a dedicated **Security** section whenever at least one such fix is present. -- **Bug fixes**: Fixes and reliability improvements from the same commit range (excluding security fixes, which go under Security). Keep in "Bug Fixes" even if somewhat technical. +- **New components and triggers**: Only components/triggers that **first appeared in the time window**. If an integration already existed, list only the new component(s) (e.g. GitHub: Get Release). If the integration is new, list all its components and triggers. Use commit timestamps (date and time) to exclude anything that landed before the start of the window (e.g. when the window is "since v0.6.0", exclude commits with timestamp on or before the tag's commit time, so same-day commits before the tag are excluded). +- **Improvements**: User-facing product changes from non-chore commits only (e.g. RBAC, Secrets, integrations UX). Exclude internal/technical items (e.g. "Component/Trigger Cleanup()", "listing integration resources with additional parameters", Cursor skills). Describe each improvement in user-oriented terms: what the user can do, what problem it solves, or what benefit they get (e.g. "Define roles and permissions and control what each user can do" rather than "Permission guard in the UI"). +- **Security**: Vulnerability fixes and security-related changes from the same commit range. Look for commits that mention "security", "SSRF", "CVE", "vulnerability", "auth", "injection", "XSS", "sanitiz", etc. Include a dedicated **Security** section whenever at least one such fix is present. Do not list a security fix if it only affects a component or integration that was introduced in this changelog window. +- **Bug fixes**: Fixes and reliability improvements from the same commit range (excluding security fixes, which go under Security). Keep in "Bug Fixes" even if somewhat technical. 
Do not list a fix if it only affects a component or integration that was introduced in this changelog window (e.g. "fix: AWS ECR timestamp" when ECR was added in the same window). To resolve component/trigger names and which integration they belong to, use `pkg/integrations/*/` and `pkg/components/*/`: check each integration's `Components()` and `Triggers()` and their `Label()` / `Name()` (e.g. `aws.go` for AWS, `ecr/`, `codeartifact/`). @@ -37,9 +42,9 @@ To resolve component/trigger names and which integration they belong to, use `pk - **No "We" language**. Use direct, neutral phrasing (e.g. "Role-based access control." not "We introduced role-based access control."). - **New integrations section**: List only integration names, one per line (e.g. SendGrid, Jira). - **New components section**: Use **Integration:** Component1, Component2, ... One line per integration or standalone component (e.g. **GitHub:** Get Release; **SSH:** Run commands on remote hosts). -- **Improvements**: Each bullet is **Bold label**: Short, user-focused description. No implementation details. No "We". +- **Improvements**: Each bullet is **Bold label**: Short, user-oriented description. Write from the user's perspective: what they can do, what problem it solves, or what benefit they get. Avoid implementation jargon (e.g. "permission guard", "payload limit"); prefer outcome and capability (e.g. "Control what each user can do in your organization", "Secrets can be used in the SSH component to store private keys"). No "We". - **Security**: Dedicated section (use only when there are security-related commits). Each bullet: include **CVE identifier** when available (e.g. CVE-2024-12345), then a short description of the vulnerability or fix. If no CVE, use "Fixed: " plus description (e.g. "Fixed: SSRF protection added to HTTP requests"). Same tone as rest of changelog; no em dashes. -- **Bug fixes**: Each bullet starts with "Fixed: " then a short description. 
Do not list security fixes here; they go under Security. +- **Bug fixes**: Each bullet starts with "Fixed: " then a short description. Do not list security fixes here; they go under Security. Omit fixes that only apply to components or integrations that are new in this changelog. --- @@ -47,6 +52,8 @@ To resolve component/trigger names and which integration they belong to, use `pk Write a single file to `tmp/changelog_YYYY-MM-DD_to_YYYY-MM-DD.md` (or similar) with this structure: +- **Section titles must include the numeric count** for both integrations and components (e.g. "#### 3 new integrations", "#### 12 new components and triggers"). Count each integration as 1. For components and triggers, count each component or trigger as 1 (e.g. one line "**GitHub:** Get Release, On Release" is 2). + ```markdown # SuperPlane Changelog (Feb X-Y, YYYY) @@ -67,7 +74,6 @@ Write a single file to `tmp/changelog_YYYY-MM-DD_to_YYYY-MM-DD.md` (or similar) - **RBAC**: Role-based access control. Define roles and permissions... - **Secrets**: Create, update, and delete organization secrets... - - **Bounty Program**: Get paid for building integrations. See [link]... - (etc.) #### Security @@ -83,15 +89,15 @@ Write a single file to `tmp/changelog_YYYY-MM-DD_to_YYYY-MM-DD.md` (or similar) ``` - Use three spaces before list bullets for indentation under each #### heading. -- Counts (N new integrations, M new components and triggers) must match the listed items and the chosen time window. +- Replace N and M with the actual counts. N = number of integrations listed. M = total number of components and triggers (each component or trigger counts as 1, even when several are on one line). Counts must match the listed items and the chosen time window. --- ## 5. Workflow summary -1. Ask for or infer time range (e.g. "Monday to now" = 5 days). -2. Run `git log --since="" --format="%h %ad %s" --date=short main` and optionally inspect merge dates for key PRs. -3. 
Identify new integrations (whole new integration only), new components/triggers (per integration, only in window), improvements (user-facing only), security fixes (dedicated section; separate from bug fixes), and bug fixes. +1. Ask for or infer time range (e.g. "Monday to now" = 5 days; "since v0.6.0" = after the tag's commit timestamp). +2. Run `git log --since="" --format="%h %ad %s" main` with `` as date (`YYYY-MM-DD`) or as the tag's commit timestamp in ISO 8601 when the range is version-based. Use `--date=short` or `--date=iso` as needed. Optionally inspect merge dates for key PRs. +3. Identify new integrations (whole new integration only), new components/triggers (per integration, only in window), improvements (user-facing only; never derived from chore commits), security fixes (dedicated section; separate from bug fixes), and bug fixes. Do not include or derive any entry from `chore:` or `chore(...):` commits in any section. 4. Resolve labels from code: `pkg/integrations//` and `pkg/components/` for component/trigger names. 5. Write `tmp/changelog_.md` following the structure and format rules above. 6. Tell the user the file path and that they can review or edit it. 
From 936164f514e9f80d2f91610c05ce4e17d4568ad4 Mon Sep 17 00:00:00 2001 From: e-todorovski-bm <132341821+e-todorovski-bm@users.noreply.github.com> Date: Tue, 10 Feb 2026 09:25:58 +0100 Subject: [PATCH 085/160] feat: PagerDuty list notes (#2857) UI: Screenshot 2026-02-04 at 10 29 21 Screenshot 2026-02-04 at 10 29 32 Screenshot 2026-02-04 at 10 29 46 Screenshot 2026-02-04 at 10 29 57 --------- Signed-off-by: Emil Todorovski Signed-off-by: Muhammad Fuzail Zubari --- docs/components/PagerDuty.mdx | 88 ------------------- pkg/integrations/pagerduty/client.go | 35 -------- pkg/integrations/pagerduty/example.go | 10 --- pkg/integrations/pagerduty/pagerduty.go | 1 - .../workflowv2/mappers/pagerduty/index.ts | 3 - .../mappers/pagerduty/list_notes.ts | 81 +++++++++-------- .../workflowv2/mappers/pagerduty/types.ts | 23 ----- 7 files changed, 42 insertions(+), 199 deletions(-) diff --git a/docs/components/PagerDuty.mdx b/docs/components/PagerDuty.mdx index 516b002614..7e352f496b 100644 --- a/docs/components/PagerDuty.mdx +++ b/docs/components/PagerDuty.mdx @@ -20,7 +20,6 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; - @@ -655,93 +654,6 @@ Returns a list of open incidents with: } ``` - - -## List Log Entries - -The List Log Entries component retrieves all log entries (audit trail) for a PagerDuty incident. - -### Use Cases - -- **Audit trail**: Access complete incident history for compliance or review -- **Timeline reconstruction**: Build a detailed timeline of all incident activity -- **Incident analysis**: Analyze escalation patterns and response times -- **Forensics**: Review all actions taken during an incident - -### Configuration - -- **Incident ID**: The ID of the incident to list log entries for (e.g., A12BC34567...) 
-- **Limit**: Maximum number of log entries to return (default: 100) - -### Output - -Returns a list of log entries with: -- **id**: Log entry ID -- **type**: The type of log entry (e.g., trigger_log_entry, acknowledge_log_entry, annotate_log_entry) -- **summary**: A summary of what happened -- **created_at**: When the log entry was created -- **agent**: The agent (user or service) that caused the log entry -- **channel**: The channel through which the action was performed - -### Example Output - -```json -{ - "data": { - "log_entries": [ - { - "agent": { - "html_url": "https://acme.pagerduty.com/services/PLH1HKV", - "id": "PLH1HKV", - "summary": "API Service", - "type": "service_reference" - }, - "channel": { - "type": "api" - }, - "created_at": "2024-01-15T10:00:00Z", - "id": "Q02JTSNZWHSEKV", - "summary": "Triggered through the API", - "type": "trigger_log_entry" - }, - { - "agent": { - "html_url": "https://acme.pagerduty.com/users/PUSER01", - "id": "PUSER01", - "summary": "John Smith", - "type": "user_reference" - }, - "channel": { - "type": "web_ui" - }, - "created_at": "2024-01-15T10:15:00Z", - "id": "Q02JTSNZWHSEKW", - "summary": "Acknowledged by John Smith", - "type": "acknowledge_log_entry" - }, - { - "agent": { - "html_url": "https://acme.pagerduty.com/users/PUSER01", - "id": "PUSER01", - "summary": "John Smith", - "type": "user_reference" - }, - "channel": { - "type": "web_ui" - }, - "created_at": "2024-01-15T10:30:00Z", - "id": "Q02JTSNZWHSEKX", - "summary": "John Smith added a note", - "type": "annotate_log_entry" - } - ], - "total": 3 - }, - "timestamp": "2024-01-15T11:00:00Z", - "type": "pagerduty.log_entries.list" -} -``` - ## List Notes diff --git a/pkg/integrations/pagerduty/client.go b/pkg/integrations/pagerduty/client.go index abcefc9e49..e6f68c5a14 100644 --- a/pkg/integrations/pagerduty/client.go +++ b/pkg/integrations/pagerduty/client.go @@ -742,38 +742,3 @@ func (c *Client) ListIncidentNotes(incidentID string) ([]Note, error) { return 
response.Notes, nil } - -// LogEntry represents a log entry for a PagerDuty incident -type LogEntry struct { - ID string `json:"id"` - Type string `json:"type"` - Summary string `json:"summary"` - CreatedAt string `json:"created_at"` - Agent *ServiceRef `json:"agent"` - Channel *LogChannel `json:"channel"` -} - -// LogChannel represents the channel through which a log entry was created -type LogChannel struct { - Type string `json:"type"` -} - -// ListIncidentLogEntries retrieves log entries for a given incident -func (c *Client) ListIncidentLogEntries(incidentID string, limit int) ([]LogEntry, error) { - apiURL := fmt.Sprintf("%s/incidents/%s/log_entries?limit=%d", c.BaseURL, incidentID, limit) - responseBody, err := c.execRequest(http.MethodGet, apiURL, nil) - if err != nil { - return nil, err - } - - var response struct { - LogEntries []LogEntry `json:"log_entries"` - } - - err = json.Unmarshal(responseBody, &response) - if err != nil { - return nil, fmt.Errorf("error parsing response: %v", err) - } - - return response.LogEntries, nil -} diff --git a/pkg/integrations/pagerduty/example.go b/pkg/integrations/pagerduty/example.go index 632ef83020..40c732f0b2 100644 --- a/pkg/integrations/pagerduty/example.go +++ b/pkg/integrations/pagerduty/example.go @@ -61,12 +61,6 @@ var exampleOutputListNotesBytes []byte var exampleOutputListNotesOnce sync.Once var exampleOutputListNotes map[string]any -//go:embed example_output_list_log_entries.json -var exampleOutputListLogEntriesBytes []byte - -var exampleOutputListLogEntriesOnce sync.Once -var exampleOutputListLogEntries map[string]any - func (c *CreateIncident) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateIncidentOnce, exampleOutputCreateIncidentBytes, &exampleOutputCreateIncident) } @@ -91,10 +85,6 @@ func (l *ListNotes) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputListNotesOnce, exampleOutputListNotesBytes, &exampleOutputListNotes) } -func (l 
*ListLogEntries) ExampleOutput() map[string]any { - return utils.UnmarshalEmbeddedJSON(&exampleOutputListLogEntriesOnce, exampleOutputListLogEntriesBytes, &exampleOutputListLogEntries) -} - func (t *OnIncident) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleDataOnIncidentOnce, exampleDataOnIncidentBytes, &exampleDataOnIncident) } diff --git a/pkg/integrations/pagerduty/pagerduty.go b/pkg/integrations/pagerduty/pagerduty.go index 6e9c08a048..11861f6844 100644 --- a/pkg/integrations/pagerduty/pagerduty.go +++ b/pkg/integrations/pagerduty/pagerduty.go @@ -139,7 +139,6 @@ func (p *PagerDuty) Components() []core.Component { &AnnotateIncident{}, &ListIncidents{}, &ListNotes{}, - &ListLogEntries{}, &SnoozeIncident{}, } } diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts index 8cacb7f46f..28ea68f434 100644 --- a/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts @@ -7,7 +7,6 @@ import { updateIncidentMapper } from "./update_incident"; import { annotateIncidentMapper } from "./annotate_incident"; import { listIncidentsMapper, LIST_INCIDENTS_STATE_REGISTRY } from "./list_incidents"; import { listNotesMapper } from "./list_notes"; -import { listLogEntriesMapper } from "./list_log_entries"; import { snoozeIncidentMapper } from "./snooze_incident"; import { buildActionStateRegistry } from "../utils"; @@ -17,7 +16,6 @@ export const componentMappers: Record = { annotateIncident: annotateIncidentMapper, listIncidents: listIncidentsMapper, listNotes: listNotesMapper, - listLogEntries: listLogEntriesMapper, snoozeIncident: snoozeIncidentMapper, }; @@ -33,6 +31,5 @@ export const eventStateRegistry: Record = { annotateIncident: buildActionStateRegistry("annotated"), listIncidents: LIST_INCIDENTS_STATE_REGISTRY, listNotes: buildActionStateRegistry("listed"), - listLogEntries: buildActionStateRegistry("listed"), 
snoozeIncident: buildActionStateRegistry("snoozed"), }; diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts index 7a361d9ec2..c0a8b41e2f 100644 --- a/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts @@ -1,23 +1,22 @@ -import pdIcon from "@/assets/icons/integrations/pagerduty.svg"; +import { + ComponentsNode, + ComponentsComponent, + CanvasesCanvasNodeExecution, + CanvasesCanvasNodeQueueItem, +} from "@/api-client"; import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; -import { MetadataItem } from "@/ui/metadataList"; import { getBackgroundColorClass } from "@/utils/colors"; -import { formatTimeAgo } from "@/utils/date"; import { getState, getStateMap, getTriggerRenderer } from ".."; -import { - ComponentBaseContext, - ComponentBaseMapper, - ExecutionDetailsContext, - ExecutionInfo, - OutputPayload, - SubtitleContext, -} from "../types"; +import { ComponentBaseMapper, OutputPayload } from "../types"; +import { MetadataItem } from "@/ui/metadataList"; +import pdIcon from "@/assets/icons/integrations/pagerduty.svg"; +import { formatTimeAgo } from "@/utils/date"; import { ListNotesResponse, Note } from "./types"; /** * Extracts the first payload from execution outputs. */ -function getFirstPayload(execution: ExecutionInfo): OutputPayload | null { +function getFirstPayload(execution: CanvasesCanvasNodeExecution): OutputPayload | null { const outputs = execution.outputs as { default?: OutputPayload[] } | undefined; if (!outputs) return null; @@ -31,7 +30,7 @@ function getFirstPayload(execution: ExecutionInfo): OutputPayload | null { /** * Extracts notes from the execution payload. 
*/ -function getNotes(execution: ExecutionInfo): Note[] { +function getNotes(execution: CanvasesCanvasNodeExecution): Note[] { const payload = getFirstPayload(execution); if (!payload || !payload.data) return []; @@ -42,30 +41,31 @@ function getNotes(execution: ExecutionInfo): Note[] { } export const listNotesMapper: ComponentBaseMapper = { - props(context: ComponentBaseContext): ComponentBaseProps { - const lastExecution = - context.lastExecutions && context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; - const componentName = context.componentDefinition?.name || "unknown"; + props( + nodes: ComponentsNode[], + node: ComponentsNode, + componentDefinition: ComponentsComponent, + lastExecutions: CanvasesCanvasNodeExecution[], + _?: CanvasesCanvasNodeQueueItem[], + ): ComponentBaseProps { + const lastExecution = lastExecutions.length > 0 ? lastExecutions[0] : null; + const componentName = componentDefinition.name || node.component?.name || "unknown"; return { iconSrc: pdIcon, - collapsedBackground: getBackgroundColorClass(context.componentDefinition?.color), - collapsed: context.node?.isCollapsed ?? false, - title: - context.node?.name || - context.componentDefinition?.label || - context.componentDefinition?.name || - "Unnamed component", - eventSections: lastExecution ? baseEventSections(context.nodes, lastExecution, componentName) : undefined, - metadata: metadataList(context.node), + collapsedBackground: getBackgroundColorClass(componentDefinition.color), + collapsed: node.isCollapsed, + title: node.name || componentDefinition.label || componentDefinition.name || "Unnamed component", + eventSections: lastExecution ? 
baseEventSections(nodes, lastExecution, componentName) : undefined, + metadata: metadataList(node), includeEmptyState: !lastExecution, eventStateMap: getStateMap(componentName), }; }, - subtitle(context: SubtitleContext): string { - const timeAgo = formatTimeAgo(new Date(context.execution.createdAt!)); - const notes = getNotes(context.execution); + subtitle(_node: ComponentsNode, execution: CanvasesCanvasNodeExecution): string { + const timeAgo = formatTimeAgo(new Date(execution.createdAt!)); + const notes = getNotes(execution); if (notes.length > 0) { return `${notes.length} note${notes.length === 1 ? "" : "s"} · ${timeAgo}`; @@ -74,24 +74,23 @@ export const listNotesMapper: ComponentBaseMapper = { return `no notes · ${timeAgo}`; }, - getExecutionDetails(context: ExecutionDetailsContext): Record { + getExecutionDetails(execution: CanvasesCanvasNodeExecution, _: ComponentsNode): Record { const details: Record = {}; // Add "Checked at" timestamp - if (context.execution.createdAt) { - details["Checked at"] = new Date(context.execution.createdAt).toLocaleString(); + if (execution.createdAt) { + details["Checked at"] = new Date(execution.createdAt).toLocaleString(); } - const notes = getNotes(context.execution); + const notes = getNotes(execution); details["Notes"] = `${notes.length} note${notes.length === 1 ? 
"" : "s"} fetched`; return details; }, }; -function metadataList(node: { configuration?: unknown }): MetadataItem[] { +function metadataList(node: ComponentsNode): MetadataItem[] { const metadata: MetadataItem[] = []; - if (!node) return metadata; const configuration = node.configuration as any; if (configuration.incidentId) { @@ -101,10 +100,14 @@ function metadataList(node: { configuration?: unknown }): MetadataItem[] { return metadata; } -function baseEventSections(nodes: { id: string }[], execution: ExecutionInfo, componentName: string): EventSection[] { +function baseEventSections( + nodes: ComponentsNode[], + execution: CanvasesCanvasNodeExecution, + componentName: string, +): EventSection[] { const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); - const rootTriggerRenderer = getTriggerRenderer((rootTriggerNode as any)?.trigger?.name || ""); - const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent! }); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.trigger?.name || ""); + const { title } = rootTriggerRenderer.getTitleAndSubtitle(execution.rootEvent!); const notes = getNotes(execution); const timeAgo = formatTimeAgo(new Date(execution.createdAt!)); @@ -122,7 +125,7 @@ function baseEventSections(nodes: { id: string }[], execution: ExecutionInfo, co eventTitle: title, eventSubtitle, eventState: getState(componentName)(execution), - eventId: execution.rootEvent!.id, + eventId: execution.rootEvent!.id!, }, ]; } diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts index 7945b0ddb9..e0ff6c05db 100644 --- a/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts @@ -67,26 +67,3 @@ export interface ListNotesResponse { notes: Note[]; total: number; } - -export interface LogEntry { - id?: string; - type?: string; - summary?: string; - created_at?: string; - 
agent?: ResourceRef; - channel?: LogChannel; -} - -export interface LogChannel { - type?: string; -} - -export interface ListLogEntriesConfiguration { - incidentId?: string; - limit?: number; -} - -export interface ListLogEntriesResponse { - log_entries: LogEntry[]; - total: number; -} From 7768253c2891a29ea0d0d69b541b259297f3cb70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Tue, 10 Feb 2026 11:33:46 +0100 Subject: [PATCH 086/160] chore: Fix frontend mappers to use new format (#2996) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- .../mappers/pagerduty/list_notes.ts | 72 +++++++++---------- 1 file changed, 34 insertions(+), 38 deletions(-) diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts index c0a8b41e2f..76d0fd602f 100644 --- a/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts @@ -1,13 +1,15 @@ -import { - ComponentsNode, - ComponentsComponent, - CanvasesCanvasNodeExecution, - CanvasesCanvasNodeQueueItem, -} from "@/api-client"; import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; import { getBackgroundColorClass } from "@/utils/colors"; import { getState, getStateMap, getTriggerRenderer } from ".."; -import { ComponentBaseMapper, OutputPayload } from "../types"; +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + ExecutionInfo, + NodeInfo, + OutputPayload, + SubtitleContext, +} from "../types"; import { MetadataItem } from "@/ui/metadataList"; import pdIcon from "@/assets/icons/integrations/pagerduty.svg"; import { formatTimeAgo } from "@/utils/date"; @@ -16,7 +18,7 @@ import { ListNotesResponse, Note } from "./types"; /** * Extracts the first payload from execution outputs. 
*/ -function getFirstPayload(execution: CanvasesCanvasNodeExecution): OutputPayload | null { +function getFirstPayload(execution: ExecutionInfo): OutputPayload | null { const outputs = execution.outputs as { default?: OutputPayload[] } | undefined; if (!outputs) return null; @@ -30,7 +32,7 @@ function getFirstPayload(execution: CanvasesCanvasNodeExecution): OutputPayload /** * Extracts notes from the execution payload. */ -function getNotes(execution: CanvasesCanvasNodeExecution): Note[] { +function getNotes(execution: ExecutionInfo): Note[] { const payload = getFirstPayload(execution); if (!payload || !payload.data) return []; @@ -41,31 +43,29 @@ function getNotes(execution: CanvasesCanvasNodeExecution): Note[] { } export const listNotesMapper: ComponentBaseMapper = { - props( - nodes: ComponentsNode[], - node: ComponentsNode, - componentDefinition: ComponentsComponent, - lastExecutions: CanvasesCanvasNodeExecution[], - _?: CanvasesCanvasNodeQueueItem[], - ): ComponentBaseProps { - const lastExecution = lastExecutions.length > 0 ? lastExecutions[0] : null; - const componentName = componentDefinition.name || node.component?.name || "unknown"; + props(context: ComponentBaseContext): ComponentBaseProps { + const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; + const componentName = context.componentDefinition.name ?? "pagerduty"; return { iconSrc: pdIcon, - collapsedBackground: getBackgroundColorClass(componentDefinition.color), - collapsed: node.isCollapsed, - title: node.name || componentDefinition.label || componentDefinition.name || "Unnamed component", - eventSections: lastExecution ? 
baseEventSections(nodes, lastExecution, componentName) : undefined, - metadata: metadataList(node), + collapsedBackground: getBackgroundColorClass(context.componentDefinition.color), + collapsed: context.node.isCollapsed, + title: + context.node.name || + context.componentDefinition.label || + context.componentDefinition.name || + "Unnamed component", + eventSections: lastExecution ? baseEventSections(context.nodes, lastExecution, componentName) : undefined, + metadata: metadataList(context.node), includeEmptyState: !lastExecution, eventStateMap: getStateMap(componentName), }; }, - subtitle(_node: ComponentsNode, execution: CanvasesCanvasNodeExecution): string { - const timeAgo = formatTimeAgo(new Date(execution.createdAt!)); - const notes = getNotes(execution); + subtitle(context: SubtitleContext): string { + const timeAgo = formatTimeAgo(new Date(context.execution.createdAt!)); + const notes = getNotes(context.execution); if (notes.length > 0) { return `${notes.length} note${notes.length === 1 ? "" : "s"} · ${timeAgo}`; @@ -74,22 +74,22 @@ export const listNotesMapper: ComponentBaseMapper = { return `no notes · ${timeAgo}`; }, - getExecutionDetails(execution: CanvasesCanvasNodeExecution, _: ComponentsNode): Record { + getExecutionDetails(context: ExecutionDetailsContext): Record { const details: Record = {}; // Add "Checked at" timestamp - if (execution.createdAt) { - details["Checked at"] = new Date(execution.createdAt).toLocaleString(); + if (context.execution.createdAt) { + details["Checked at"] = new Date(context.execution.createdAt).toLocaleString(); } - const notes = getNotes(execution); + const notes = getNotes(context.execution); details["Notes"] = `${notes.length} note${notes.length === 1 ? 
"" : "s"} fetched`; return details; }, }; -function metadataList(node: ComponentsNode): MetadataItem[] { +function metadataList(node: NodeInfo): MetadataItem[] { const metadata: MetadataItem[] = []; const configuration = node.configuration as any; @@ -100,14 +100,10 @@ function metadataList(node: ComponentsNode): MetadataItem[] { return metadata; } -function baseEventSections( - nodes: ComponentsNode[], - execution: CanvasesCanvasNodeExecution, - componentName: string, -): EventSection[] { +function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); - const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.trigger?.name || ""); - const { title } = rootTriggerRenderer.getTitleAndSubtitle(execution.rootEvent!); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName ?? ""); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent! 
}); const notes = getNotes(execution); const timeAgo = formatTimeAgo(new Date(execution.createdAt!)); From 09483471f97ea98f50fd7f62d071595438d3503e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Tue, 10 Feb 2026 18:17:43 +0100 Subject: [PATCH 087/160] feat: PagerDuty list log entries (#3012) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Emil Todorovski Signed-off-by: Igor Šarčević Co-authored-by: Emil Todorovski Signed-off-by: Muhammad Fuzail Zubari --- docs/components/PagerDuty.mdx | 88 +++++++++++++++++++ pkg/integrations/pagerduty/client.go | 35 ++++++++ pkg/integrations/pagerduty/example.go | 10 +++ pkg/integrations/pagerduty/pagerduty.go | 1 + .../workflowv2/mappers/pagerduty/index.ts | 3 + .../mappers/pagerduty/list_notes.ts | 31 +++---- .../workflowv2/mappers/pagerduty/types.ts | 23 +++++ 7 files changed, 176 insertions(+), 15 deletions(-) diff --git a/docs/components/PagerDuty.mdx b/docs/components/PagerDuty.mdx index 7e352f496b..516b002614 100644 --- a/docs/components/PagerDuty.mdx +++ b/docs/components/PagerDuty.mdx @@ -20,6 +20,7 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + @@ -654,6 +655,93 @@ Returns a list of open incidents with: } ``` + + +## List Log Entries + +The List Log Entries component retrieves all log entries (audit trail) for a PagerDuty incident. + +### Use Cases + +- **Audit trail**: Access complete incident history for compliance or review +- **Timeline reconstruction**: Build a detailed timeline of all incident activity +- **Incident analysis**: Analyze escalation patterns and response times +- **Forensics**: Review all actions taken during an incident + +### Configuration + +- **Incident ID**: The ID of the incident to list log entries for (e.g., A12BC34567...) 
+- **Limit**: Maximum number of log entries to return (default: 100) + +### Output + +Returns a list of log entries with: +- **id**: Log entry ID +- **type**: The type of log entry (e.g., trigger_log_entry, acknowledge_log_entry, annotate_log_entry) +- **summary**: A summary of what happened +- **created_at**: When the log entry was created +- **agent**: The agent (user or service) that caused the log entry +- **channel**: The channel through which the action was performed + +### Example Output + +```json +{ + "data": { + "log_entries": [ + { + "agent": { + "html_url": "https://acme.pagerduty.com/services/PLH1HKV", + "id": "PLH1HKV", + "summary": "API Service", + "type": "service_reference" + }, + "channel": { + "type": "api" + }, + "created_at": "2024-01-15T10:00:00Z", + "id": "Q02JTSNZWHSEKV", + "summary": "Triggered through the API", + "type": "trigger_log_entry" + }, + { + "agent": { + "html_url": "https://acme.pagerduty.com/users/PUSER01", + "id": "PUSER01", + "summary": "John Smith", + "type": "user_reference" + }, + "channel": { + "type": "web_ui" + }, + "created_at": "2024-01-15T10:15:00Z", + "id": "Q02JTSNZWHSEKW", + "summary": "Acknowledged by John Smith", + "type": "acknowledge_log_entry" + }, + { + "agent": { + "html_url": "https://acme.pagerduty.com/users/PUSER01", + "id": "PUSER01", + "summary": "John Smith", + "type": "user_reference" + }, + "channel": { + "type": "web_ui" + }, + "created_at": "2024-01-15T10:30:00Z", + "id": "Q02JTSNZWHSEKX", + "summary": "John Smith added a note", + "type": "annotate_log_entry" + } + ], + "total": 3 + }, + "timestamp": "2024-01-15T11:00:00Z", + "type": "pagerduty.log_entries.list" +} +``` + ## List Notes diff --git a/pkg/integrations/pagerduty/client.go b/pkg/integrations/pagerduty/client.go index e6f68c5a14..abcefc9e49 100644 --- a/pkg/integrations/pagerduty/client.go +++ b/pkg/integrations/pagerduty/client.go @@ -742,3 +742,38 @@ func (c *Client) ListIncidentNotes(incidentID string) ([]Note, error) { return 
response.Notes, nil } + +// LogEntry represents a log entry for a PagerDuty incident +type LogEntry struct { + ID string `json:"id"` + Type string `json:"type"` + Summary string `json:"summary"` + CreatedAt string `json:"created_at"` + Agent *ServiceRef `json:"agent"` + Channel *LogChannel `json:"channel"` +} + +// LogChannel represents the channel through which a log entry was created +type LogChannel struct { + Type string `json:"type"` +} + +// ListIncidentLogEntries retrieves log entries for a given incident +func (c *Client) ListIncidentLogEntries(incidentID string, limit int) ([]LogEntry, error) { + apiURL := fmt.Sprintf("%s/incidents/%s/log_entries?limit=%d", c.BaseURL, incidentID, limit) + responseBody, err := c.execRequest(http.MethodGet, apiURL, nil) + if err != nil { + return nil, err + } + + var response struct { + LogEntries []LogEntry `json:"log_entries"` + } + + err = json.Unmarshal(responseBody, &response) + if err != nil { + return nil, fmt.Errorf("error parsing response: %v", err) + } + + return response.LogEntries, nil +} diff --git a/pkg/integrations/pagerduty/example.go b/pkg/integrations/pagerduty/example.go index 40c732f0b2..632ef83020 100644 --- a/pkg/integrations/pagerduty/example.go +++ b/pkg/integrations/pagerduty/example.go @@ -61,6 +61,12 @@ var exampleOutputListNotesBytes []byte var exampleOutputListNotesOnce sync.Once var exampleOutputListNotes map[string]any +//go:embed example_output_list_log_entries.json +var exampleOutputListLogEntriesBytes []byte + +var exampleOutputListLogEntriesOnce sync.Once +var exampleOutputListLogEntries map[string]any + func (c *CreateIncident) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateIncidentOnce, exampleOutputCreateIncidentBytes, &exampleOutputCreateIncident) } @@ -85,6 +91,10 @@ func (l *ListNotes) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputListNotesOnce, exampleOutputListNotesBytes, &exampleOutputListNotes) } +func (l 
*ListLogEntries) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputListLogEntriesOnce, exampleOutputListLogEntriesBytes, &exampleOutputListLogEntries) +} + func (t *OnIncident) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleDataOnIncidentOnce, exampleDataOnIncidentBytes, &exampleDataOnIncident) } diff --git a/pkg/integrations/pagerduty/pagerduty.go b/pkg/integrations/pagerduty/pagerduty.go index 11861f6844..6e9c08a048 100644 --- a/pkg/integrations/pagerduty/pagerduty.go +++ b/pkg/integrations/pagerduty/pagerduty.go @@ -139,6 +139,7 @@ func (p *PagerDuty) Components() []core.Component { &AnnotateIncident{}, &ListIncidents{}, &ListNotes{}, + &ListLogEntries{}, &SnoozeIncident{}, } } diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts index 28ea68f434..8cacb7f46f 100644 --- a/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/index.ts @@ -7,6 +7,7 @@ import { updateIncidentMapper } from "./update_incident"; import { annotateIncidentMapper } from "./annotate_incident"; import { listIncidentsMapper, LIST_INCIDENTS_STATE_REGISTRY } from "./list_incidents"; import { listNotesMapper } from "./list_notes"; +import { listLogEntriesMapper } from "./list_log_entries"; import { snoozeIncidentMapper } from "./snooze_incident"; import { buildActionStateRegistry } from "../utils"; @@ -16,6 +17,7 @@ export const componentMappers: Record = { annotateIncident: annotateIncidentMapper, listIncidents: listIncidentsMapper, listNotes: listNotesMapper, + listLogEntries: listLogEntriesMapper, snoozeIncident: snoozeIncidentMapper, }; @@ -31,5 +33,6 @@ export const eventStateRegistry: Record = { annotateIncident: buildActionStateRegistry("annotated"), listIncidents: LIST_INCIDENTS_STATE_REGISTRY, listNotes: buildActionStateRegistry("listed"), + listLogEntries: buildActionStateRegistry("listed"), 
snoozeIncident: buildActionStateRegistry("snoozed"), }; diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts index 76d0fd602f..7a361d9ec2 100644 --- a/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/list_notes.ts @@ -1,18 +1,17 @@ +import pdIcon from "@/assets/icons/integrations/pagerduty.svg"; import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { MetadataItem } from "@/ui/metadataList"; import { getBackgroundColorClass } from "@/utils/colors"; +import { formatTimeAgo } from "@/utils/date"; import { getState, getStateMap, getTriggerRenderer } from ".."; import { ComponentBaseContext, ComponentBaseMapper, ExecutionDetailsContext, ExecutionInfo, - NodeInfo, OutputPayload, SubtitleContext, } from "../types"; -import { MetadataItem } from "@/ui/metadataList"; -import pdIcon from "@/assets/icons/integrations/pagerduty.svg"; -import { formatTimeAgo } from "@/utils/date"; import { ListNotesResponse, Note } from "./types"; /** @@ -44,17 +43,18 @@ function getNotes(execution: ExecutionInfo): Note[] { export const listNotesMapper: ComponentBaseMapper = { props(context: ComponentBaseContext): ComponentBaseProps { - const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; - const componentName = context.componentDefinition.name ?? "pagerduty"; + const lastExecution = + context.lastExecutions && context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; + const componentName = context.componentDefinition?.name || "unknown"; return { iconSrc: pdIcon, - collapsedBackground: getBackgroundColorClass(context.componentDefinition.color), - collapsed: context.node.isCollapsed, + collapsedBackground: getBackgroundColorClass(context.componentDefinition?.color), + collapsed: context.node?.isCollapsed ?? 
false, title: - context.node.name || - context.componentDefinition.label || - context.componentDefinition.name || + context.node?.name || + context.componentDefinition?.label || + context.componentDefinition?.name || "Unnamed component", eventSections: lastExecution ? baseEventSections(context.nodes, lastExecution, componentName) : undefined, metadata: metadataList(context.node), @@ -89,8 +89,9 @@ export const listNotesMapper: ComponentBaseMapper = { }, }; -function metadataList(node: NodeInfo): MetadataItem[] { +function metadataList(node: { configuration?: unknown }): MetadataItem[] { const metadata: MetadataItem[] = []; + if (!node) return metadata; const configuration = node.configuration as any; if (configuration.incidentId) { @@ -100,9 +101,9 @@ function metadataList(node: NodeInfo): MetadataItem[] { return metadata; } -function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { +function baseEventSections(nodes: { id: string }[], execution: ExecutionInfo, componentName: string): EventSection[] { const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); - const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName ?? ""); + const rootTriggerRenderer = getTriggerRenderer((rootTriggerNode as any)?.trigger?.name || ""); const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent! 
}); const notes = getNotes(execution); @@ -121,7 +122,7 @@ function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componen eventTitle: title, eventSubtitle, eventState: getState(componentName)(execution), - eventId: execution.rootEvent!.id!, + eventId: execution.rootEvent!.id, }, ]; } diff --git a/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts b/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts index e0ff6c05db..7945b0ddb9 100644 --- a/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts +++ b/web_src/src/pages/workflowv2/mappers/pagerduty/types.ts @@ -67,3 +67,26 @@ export interface ListNotesResponse { notes: Note[]; total: number; } + +export interface LogEntry { + id?: string; + type?: string; + summary?: string; + created_at?: string; + agent?: ResourceRef; + channel?: LogChannel; +} + +export interface LogChannel { + type?: string; +} + +export interface ListLogEntriesConfiguration { + incidentId?: string; + limit?: number; +} + +export interface ListLogEntriesResponse { + log_entries: LogEntry[]; + total: number; +} From 3c02219dc42cb58eae10d3fb255167f7526a3839 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pedro=20Le=C3=A3o?= <60622592+forestileao@users.noreply.github.com> Date: Tue, 10 Feb 2026 15:33:42 -0300 Subject: [PATCH 088/160] fix: allow integrations of the same type + allow editing integration name (#3019) closes: https://github.com/superplanehq/superplane/issues/3008 closes: https://github.com/superplanehq/superplane/issues/3009 [feat: make it possible to update integration name](https://github.com/superplanehq/superplane/pull/3019/commits/015a0f3b3653f23719d795d0ba685043cc3d4947) [fix: make it possible to add integration of the same type](https://github.com/superplanehq/superplane/pull/3019/commits/4fd70fe4cf5ba16b12ec46424597a384e89b8dec) image image image --------- Signed-off-by: Pedro F. 
Leao Signed-off-by: Muhammad Fuzail Zubari --- .../settings/IntegrationDetails.tsx | 2 +- .../organization/settings/Integrations.tsx | 222 +++++++----------- web_src/src/ui/componentSidebar/index.tsx | 2 +- 3 files changed, 81 insertions(+), 145 deletions(-) diff --git a/web_src/src/pages/organization/settings/IntegrationDetails.tsx b/web_src/src/pages/organization/settings/IntegrationDetails.tsx index ecf459ac6e..c6de6c25f4 100644 --- a/web_src/src/pages/organization/settings/IntegrationDetails.tsx +++ b/web_src/src/pages/organization/settings/IntegrationDetails.tsx @@ -257,6 +257,7 @@ export function IntegrationDetails({ organizationId }: IntegrationDetailsProps) Integration Name * +

    A unique name for this integration

    -

    A unique name for this integration

    {integrationDef.configuration.map((field: ConfigurationField) => ( diff --git a/web_src/src/pages/organization/settings/Integrations.tsx b/web_src/src/pages/organization/settings/Integrations.tsx index cc95e14ab7..3bc2dc3ed4 100644 --- a/web_src/src/pages/organization/settings/Integrations.tsx +++ b/web_src/src/pages/organization/settings/Integrations.tsx @@ -45,90 +45,6 @@ export function Integrations({ organizationId }: IntegrationsProps) { organizationIntegrations.map((integration) => integration.metadata?.name?.trim()).filter(Boolean) as string[], ); }, [organizationIntegrations]); - const connectedInstancesByProvider = useMemo(() => { - const groups = new Map(); - - organizationIntegrations.forEach((integration) => { - const provider = integration.spec?.integrationName; - if (!provider) return; - const current = groups.get(provider) || []; - current.push(integration); - groups.set(provider, current); - }); - - return groups; - }, [organizationIntegrations]); - const integrationCatalog = useMemo(() => { - const catalogByProvider = new Map< - string, - { - providerName: string; - providerLabel: string; - integrationDef: IntegrationsIntegrationDefinition | null; - instances: typeof organizationIntegrations; - } - >(); - - availableIntegrations.forEach((integrationDef) => { - const providerName = integrationDef.name || ""; - const providerLabel = - integrationDef.label || - getIntegrationTypeDisplayName(undefined, integrationDef.name) || - integrationDef.name || - "Integration"; - const instances = [...(connectedInstancesByProvider.get(providerName) || [])].sort((a, b) => - (a.metadata?.name || providerLabel).localeCompare(b.metadata?.name || providerLabel), - ); - - catalogByProvider.set(providerName, { - providerName, - providerLabel, - integrationDef, - instances, - }); - }); - - connectedInstancesByProvider.forEach((instances, providerName) => { - if (catalogByProvider.has(providerName)) { - return; - } - - const providerLabel = 
getIntegrationTypeDisplayName(undefined, providerName) || providerName || "Integration"; - const sortedInstances = [...instances].sort((a, b) => - (a.metadata?.name || providerLabel).localeCompare(b.metadata?.name || providerLabel), - ); - - catalogByProvider.set(providerName, { - providerName, - providerLabel, - integrationDef: null, - instances: sortedInstances, - }); - }); - - return [...catalogByProvider.values()].sort((a, b) => a.providerLabel.localeCompare(b.providerLabel)); - }, [availableIntegrations, connectedInstancesByProvider]); - const filteredIntegrationCatalog = useMemo(() => { - const normalizedQuery = filterQuery.trim().toLowerCase(); - if (!normalizedQuery) { - return integrationCatalog; - } - - return integrationCatalog.filter((item) => { - const providerText = [item.providerLabel, item.providerName, item.integrationDef?.description] - .filter(Boolean) - .join(" ") - .toLowerCase(); - - if (providerText.includes(normalizedQuery)) { - return true; - } - - return item.instances.some((instance) => - (instance.metadata?.name || instance.spec?.integrationName || "").toLowerCase().includes(normalizedQuery), - ); - }); - }, [filterQuery, integrationCatalog]); const selectedInstructions = useMemo(() => { return selectedIntegration?.instructions?.trim(); @@ -201,48 +117,60 @@ export function Integrations({ organizationId }: IntegrationsProps) { return (
    -
    - - setFilterQuery(e.target.value)} - placeholder="Filter integrations..." - className="pl-9 pr-9" - /> - {filterQuery.length > 0 ? ( - - ) : null} -
    - {filteredIntegrationCatalog.length === 0 ? ( -
    - -

    - {integrationCatalog.length === 0 ? "No integrations available." : "No integrations match your filter."} -

    -
    - ) : ( -
    - {filteredIntegrationCatalog.map((item) => { - const connectedCount = item.instances.length; - - return ( -
    -
    -
    -
    - + {/* Integrations */} + {organizationIntegrations.length > 0 && ( +
    +

    Connected

    +
    + {[...organizationIntegrations] + .sort((a, b) => + (a.metadata?.name || a.spec?.integrationName || "").localeCompare( + b.metadata?.name || b.spec?.integrationName || "", + ), + ) + .map((integration) => { + const integrationDefinition = availableIntegrations.find( + (a) => a.name === integration.spec?.integrationName, + ); + const integrationLabel = + integrationDefinition?.label || + getIntegrationTypeDisplayName(undefined, integration.spec?.integrationName) || + integration.spec?.integrationName; + const integrationDisplayName = + integration.metadata?.name || + getIntegrationTypeDisplayName(undefined, integration.spec?.integrationName) || + integration.spec?.integrationName; + const integrationName = integrationDefinition?.name || integration.spec?.integrationName; + const statusLabel = integration.status?.state + ? integration.status.state.charAt(0).toUpperCase() + integration.status.state.slice(1) + : "Unknown"; + + return ( +
    +
    +
    + +
    +
    +

    + {integrationDisplayName} +

    + {integrationLabel && integrationDisplayName !== integrationLabel ? ( +

    Type: {integrationLabel}

    + ) : null} + {integrationDefinition?.description ? ( +

    + {integrationDefinition.description} +

    + ) : null} +

    {item.providerLabel}

    @@ -253,23 +181,31 @@ export function Integrations({ organizationId }: IntegrationsProps) { ) : null}
    - -
    +
    + )} + + {/* Available Integrations */} +
    +

    Available

    +
    + {availableIntegrations.length === 0 ? ( +
    + +

    No integrations available.

    +
    + ) : ( +
    + {[...availableIntegrations] + .sort((a, b) => (a.label || a.name || "").localeCompare(b.label || b.name || "")) + .map((app) => { + const appName = app.name; + return ( +
    {item.integrationDef ? "Connect" : "Unavailable"} diff --git a/web_src/src/ui/componentSidebar/index.tsx b/web_src/src/ui/componentSidebar/index.tsx index 43de43eed6..c849ad679b 100644 --- a/web_src/src/ui/componentSidebar/index.tsx +++ b/web_src/src/ui/componentSidebar/index.tsx @@ -1139,13 +1139,13 @@ export const ComponentSidebar = ({ Integration Name * +

    A unique name for this integration

    setConfigureIntegrationName(e.target.value)} placeholder="e.g., my-app-integration" /> -

    A unique name for this integration

    {configureIntegrationDefinition?.configuration && From 047b91cd63766819d53ae33fc6bf17294d217e83 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pedro=20Le=C3=A3o?= <60622592+forestileao@users.noreply.github.com> Date: Tue, 10 Feb 2026 16:19:37 -0300 Subject: [PATCH 089/160] fix: show integration error on the sidebar (#3020) image image Closes: https://github.com/superplanehq/superplane/issues/3010 Signed-off-by: Pedro F. Leao Signed-off-by: Muhammad Fuzail Zubari --- web_src/src/ui/componentSidebar/SettingsTab.tsx | 9 ++++----- web_src/src/ui/componentSidebar/index.tsx | 2 +- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/web_src/src/ui/componentSidebar/SettingsTab.tsx b/web_src/src/ui/componentSidebar/SettingsTab.tsx index aa16fb1adc..3a50857356 100644 --- a/web_src/src/ui/componentSidebar/SettingsTab.tsx +++ b/web_src/src/ui/componentSidebar/SettingsTab.tsx @@ -370,7 +370,6 @@ export function SettingsTab({
    {selectedIntegrationFull && ( <> -

    Connection

    {(() => { const hasIntegrationError = selectedIntegrationFull.status?.state === "error" && @@ -378,7 +377,7 @@ export function SettingsTab({ const integrationStatusCard = (
    {selectedIntegrationFull.status?.state diff --git a/web_src/src/ui/componentSidebar/index.tsx b/web_src/src/ui/componentSidebar/index.tsx index c849ad679b..927b0ea509 100644 --- a/web_src/src/ui/componentSidebar/index.tsx +++ b/web_src/src/ui/componentSidebar/index.tsx @@ -6,7 +6,7 @@ import { Input } from "@/components/ui/input"; import { Label } from "@/components/ui/label"; import { getIntegrationTypeDisplayName } from "@/utils/integrationDisplayName"; import { resolveIcon } from "@/lib/utils"; -import { Check, Copy, Loader2, Settings, TriangleAlert, X } from "lucide-react"; +import { Check, Copy, Loader2, TriangleAlert, X } from "lucide-react"; import React, { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { getHeaderIconSrc, IntegrationIcon } from "@/ui/componentSidebar/integrationIcons"; import { From b83ab47b5b020971bb370edf4ae9e7c1b56cc7cf Mon Sep 17 00:00:00 2001 From: Nwankwo Uroy <127946005+devroy10@users.noreply.github.com> Date: Tue, 10 Feb 2026 21:10:28 +0100 Subject: [PATCH 090/160] feat: Add rootly.createEvent component (#2979) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Description Closes #2821 This PR adds the Rootly **Create Event** action so SuperPlane workflows can post timeline notes/annotations to Rootly incidents. The action targets Rootly’s incident events endpoint and supports optional visibility (`internal`/`external`) while keeping defaults consistent with Rootly. ## Video Demo https://github.com/user-attachments/assets/578e7a93-97f9-4e66-b79b-6264601e1e5b ## **Backend Implementation** - Added `rootly.createEvent` action with `incidentId`, `event`, and optional `visibility` validation in setup. - Implemented Rootly client support for `POST /incidents/{id}/events` with JSON:API request/response structs for incident events. - Emits `rootly.incident.event` on success with `id`, `event`, `visibility`, `occurred_at`, and `created_at`. 
- Added example output fixture and unit tests covering success and error paths. ## **Frontend Implementation** - Added Rootly Create Event mapper to render execution details and metadata for incident ID and visibility. - Introduced `IncidentEvent` type and details formatter to display event content, visibility, and timestamps. - Registered the new mapper and event state registry for consistent execution state handling. - Reuses Rootly icon and color patterns to match existing integration styling. ## Documentation - Auto-generated component documentation via relevant make command ## Checklist - [x] Tests (existing + new component) pass - [x] Code compiles without errors - [x] Signed-off commits - [x] Example output JSON (if required) - [x] Updated component documentation - [x] Attached video of component working --------- Signed-off-by: devroy10 Signed-off-by: Muhammad Fuzail Zubari --- pkg/integrations/rootly/client.go | 88 +++++++++++-------- pkg/integrations/rootly/example.go | 10 --- pkg/integrations/rootly/rootly.go | 1 - .../pages/workflowv2/mappers/rootly/base.ts | 20 ----- .../workflowv2/mappers/rootly/create_event.ts | 23 ++++- .../pages/workflowv2/mappers/rootly/index.ts | 3 - 6 files changed, 72 insertions(+), 73 deletions(-) diff --git a/pkg/integrations/rootly/client.go b/pkg/integrations/rootly/client.go index ad5688af2c..fd1a73e530 100644 --- a/pkg/integrations/rootly/client.go +++ b/pkg/integrations/rootly/client.go @@ -259,42 +259,6 @@ type IncidentEventResponse struct { Data IncidentEventData `json:"data"` } -// severityString extracts the severity slug from the API response. -// Rootly returns severity as a string (slug) or an object with slug/name fields. 
-func severityString(v any) string { - switch s := v.(type) { - case string: - return s - case map[string]any: - if slug, ok := s["slug"].(string); ok { - return slug - } - if name, ok := s["name"].(string); ok { - return name - } - } - - return "" -} - -// incidentFromData converts a JSON:API IncidentData to a flat Incident struct. -func incidentFromData(data IncidentData) *Incident { - return &Incident{ - ID: data.ID, - SequentialID: data.Attributes.SequentialID, - Title: data.Attributes.Title, - Slug: data.Attributes.Slug, - Summary: data.Attributes.Summary, - Status: data.Attributes.Status, - Severity: severityString(data.Attributes.Severity), - StartedAt: data.Attributes.StartedAt, - ResolvedAt: data.Attributes.ResolvedAt, - MitigatedAt: data.Attributes.MitigatedAt, - UpdatedAt: data.Attributes.UpdatedAt, - URL: data.Attributes.URL, - } -} - // CreateIncidentRequest represents the request to create an incident type CreateIncidentRequest struct { Data CreateIncidentData `json:"data"` @@ -395,6 +359,58 @@ func (c *Client) CreateIncidentEvent(incidentID, event, visibility string) (*Inc }, nil } +// CreateIncidentEventRequest represents the request to create an incident event. 
+type CreateIncidentEventRequest struct { + Data CreateIncidentEventData `json:"data"` +} + +type CreateIncidentEventData struct { + Type string `json:"type"` + Attributes CreateIncidentEventAttributes `json:"attributes"` +} + +type CreateIncidentEventAttributes struct { + Event string `json:"event"` + Visibility string `json:"visibility,omitempty"` +} + +func (c *Client) CreateIncidentEvent(incidentID, event, visibility string) (*IncidentEvent, error) { + request := CreateIncidentEventRequest{ + Data: CreateIncidentEventData{ + Type: "incident_events", + Attributes: CreateIncidentEventAttributes{ + Event: event, + Visibility: visibility, + }, + }, + } + + body, err := json.Marshal(request) + if err != nil { + return nil, fmt.Errorf("error marshaling request: %v", err) + } + + url := fmt.Sprintf("%s/incidents/%s/events", c.BaseURL, incidentID) + responseBody, err := c.execRequest(http.MethodPost, url, bytes.NewReader(body)) + if err != nil { + return nil, err + } + + var response IncidentEventResponse + err = json.Unmarshal(responseBody, &response) + if err != nil { + return nil, fmt.Errorf("error parsing response: %v", err) + } + + return &IncidentEvent{ + ID: response.Data.ID, + Event: response.Data.Attributes.Event, + Visibility: response.Data.Attributes.Visibility, + OccurredAt: response.Data.Attributes.OccurredAt, + CreatedAt: response.Data.Attributes.CreatedAt, + }, nil +} + func (c *Client) GetIncident(id string) (*Incident, error) { url := fmt.Sprintf("%s/incidents/%s", c.BaseURL, id) responseBody, err := c.execRequest(http.MethodGet, url, nil) diff --git a/pkg/integrations/rootly/example.go b/pkg/integrations/rootly/example.go index 89e88c19bf..21d8e4741b 100644 --- a/pkg/integrations/rootly/example.go +++ b/pkg/integrations/rootly/example.go @@ -19,12 +19,6 @@ var exampleOutputCreateEventBytes []byte var exampleOutputCreateEventOnce sync.Once var exampleOutputCreateEvent map[string]any -//go:embed example_output_update_incident.json -var 
exampleOutputUpdateIncidentBytes []byte - -var exampleOutputUpdateIncidentOnce sync.Once -var exampleOutputUpdateIncident map[string]any - //go:embed example_data_on_incident.json var exampleDataOnIncidentBytes []byte @@ -39,10 +33,6 @@ func (c *CreateEvent) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateEventOnce, exampleOutputCreateEventBytes, &exampleOutputCreateEvent) } -func (c *UpdateIncident) ExampleOutput() map[string]any { - return utils.UnmarshalEmbeddedJSON(&exampleOutputUpdateIncidentOnce, exampleOutputUpdateIncidentBytes, &exampleOutputUpdateIncident) -} - func (t *OnIncident) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleDataOnIncidentOnce, exampleDataOnIncidentBytes, &exampleDataOnIncident) } diff --git a/pkg/integrations/rootly/rootly.go b/pkg/integrations/rootly/rootly.go index cf32d97772..ba96ead14e 100644 --- a/pkg/integrations/rootly/rootly.go +++ b/pkg/integrations/rootly/rootly.go @@ -63,7 +63,6 @@ func (r *Rootly) Components() []core.Component { return []core.Component{ &CreateIncident{}, &CreateEvent{}, - &UpdateIncident{}, } } diff --git a/web_src/src/pages/workflowv2/mappers/rootly/base.ts b/web_src/src/pages/workflowv2/mappers/rootly/base.ts index 92b5905dc3..33f17f7711 100644 --- a/web_src/src/pages/workflowv2/mappers/rootly/base.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/base.ts @@ -1,25 +1,5 @@ -import { EventSection } from "@/ui/componentBase"; -import { getState, getTriggerRenderer } from ".."; -import { ExecutionInfo, NodeInfo } from "../types"; -import { formatTimeAgo } from "@/utils/date"; import { Incident, IncidentEvent } from "./types"; -export function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { - const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); - const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); - const { title } = 
rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent! }); - - return [ - { - receivedAt: new Date(execution.createdAt!), - eventTitle: title, - eventSubtitle: formatTimeAgo(new Date(execution.createdAt!)), - eventState: getState(componentName)(execution), - eventId: execution.rootEvent!.id!, - }, - ]; -} - export function getDetailsForIncident(incident: Incident): Record { const details: Record = {}; diff --git a/web_src/src/pages/workflowv2/mappers/rootly/create_event.ts b/web_src/src/pages/workflowv2/mappers/rootly/create_event.ts index c735804a10..279a2cee89 100644 --- a/web_src/src/pages/workflowv2/mappers/rootly/create_event.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/create_event.ts @@ -1,10 +1,11 @@ -import { ComponentBaseProps } from "@/ui/componentBase"; +import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; import { getBackgroundColorClass } from "@/utils/colors"; -import { getStateMap } from ".."; +import { getState, getStateMap, getTriggerRenderer } from ".."; import { ComponentBaseContext, ComponentBaseMapper, ExecutionDetailsContext, + ExecutionInfo, NodeInfo, OutputPayload, SubtitleContext, @@ -12,7 +13,7 @@ import { import { MetadataItem } from "@/ui/metadataList"; import rootlyIcon from "@/assets/icons/integrations/rootly.svg"; import { IncidentEvent } from "./types"; -import { baseEventSections, getDetailsForIncidentEvent } from "./base"; +import { getDetailsForIncidentEvent } from "./base"; import { formatTimeAgo } from "@/utils/date"; export const createEventMapper: ComponentBaseMapper = { @@ -65,3 +66,19 @@ function metadataList(node: NodeInfo): MetadataItem[] { return metadata; } + +function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); + const { title } = 
rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle: formatTimeAgo(new Date(execution.createdAt!)), + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} diff --git a/web_src/src/pages/workflowv2/mappers/rootly/index.ts b/web_src/src/pages/workflowv2/mappers/rootly/index.ts index a02e55f3fd..d4176f273d 100644 --- a/web_src/src/pages/workflowv2/mappers/rootly/index.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/index.ts @@ -2,13 +2,11 @@ import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../typ import { onIncidentTriggerRenderer } from "./on_incident"; import { createIncidentMapper } from "./create_incident"; import { createEventMapper } from "./create_event"; -import { updateIncidentMapper } from "./update_incident"; import { buildActionStateRegistry } from "../utils"; export const componentMappers: Record = { createIncident: createIncidentMapper, createEvent: createEventMapper, - updateIncident: updateIncidentMapper, }; export const triggerRenderers: Record = { @@ -18,5 +16,4 @@ export const triggerRenderers: Record = { export const eventStateRegistry: Record = { createIncident: buildActionStateRegistry("created"), createEvent: buildActionStateRegistry("created"), - updateIncident: buildActionStateRegistry("updated"), }; From 91819f2b3ba9ef107b085684abbf6de6bf9217f1 Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Tue, 10 Feb 2026 18:34:57 -0300 Subject: [PATCH 091/160] feat: DockerHub integration (#2969) Add Docker Hub integration, including a `dockerhub.getImageTag` component and a `dockerhub.onImagePush` trigger, to enable SuperPlane users to manage and react to Docker Hub events. Webhook provisioning is manual, since DockerHub does not offer a reliable way to provision webhooks. 
--------- Signed-off-by: Lucas Pinheiro Co-authored-by: Cursor Agent Signed-off-by: Muhammad Fuzail Zubari --- pkg/core/trigger.go | 1 + pkg/public/server.go | 2 ++ pkg/server/server.go | 1 - web_src/src/pages/workflowv2/mappers/index.ts | 24 +------------------ 4 files changed, 4 insertions(+), 24 deletions(-) diff --git a/pkg/core/trigger.go b/pkg/core/trigger.go index 523874b102..0e86340fa0 100644 --- a/pkg/core/trigger.go +++ b/pkg/core/trigger.go @@ -115,6 +115,7 @@ type WebhookRequestContext struct { WorkflowID string NodeID string Configuration any + Metadata MetadataContext Logger *log.Entry Webhook NodeWebhookContext Events EventContext diff --git a/pkg/public/server.go b/pkg/public/server.go index 4c8eb55d61..4ddd80c222 100644 --- a/pkg/public/server.go +++ b/pkg/public/server.go @@ -807,6 +807,7 @@ func (s *Server) executeTriggerNode(ctx context.Context, body []byte, headers ht WorkflowID: node.WorkflowID.String(), NodeID: node.NodeID, Configuration: node.Configuration.Data(), + Metadata: contexts.NewNodeMetadataContext(tx, &node), Logger: logger, HTTP: s.registry.HTTPContext(), Webhook: contexts.NewNodeWebhookContext(ctx, tx, s.encryptor, &node, s.BaseURL+s.BasePath), @@ -841,6 +842,7 @@ func (s *Server) executeComponentNode(ctx context.Context, body []byte, headers WorkflowID: node.WorkflowID.String(), NodeID: node.NodeID, Configuration: node.Configuration.Data(), + Metadata: contexts.NewNodeMetadataContext(tx, &node), Logger: logger, HTTP: s.registry.HTTPContext(), Webhook: contexts.NewNodeWebhookContext(ctx, tx, s.encryptor, &node, s.BaseURL+s.BasePath), diff --git a/pkg/server/server.go b/pkg/server/server.go index 2ad5d330be..9286713019 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -42,7 +42,6 @@ import ( _ "github.com/superplanehq/superplane/pkg/integrations/daytona" _ "github.com/superplanehq/superplane/pkg/integrations/discord" _ "github.com/superplanehq/superplane/pkg/integrations/dockerhub" - _ 
"github.com/superplanehq/superplane/pkg/integrations/grafana" _ "github.com/superplanehq/superplane/pkg/integrations/github" _ "github.com/superplanehq/superplane/pkg/integrations/gitlab" _ "github.com/superplanehq/superplane/pkg/integrations/jira" diff --git a/web_src/src/pages/workflowv2/mappers/index.ts b/web_src/src/pages/workflowv2/mappers/index.ts index a80e55fc11..0717e3b68d 100644 --- a/web_src/src/pages/workflowv2/mappers/index.ts +++ b/web_src/src/pages/workflowv2/mappers/index.ts @@ -106,28 +106,12 @@ import { triggerRenderers as claudeTriggerRenderers, eventStateRegistry as claudeEventStateRegistry, } from "./claude/index"; -import { - componentMappers as prometheusComponentMappers, - customFieldRenderers as prometheusCustomFieldRenderers, - triggerRenderers as prometheusTriggerRenderers, - eventStateRegistry as prometheusEventStateRegistry, -} from "./prometheus/index"; -import { - componentMappers as cursorComponentMappers, - triggerRenderers as cursorTriggerRenderers, - eventStateRegistry as cursorEventStateRegistry, -} from "./cursor/index"; import { componentMappers as dockerhubComponentMappers, customFieldRenderers as dockerhubCustomFieldRenderers, triggerRenderers as dockerhubTriggerRenderers, eventStateRegistry as dockerhubEventStateRegistry, } from "./dockerhub"; -import { - componentMappers as grafanaComponentMappers, - triggerRenderers as grafanaTriggerRenderers, - eventStateRegistry as grafanaEventStateRegistry, -} from "./grafana/index"; import { filterMapper, FILTER_STATE_REGISTRY } from "./filter"; import { sshMapper, SSH_STATE_REGISTRY } from "./ssh"; import { waitCustomFieldRenderer, waitMapper, WAIT_STATE_REGISTRY } from "./wait"; @@ -178,10 +162,7 @@ const appMappers: Record> = { openai: openaiComponentMappers, circleci: circleCIComponentMappers, claude: claudeComponentMappers, - prometheus: prometheusComponentMappers, - cursor: cursorComponentMappers, dockerhub: dockerhubComponentMappers, - grafana: grafanaComponentMappers, }; const 
appTriggerRenderers: Record> = { @@ -203,10 +184,7 @@ const appTriggerRenderers: Record> = { openai: openaiTriggerRenderers, circleci: circleCITriggerRenderers, claude: claudeTriggerRenderers, - prometheus: prometheusTriggerRenderers, - cursor: cursorTriggerRenderers, dockerhub: dockerhubTriggerRenderers, - grafana: grafanaTriggerRenderers, }; const appEventStateRegistries: Record> = { @@ -228,6 +206,7 @@ const appEventStateRegistries: Record claude: claudeEventStateRegistry, aws: awsEventStateRegistry, gitlab: gitlabEventStateRegistry, + dockerhub: dockerhubEventStateRegistry, }; const componentAdditionalDataBuilders: Record = { @@ -253,7 +232,6 @@ const customFieldRenderers: Record = { const appCustomFieldRenderers: Record> = { github: githubCustomFieldRenderers, - prometheus: prometheusCustomFieldRenderers, dockerhub: dockerhubCustomFieldRenderers, }; From 87db6291b67a851f5f2b6ca34821950d676206ff Mon Sep 17 00:00:00 2001 From: Muhammad Fuzail Zubari Date: Wed, 11 Feb 2026 12:53:33 +0500 Subject: [PATCH 092/160] fixes for the trigger webhook url + for datashource uid Signed-off-by: Muhammad Fuzail Zubari --- pkg/core/trigger.go | 1 + pkg/integrations/sendgrid/on_email_event_test.go | 4 ++++ pkg/workers/contexts/node_webhook_context.go | 8 ++++++++ test/support/contexts/contexts.go | 5 +++++ web_src/src/ui/componentSidebar/index.tsx | 11 ----------- 5 files changed, 18 insertions(+), 11 deletions(-) diff --git a/pkg/core/trigger.go b/pkg/core/trigger.go index 0e86340fa0..b5c3fad91a 100644 --- a/pkg/core/trigger.go +++ b/pkg/core/trigger.go @@ -134,6 +134,7 @@ type WebhookRequestContext struct { type NodeWebhookContext interface { Setup() (string, error) + GetURL() (string, error) GetSecret() ([]byte, error) ResetSecret() ([]byte, []byte, error) GetBaseURL() string diff --git a/pkg/integrations/sendgrid/on_email_event_test.go b/pkg/integrations/sendgrid/on_email_event_test.go index 7e0282d9c2..a18f7aad48 100644 --- a/pkg/integrations/sendgrid/on_email_event_test.go 
+++ b/pkg/integrations/sendgrid/on_email_event_test.go @@ -171,6 +171,10 @@ func (t *testNodeWebhookContext) Setup() (string, error) { return "", nil } +func (t *testNodeWebhookContext) GetURL() (string, error) { + return "", nil +} + func (t *testNodeWebhookContext) GetSecret() ([]byte, error) { return t.secret, nil } diff --git a/pkg/workers/contexts/node_webhook_context.go b/pkg/workers/contexts/node_webhook_context.go index a0b1f78b69..a1c0e487be 100644 --- a/pkg/workers/contexts/node_webhook_context.go +++ b/pkg/workers/contexts/node_webhook_context.go @@ -66,6 +66,14 @@ func (c *NodeWebhookContext) ResetSecret() ([]byte, []byte, error) { return []byte(plainKey), encryptedKey, nil } +func (c *NodeWebhookContext) GetURL() (string, error) { + if c.node.WebhookID == nil { + return "", fmt.Errorf("node does not have a webhook") + } + + return fmt.Sprintf("%s/webhooks/%s", c.GetBaseURL(), c.node.WebhookID.String()), nil +} + func (c *NodeWebhookContext) Setup() (string, error) { webhook, err := c.findOrCreateWebhook() if err != nil { diff --git a/test/support/contexts/contexts.go b/test/support/contexts/contexts.go index 5fe47703d1..45d86eb63c 100644 --- a/test/support/contexts/contexts.go +++ b/test/support/contexts/contexts.go @@ -49,6 +49,11 @@ func (w *WebhookContext) Setup() (string, error) { return id.String(), nil } +func (w *WebhookContext) GetURL() (string, error) { + id := uuid.New() + return fmt.Sprintf("%s/webhooks/%s", w.GetBaseURL(), id.String()), nil +} + func (w *WebhookContext) GetBaseURL() string { return "http://localhost:3000/api/v1" } diff --git a/web_src/src/ui/componentSidebar/index.tsx b/web_src/src/ui/componentSidebar/index.tsx index 927b0ea509..fc625ff9b6 100644 --- a/web_src/src/ui/componentSidebar/index.tsx +++ b/web_src/src/ui/componentSidebar/index.tsx @@ -283,17 +283,6 @@ export const ComponentSidebar = ({ ); const selectedIntegrationForDialog = isCreateIntegrationDialogOpen ? 
createIntegrationDefinition : undefined; const selectedInstructions = selectedIntegrationForDialog?.instructions?.trim(); - const integrationHomeHref = useMemo(() => { - if (!domainId) return "#"; - const selectedIntegrationId = - integrationRef?.id || - integrations?.find((integration) => integration.spec?.integrationName === selectedIntegrationForDialog?.name) - ?.metadata?.id; - if (selectedIntegrationId) { - return `/${domainId}/settings/integrations/${selectedIntegrationId}`; - } - return `/${domainId}/settings/integrations`; - }, [domainId, integrationRef?.id, integrations, selectedIntegrationForDialog?.name]); const nodeWebhookUrl = useMemo(() => { if (!nodeId) return ""; const node = workflowNodes.find((n) => n.id === nodeId); From e8d7b27ef654135be7a20fd306543dd1e4d15bbf Mon Sep 17 00:00:00 2001 From: yinebebt <84726535+yinebebt@users.noreply.github.com> Date: Wed, 11 Feb 2026 19:18:41 +0300 Subject: [PATCH 093/160] feat: AWS CodeArtifact expansion - repos + package version operations (#2944) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Adds six AWS CodeArtifact components and fixes canvas save when a node references a removed integration. ## New components - **CodeArtifact • Create Repository** - Create a repository in a domain - **CodeArtifact • Delete Repository** - Delete a repository from a domain - **CodeArtifact • Update Package Versions Status** - Set version status (Archived / Published / Unlisted) - **CodeArtifact • Copy Package Versions** - Copy versions between repos in the same domain - **CodeArtifact • Delete Package Versions** - Permanently delete versions and assets - **CodeArtifact • Dispose Package Versions** - Delete assets and set status to Disposed ## Bug fixes - **AWS error formatting:** Avoid double colon in error messages when `Code` is empty; guard against nil integration in `CredentialsFromInstallation`. 
- **Logging:** “Already being processed” for queue items is now logged at Debug level. ## Docs and tooling - **docs/components/AWS.mdx** - All six CodeArtifact components documented - **docs/development/aws-codeartifact-expansion-plan.md** - Plan, manual testing notes, integration dropdown reminder - **docs/contributing/connecting-to-3rdparty-services-from-development.md** - AWS OIDC + tunnel setup and troubleshooting - **Makefile** - New `check` target (format.go, lint, check.build.app, format.js, check.build.ui) ## Testing - All six package operations tested manually (Create/Delete Repository; Update Status, Copy, Delete, Dispose Package Versions). - Demo video is uploaded off-GitHub due to file size: [Watch on Vimeo](https://vimeo.com/1162979840?share=copy&fl=sv&fe=ci). --- Closes #2779 --------- Signed-off-by: yinebebt Signed-off-by: Lucas Pinheiro Co-authored-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- web_src/src/pages/workflowv2/mappers/aws/index.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/web_src/src/pages/workflowv2/mappers/aws/index.ts b/web_src/src/pages/workflowv2/mappers/aws/index.ts index e431554a64..8e7e2c980b 100644 --- a/web_src/src/pages/workflowv2/mappers/aws/index.ts +++ b/web_src/src/pages/workflowv2/mappers/aws/index.ts @@ -14,7 +14,6 @@ import { deletePackageVersionsMapper } from "./codeartifact/delete_package_versi import { deleteRepositoryMapper } from "./codeartifact/delete_repository"; import { disposePackageVersionsMapper } from "./codeartifact/dispose_package_versions"; import { updatePackageVersionsStatusMapper } from "./codeartifact/update_package_versions_status"; -import { onAlarmTriggerRenderer } from "./cloudwatch/on_alarm"; export const componentMappers: Record = { "lambda.runFunction": runFunctionMapper, From 898d20ff5278727e596117993ecda5a18ece2795 Mon Sep 17 00:00:00 2001 From: Vikram Date: Wed, 11 Feb 2026 22:42:14 +0530 Subject: [PATCH 094/160] feat: Add CircleCI integration (#2916) MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What Changed Added CircleCI integration with API token authentication. This includes two components: a trigger that listens for pipeline completions and an action that starts pipelines and tracks them to completion. ## Why This integration enables SuperPlane users to orchestrate CircleCI pipelines within their workflows. Users can trigger builds, monitor pipeline status, and react to completion events without leaving SuperPlane. ## How **Base Integration:** - Uses CircleCI Personal API Token for authentication - Implements webhook provisioning for real-time events - Follows the Semaphore integration pattern **On Pipeline Completed (Trigger):** - Registers webhooks with CircleCI to receive workflow completion events - Verifies webhook signatures using HMAC SHA256 - Emits events when workflows finish (success/failed/canceled) **Trigger Pipeline (Action):** - Starts pipelines via CircleCI API with configurable parameters - Monitors completion using webhook + polling fallback (10s interval) - Routes to success/failed output channels based on final workflow status - Automatically injects SuperPlane execution context as pipeline parameters **Tests & Documentation:** - Unit tests for all components - Integration tests for webhook handling - Full documentation in `docs/components/CircleCI.mdx` ## Demo Video https://www.youtube.com/watch?v=PvzHGetsujk The video shows: 1. Integration setup with API token 2. On Pipeline Completed trigger detecting a pipeline completion 3. 
Trigger Pipeline action starting a pipeline and tracking it to success ## Related Issues Closes #1957 ## Breaking Changes None --------- Signed-off-by: vikramships Signed-off-by: Ramesh Kumar Voodi Signed-off-by: Igor Šarčević Co-authored-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- web_src/src/assets/icons/integrations/circleci.svg | 14 +++----------- .../src/ui/componentSidebar/integrationIcons.tsx | 2 -- 2 files changed, 3 insertions(+), 13 deletions(-) diff --git a/web_src/src/assets/icons/integrations/circleci.svg b/web_src/src/assets/icons/integrations/circleci.svg index 6c5d6cd8c9..c08ac7ffa4 100644 --- a/web_src/src/assets/icons/integrations/circleci.svg +++ b/web_src/src/assets/icons/integrations/circleci.svg @@ -1,12 +1,4 @@ - - - - - + + + diff --git a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index fbfa0360e4..1391f8329e 100644 --- a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -2,8 +2,6 @@ import { resolveIcon } from "@/lib/utils"; import React from "react"; import awsIcon from "@/assets/icons/integrations/aws.svg"; import awsLambdaIcon from "@/assets/icons/integrations/aws.lambda.svg"; -import circleciIcon from "@/assets/icons/integrations/circleci.svg"; -import awsCloudwatchIcon from "@/assets/icons/integrations/aws.cloudwatch.svg"; import cloudflareIcon from "@/assets/icons/integrations/cloudflare.svg"; import dash0Icon from "@/assets/icons/integrations/dash0.svg"; import datadogIcon from "@/assets/icons/integrations/datadog.svg"; From f1275dd6e58ac63e67daabf603d8b1ea44ad2e55 Mon Sep 17 00:00:00 2001 From: Vikram Date: Wed, 11 Feb 2026 22:54:55 +0530 Subject: [PATCH 095/160] feat: Add render.getService and render.getDeploy components (#3016) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Implements #2989 (1/3) Adds two read-only components to the Render 
integration: - **Get Service** — retrieve service details by ID - **Get Deploy** — fetch deploy information by ID Also includes a fix for `Service.Suspended` field type (was `bool`, Render API actually returns a string like `"not_suspended"`). Shared code included: `payloads.go` (payload helpers used by subsequent PRs) and `base.ts` (shared frontend mapper base). ## Video demo https://youtu.be/Ok8-S31hdbI --------- Signed-off-by: vikramships Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Render.mdx | 127 ------------------ pkg/integrations/render/example.go | 56 -------- pkg/integrations/render/render.go | 4 - .../pages/workflowv2/mappers/render/index.ts | 5 - 4 files changed, 192 deletions(-) diff --git a/docs/components/Render.mdx b/docs/components/Render.mdx index b98dd5a24d..89b766a1b4 100644 --- a/docs/components/Render.mdx +++ b/docs/components/Render.mdx @@ -20,9 +20,6 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; - - - ## Instructions @@ -313,127 +310,3 @@ Emits a `render.service` payload containing service fields like `serviceId`, `se } ``` - - -## Purge Cache - -The Purge Cache component requests a build cache purge for a Render service. - -### Use Cases - -- **Cache reset**: Force a clean rebuild when you suspect stale dependencies or build artifacts -- **Operational tooling**: Provide a one-click cache purge in incident response workflows - -### Configuration - -- **Service**: Render service whose build cache should be purged - -### Output - -Emits a `render.cache.purge.requested` payload with `serviceId` and a `status` field indicating the request was accepted. - -### Example Output - -```json -{ - "data": { - "serviceId": "srv-cukouhrtq21c73e9scng", - "status": "accepted" - }, - "timestamp": "2026-02-05T16:20:00.000000Z", - "type": "render.cache.purge.requested" -} -``` - - - -## Rollback Deploy - -The Rollback Deploy component triggers a rollback deploy for a Render service and waits for it to complete. 
- -### Use Cases - -- **Automated recovery**: Roll back after detecting errors in a new deploy -- **One-click rollback**: Trigger rollbacks from an incident workflow - -### How It Works - -1. Triggers a rollback deploy for the selected Render service via the Render API -2. Waits for the deploy to complete (via deploy_ended webhook and optional polling fallback) -3. Routes execution based on deploy outcome: - - **Success channel**: Deploy completed successfully (status is `live`) - - **Failed channel**: Deploy failed or was cancelled - -### Configuration - -- **Service**: Render service to roll back -- **Deploy ID**: The deploy ID to roll back to (supports expressions) - -### Output Channels - -- **Success**: Emitted when the rollback deploy completes successfully -- **Failed**: Emitted when the rollback deploy fails or is cancelled - -### Notes - -- Uses the existing integration webhook for deploy_ended events -- Falls back to polling if the webhook does not arrive -- Includes `rollbackToDeployId` in the output payload for reference -- Requires a Render API key configured on the integration - -### Example Output - -```json -{ - "data": { - "createdAt": "2026-02-05T16:18:00.000000Z", - "deployId": "dep-cukouhrtq21c73e9scng", - "rollbackToDeployId": "dep-cukouhrtq21c73e9scnf", - "serviceId": "srv-cukouhrtq21c73e9scng", - "status": "build_in_progress", - "trigger": "rollback" - }, - "timestamp": "2026-02-05T16:18:00.000000Z", - "type": "render.deploy" -} -``` - - - -## Update Env Var - -The Update Env Var component updates a Render service environment variable. 
- -### Use Cases - -- **Rotate secrets**: Generate a new value for an env var (for example, API tokens) and optionally emit it -- **Configuration changes**: Update non-secret environment values as part of a workflow - -### Configuration - -- **Service**: Render service that owns the env var -- **Key**: Env var key to update -- **Value Strategy**: - - `Set value`: provide the `Value` field - - `Generate value`: request Render to generate a new value -- **Value**: New env var value (sensitive). Required when using `Set value` -- **Emit Value**: When enabled, include the env var `value` in output. Disabled by default to avoid leaking secrets. - -### Output - -Emits a `render.envVar.updated` payload with `serviceId`, `key`, and a `valueGenerated` boolean. The `value` field is only included when `emitValue` is enabled. - -### Example Output - -```json -{ - "data": { - "key": "DATABASE_URL", - "serviceId": "srv-cukouhrtq21c73e9scng", - "valueGenerated": false - }, - "timestamp": "2026-02-05T16:25:00.000000Z", - "type": "render.envVar.updated" -} -``` - diff --git a/pkg/integrations/render/example.go b/pkg/integrations/render/example.go index 488cf96b07..6adc182241 100644 --- a/pkg/integrations/render/example.go +++ b/pkg/integrations/render/example.go @@ -22,18 +22,6 @@ var exampleOutputGetServiceBytes []byte //go:embed example_output_get_deploy.json var exampleOutputGetDeployBytes []byte -//go:embed example_output_cancel_deploy.json -var exampleOutputCancelDeployBytes []byte - -//go:embed example_output_rollback_deploy.json -var exampleOutputRollbackDeployBytes []byte - -//go:embed example_output_purge_cache.json -var exampleOutputPurgeCacheBytes []byte - -//go:embed example_output_update_env_var.json -var exampleOutputUpdateEnvVarBytes []byte - var exampleDataOnDeployOnce sync.Once var exampleDataOnDeploy map[string]any @@ -49,18 +37,6 @@ var exampleOutputGetService map[string]any var exampleOutputGetDeployOnce sync.Once var exampleOutputGetDeploy map[string]any -var 
exampleOutputCancelDeployOnce sync.Once -var exampleOutputCancelDeploy map[string]any - -var exampleOutputRollbackDeployOnce sync.Once -var exampleOutputRollbackDeploy map[string]any - -var exampleOutputPurgeCacheOnce sync.Once -var exampleOutputPurgeCache map[string]any - -var exampleOutputUpdateEnvVarOnce sync.Once -var exampleOutputUpdateEnvVar map[string]any - func (t *OnDeploy) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON( &exampleDataOnDeployOnce, @@ -100,35 +76,3 @@ func (c *GetDeploy) ExampleOutput() map[string]any { &exampleOutputGetDeploy, ) } - -func (c *CancelDeploy) ExampleOutput() map[string]any { - return utils.UnmarshalEmbeddedJSON( - &exampleOutputCancelDeployOnce, - exampleOutputCancelDeployBytes, - &exampleOutputCancelDeploy, - ) -} - -func (c *RollbackDeploy) ExampleOutput() map[string]any { - return utils.UnmarshalEmbeddedJSON( - &exampleOutputRollbackDeployOnce, - exampleOutputRollbackDeployBytes, - &exampleOutputRollbackDeploy, - ) -} - -func (c *PurgeCache) ExampleOutput() map[string]any { - return utils.UnmarshalEmbeddedJSON( - &exampleOutputPurgeCacheOnce, - exampleOutputPurgeCacheBytes, - &exampleOutputPurgeCache, - ) -} - -func (c *UpdateEnvVar) ExampleOutput() map[string]any { - return utils.UnmarshalEmbeddedJSON( - &exampleOutputUpdateEnvVarOnce, - exampleOutputUpdateEnvVarBytes, - &exampleOutputUpdateEnvVar, - ) -} diff --git a/pkg/integrations/render/render.go b/pkg/integrations/render/render.go index 22d470a429..96130edaf4 100644 --- a/pkg/integrations/render/render.go +++ b/pkg/integrations/render/render.go @@ -102,10 +102,6 @@ func (r *Render) Components() []core.Component { &Deploy{}, &GetService{}, &GetDeploy{}, - &CancelDeploy{}, - &RollbackDeploy{}, - &PurgeCache{}, - &UpdateEnvVar{}, } } diff --git a/web_src/src/pages/workflowv2/mappers/render/index.ts b/web_src/src/pages/workflowv2/mappers/render/index.ts index d2e55e4bfa..f35e323b4b 100644 --- a/web_src/src/pages/workflowv2/mappers/render/index.ts +++ 
b/web_src/src/pages/workflowv2/mappers/render/index.ts @@ -1,6 +1,5 @@ import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../types"; import { deployMapper, DEPLOY_STATE_REGISTRY } from "./deploy"; -import { cancelDeployMapper } from "./cancel_deploy"; import { getDeployMapper } from "./get_deploy"; import { getServiceMapper } from "./get_service"; import { onBuildTriggerRenderer } from "./on_build"; @@ -13,10 +12,6 @@ export const componentMappers: Record = { deploy: deployMapper, getService: getServiceMapper, getDeploy: getDeployMapper, - cancelDeploy: cancelDeployMapper, - rollbackDeploy: rollbackDeployMapper, - purgeCache: purgeCacheMapper, - updateEnvVar: updateEnvVarMapper, }; export const triggerRenderers: Record = { From c175368ff76e223529dc37dd7a8174dcdada985f Mon Sep 17 00:00:00 2001 From: Vikram Date: Thu, 12 Feb 2026 01:07:53 +0530 Subject: [PATCH 096/160] feat: Add render.cancelDeploy and render.rollbackDeploy components (#3017) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Implements #2989 (2/3) — depends on #3016 Adds two deploy management components to the Render integration: - **Cancel Deploy** — cancel an in-progress deploy - **Rollback Deploy** — rollback to a previous deploy by ID Both reuse `deployDataFromDeployResponse` from the shared `payloads.go` added in #3016. ## Video demo https://youtu.be/Ok8-S31hdbI --------- Signed-off-by: vikramships Signed-off-by: Pedro F. Leao Co-authored-by: Pedro F. 
Leao Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Render.mdx | 54 +++++++++++++++++++ pkg/integrations/render/example.go | 27 ++++++++++ pkg/integrations/render/render.go | 2 + .../pages/workflowv2/mappers/render/index.ts | 6 +-- 4 files changed, 86 insertions(+), 3 deletions(-) diff --git a/docs/components/Render.mdx b/docs/components/Render.mdx index 89b766a1b4..0060052dbf 100644 --- a/docs/components/Render.mdx +++ b/docs/components/Render.mdx @@ -20,6 +20,7 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + ## Instructions @@ -310,3 +311,56 @@ Emits a `render.service` payload containing service fields like `serviceId`, `se } ``` + + +## Rollback Deploy + +The Rollback Deploy component triggers a rollback deploy for a Render service and waits for it to complete. + +### Use Cases + +- **Automated recovery**: Roll back after detecting errors in a new deploy +- **One-click rollback**: Trigger rollbacks from an incident workflow + +### How It Works + +1. Triggers a rollback deploy for the selected Render service via the Render API +2. Waits for the deploy to complete (via deploy_ended webhook and optional polling fallback) +3. 
Routes execution based on deploy outcome: + - **Success channel**: Deploy completed successfully (status is `live`) + - **Failed channel**: Deploy failed or was cancelled + +### Configuration + +- **Service**: Render service to roll back +- **Deploy ID**: The deploy ID to roll back to (supports expressions) + +### Output Channels + +- **Success**: Emitted when the rollback deploy completes successfully +- **Failed**: Emitted when the rollback deploy fails or is cancelled + +### Notes + +- Uses the existing integration webhook for deploy_ended events +- Falls back to polling if the webhook does not arrive +- Includes `rollbackToDeployId` in the output payload for reference +- Requires a Render API key configured on the integration + +### Example Output + +```json +{ + "data": { + "createdAt": "2026-02-05T16:18:00.000000Z", + "deployId": "dep-cukouhrtq21c73e9scng", + "rollbackToDeployId": "dep-cukouhrtq21c73e9scnf", + "serviceId": "srv-cukouhrtq21c73e9scng", + "status": "build_in_progress", + "trigger": "rollback" + }, + "timestamp": "2026-02-05T16:18:00.000000Z", + "type": "render.deploy" +} +``` + diff --git a/pkg/integrations/render/example.go b/pkg/integrations/render/example.go index 6adc182241..cc5fb0fbbf 100644 --- a/pkg/integrations/render/example.go +++ b/pkg/integrations/render/example.go @@ -22,6 +22,12 @@ var exampleOutputGetServiceBytes []byte //go:embed example_output_get_deploy.json var exampleOutputGetDeployBytes []byte +//go:embed example_output_cancel_deploy.json +var exampleOutputCancelDeployBytes []byte + +//go:embed example_output_rollback_deploy.json +var exampleOutputRollbackDeployBytes []byte + var exampleDataOnDeployOnce sync.Once var exampleDataOnDeploy map[string]any @@ -37,6 +43,12 @@ var exampleOutputGetService map[string]any var exampleOutputGetDeployOnce sync.Once var exampleOutputGetDeploy map[string]any +var exampleOutputCancelDeployOnce sync.Once +var exampleOutputCancelDeploy map[string]any + +var exampleOutputRollbackDeployOnce 
sync.Once +var exampleOutputRollbackDeploy map[string]any + func (t *OnDeploy) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON( &exampleDataOnDeployOnce, @@ -76,3 +88,18 @@ func (c *GetDeploy) ExampleOutput() map[string]any { &exampleOutputGetDeploy, ) } +func (c *CancelDeploy) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON( + &exampleOutputCancelDeployOnce, + exampleOutputCancelDeployBytes, + &exampleOutputCancelDeploy, + ) +} + +func (c *RollbackDeploy) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON( + &exampleOutputRollbackDeployOnce, + exampleOutputRollbackDeployBytes, + &exampleOutputRollbackDeploy, + ) +} diff --git a/pkg/integrations/render/render.go b/pkg/integrations/render/render.go index 96130edaf4..66381782f8 100644 --- a/pkg/integrations/render/render.go +++ b/pkg/integrations/render/render.go @@ -102,6 +102,8 @@ func (r *Render) Components() []core.Component { &Deploy{}, &GetService{}, &GetDeploy{}, + &CancelDeploy{}, + &RollbackDeploy{}, } } diff --git a/web_src/src/pages/workflowv2/mappers/render/index.ts b/web_src/src/pages/workflowv2/mappers/render/index.ts index f35e323b4b..e2f44b2e7e 100644 --- a/web_src/src/pages/workflowv2/mappers/render/index.ts +++ b/web_src/src/pages/workflowv2/mappers/render/index.ts @@ -1,17 +1,18 @@ import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../types"; import { deployMapper, DEPLOY_STATE_REGISTRY } from "./deploy"; +import { cancelDeployMapper } from "./cancel_deploy"; import { getDeployMapper } from "./get_deploy"; import { getServiceMapper } from "./get_service"; import { onBuildTriggerRenderer } from "./on_build"; import { onDeployTriggerRenderer } from "./on_deploy"; -import { PURGE_CACHE_STATE_REGISTRY, purgeCacheMapper } from "./purge_cache"; import { rollbackDeployMapper } from "./rollback_deploy"; -import { updateEnvVarMapper } from "./update_env_var"; export const componentMappers: Record = { deploy: deployMapper, 
getService: getServiceMapper, getDeploy: getDeployMapper, + cancelDeploy: cancelDeployMapper, + rollbackDeploy: rollbackDeployMapper, }; export const triggerRenderers: Record = { @@ -23,5 +24,4 @@ export const eventStateRegistry: Record = { deploy: DEPLOY_STATE_REGISTRY, cancelDeploy: DEPLOY_STATE_REGISTRY, rollbackDeploy: DEPLOY_STATE_REGISTRY, - purgeCache: PURGE_CACHE_STATE_REGISTRY, }; From 631f1a39e637c0fa503a5747c5d80d318d85272c Mon Sep 17 00:00:00 2001 From: Vikram Date: Thu, 12 Feb 2026 01:59:19 +0530 Subject: [PATCH 097/160] feat: Add Render Purge Cache and Update Env Var components (#3018) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Implements #2989 (3/3) — depends on #3016, #3017 Adds two service management components to the Render integration: - **Purge Cache** — clear build cache for a service - **Update Env Var** — update environment variables with set or generate strategies ## Video demo https://youtu.be/Ok8-S31hdbI --------- Signed-off-by: vikramships Signed-off-by: Pedro F. Leao Co-authored-by: Pedro F. Leao Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Render.mdx | 73 +++++++++++++++++++ pkg/integrations/render/example.go | 29 ++++++++ pkg/integrations/render/render.go | 2 + .../pages/workflowv2/mappers/render/index.ts | 5 ++ 4 files changed, 109 insertions(+) diff --git a/docs/components/Render.mdx b/docs/components/Render.mdx index 0060052dbf..b98dd5a24d 100644 --- a/docs/components/Render.mdx +++ b/docs/components/Render.mdx @@ -20,7 +20,9 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + + ## Instructions @@ -311,6 +313,38 @@ Emits a `render.service` payload containing service fields like `serviceId`, `se } ``` + + +## Purge Cache + +The Purge Cache component requests a build cache purge for a Render service. 
+ +### Use Cases + +- **Cache reset**: Force a clean rebuild when you suspect stale dependencies or build artifacts +- **Operational tooling**: Provide a one-click cache purge in incident response workflows + +### Configuration + +- **Service**: Render service whose build cache should be purged + +### Output + +Emits a `render.cache.purge.requested` payload with `serviceId` and a `status` field indicating the request was accepted. + +### Example Output + +```json +{ + "data": { + "serviceId": "srv-cukouhrtq21c73e9scng", + "status": "accepted" + }, + "timestamp": "2026-02-05T16:20:00.000000Z", + "type": "render.cache.purge.requested" +} +``` + ## Rollback Deploy @@ -364,3 +398,42 @@ The Rollback Deploy component triggers a rollback deploy for a Render service an } ``` + + +## Update Env Var + +The Update Env Var component updates a Render service environment variable. + +### Use Cases + +- **Rotate secrets**: Generate a new value for an env var (for example, API tokens) and optionally emit it +- **Configuration changes**: Update non-secret environment values as part of a workflow + +### Configuration + +- **Service**: Render service that owns the env var +- **Key**: Env var key to update +- **Value Strategy**: + - `Set value`: provide the `Value` field + - `Generate value`: request Render to generate a new value +- **Value**: New env var value (sensitive). Required when using `Set value` +- **Emit Value**: When enabled, include the env var `value` in output. Disabled by default to avoid leaking secrets. + +### Output + +Emits a `render.envVar.updated` payload with `serviceId`, `key`, and a `valueGenerated` boolean. The `value` field is only included when `emitValue` is enabled. 
+ +### Example Output + +```json +{ + "data": { + "key": "DATABASE_URL", + "serviceId": "srv-cukouhrtq21c73e9scng", + "valueGenerated": false + }, + "timestamp": "2026-02-05T16:25:00.000000Z", + "type": "render.envVar.updated" +} +``` + diff --git a/pkg/integrations/render/example.go b/pkg/integrations/render/example.go index cc5fb0fbbf..488cf96b07 100644 --- a/pkg/integrations/render/example.go +++ b/pkg/integrations/render/example.go @@ -28,6 +28,12 @@ var exampleOutputCancelDeployBytes []byte //go:embed example_output_rollback_deploy.json var exampleOutputRollbackDeployBytes []byte +//go:embed example_output_purge_cache.json +var exampleOutputPurgeCacheBytes []byte + +//go:embed example_output_update_env_var.json +var exampleOutputUpdateEnvVarBytes []byte + var exampleDataOnDeployOnce sync.Once var exampleDataOnDeploy map[string]any @@ -49,6 +55,12 @@ var exampleOutputCancelDeploy map[string]any var exampleOutputRollbackDeployOnce sync.Once var exampleOutputRollbackDeploy map[string]any +var exampleOutputPurgeCacheOnce sync.Once +var exampleOutputPurgeCache map[string]any + +var exampleOutputUpdateEnvVarOnce sync.Once +var exampleOutputUpdateEnvVar map[string]any + func (t *OnDeploy) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON( &exampleDataOnDeployOnce, @@ -88,6 +100,7 @@ func (c *GetDeploy) ExampleOutput() map[string]any { &exampleOutputGetDeploy, ) } + func (c *CancelDeploy) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON( &exampleOutputCancelDeployOnce, @@ -103,3 +116,19 @@ func (c *RollbackDeploy) ExampleOutput() map[string]any { &exampleOutputRollbackDeploy, ) } + +func (c *PurgeCache) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON( + &exampleOutputPurgeCacheOnce, + exampleOutputPurgeCacheBytes, + &exampleOutputPurgeCache, + ) +} + +func (c *UpdateEnvVar) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON( + &exampleOutputUpdateEnvVarOnce, + exampleOutputUpdateEnvVarBytes, + 
&exampleOutputUpdateEnvVar, + ) +} diff --git a/pkg/integrations/render/render.go b/pkg/integrations/render/render.go index 66381782f8..22d470a429 100644 --- a/pkg/integrations/render/render.go +++ b/pkg/integrations/render/render.go @@ -104,6 +104,8 @@ func (r *Render) Components() []core.Component { &GetDeploy{}, &CancelDeploy{}, &RollbackDeploy{}, + &PurgeCache{}, + &UpdateEnvVar{}, } } diff --git a/web_src/src/pages/workflowv2/mappers/render/index.ts b/web_src/src/pages/workflowv2/mappers/render/index.ts index e2f44b2e7e..d2e55e4bfa 100644 --- a/web_src/src/pages/workflowv2/mappers/render/index.ts +++ b/web_src/src/pages/workflowv2/mappers/render/index.ts @@ -5,7 +5,9 @@ import { getDeployMapper } from "./get_deploy"; import { getServiceMapper } from "./get_service"; import { onBuildTriggerRenderer } from "./on_build"; import { onDeployTriggerRenderer } from "./on_deploy"; +import { PURGE_CACHE_STATE_REGISTRY, purgeCacheMapper } from "./purge_cache"; import { rollbackDeployMapper } from "./rollback_deploy"; +import { updateEnvVarMapper } from "./update_env_var"; export const componentMappers: Record = { deploy: deployMapper, @@ -13,6 +15,8 @@ export const componentMappers: Record = { getDeploy: getDeployMapper, cancelDeploy: cancelDeployMapper, rollbackDeploy: rollbackDeployMapper, + purgeCache: purgeCacheMapper, + updateEnvVar: updateEnvVarMapper, }; export const triggerRenderers: Record = { @@ -24,4 +28,5 @@ export const eventStateRegistry: Record = { deploy: DEPLOY_STATE_REGISTRY, cancelDeploy: DEPLOY_STATE_REGISTRY, rollbackDeploy: DEPLOY_STATE_REGISTRY, + purgeCache: PURGE_CACHE_STATE_REGISTRY, }; From bab8b0a8e5cee020486a07b95ed9416364be8aae Mon Sep 17 00:00:00 2001 From: Petar Perovic Date: Thu, 12 Feb 2026 07:29:56 +0100 Subject: [PATCH 098/160] chore: Multi step integrations setup UI improvement (#3062) Signed-off-by: Muhammad Fuzail Zubari --- .../organization/settings/IntegrationDetails.tsx | 2 +- web_src/src/ui/IntegrationInstructions.tsx | 2 +- 
web_src/src/ui/componentSidebar/SettingsTab.tsx | 6 +++--- web_src/src/ui/componentSidebar/index.tsx | 14 ++------------ 4 files changed, 7 insertions(+), 17 deletions(-) diff --git a/web_src/src/pages/organization/settings/IntegrationDetails.tsx b/web_src/src/pages/organization/settings/IntegrationDetails.tsx index c6de6c25f4..ecf459ac6e 100644 --- a/web_src/src/pages/organization/settings/IntegrationDetails.tsx +++ b/web_src/src/pages/organization/settings/IntegrationDetails.tsx @@ -257,7 +257,6 @@ export function IntegrationDetails({ organizationId }: IntegrationDetailsProps) Integration Name * -

    A unique name for this integration

    +

    A unique name for this integration

    {integrationDef.configuration.map((field: ConfigurationField) => ( diff --git a/web_src/src/ui/IntegrationInstructions.tsx b/web_src/src/ui/IntegrationInstructions.tsx index 8ece26cf1c..0654f44c2a 100644 --- a/web_src/src/ui/IntegrationInstructions.tsx +++ b/web_src/src/ui/IntegrationInstructions.tsx @@ -39,7 +39,7 @@ export function IntegrationInstructions({ description, onContinue, className = " ol: ({ children }) =>
      {children}
    , li: ({ children }) =>
  • {children}
  • , a: ({ children, href }) => ( - + {children} ), diff --git a/web_src/src/ui/componentSidebar/SettingsTab.tsx b/web_src/src/ui/componentSidebar/SettingsTab.tsx index 3a50857356..016e7967bc 100644 --- a/web_src/src/ui/componentSidebar/SettingsTab.tsx +++ b/web_src/src/ui/componentSidebar/SettingsTab.tsx @@ -404,10 +404,10 @@ export function SettingsTab({ {selectedIntegrationFull.status?.state diff --git a/web_src/src/ui/componentSidebar/index.tsx b/web_src/src/ui/componentSidebar/index.tsx index fc625ff9b6..bb8f5b9b03 100644 --- a/web_src/src/ui/componentSidebar/index.tsx +++ b/web_src/src/ui/componentSidebar/index.tsx @@ -6,7 +6,7 @@ import { Input } from "@/components/ui/input"; import { Label } from "@/components/ui/label"; import { getIntegrationTypeDisplayName } from "@/utils/integrationDisplayName"; import { resolveIcon } from "@/lib/utils"; -import { Check, Copy, Loader2, TriangleAlert, X } from "lucide-react"; +import { Check, Copy, Loader2, Settings, TriangleAlert, X } from "lucide-react"; import React, { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { getHeaderIconSrc, IntegrationIcon } from "@/ui/componentSidebar/integrationIcons"; import { @@ -283,16 +283,6 @@ export const ComponentSidebar = ({ ); const selectedIntegrationForDialog = isCreateIntegrationDialogOpen ? createIntegrationDefinition : undefined; const selectedInstructions = selectedIntegrationForDialog?.instructions?.trim(); - const nodeWebhookUrl = useMemo(() => { - if (!nodeId) return ""; - const node = workflowNodes.find((n) => n.id === nodeId); - const metadata = node?.metadata as Record | undefined; - if (!metadata) return ""; - const webhookUrl = typeof metadata.webhookUrl === "string" ? metadata.webhookUrl : ""; - const webhookUrlSnake = typeof metadata.webhook_url === "string" ? metadata.webhook_url : ""; - const legacyUrl = typeof metadata.url === "string" ? 
metadata.url : ""; - return webhookUrl || webhookUrlSnake || legacyUrl || ""; - }, [nodeId, workflowNodes]); const handleCopyNodeId = useCallback(async () => { const textToCopy = nodeWebhookUrl || nodeId; @@ -1128,13 +1118,13 @@ export const ComponentSidebar = ({ Integration Name * -

    A unique name for this integration

    setConfigureIntegrationName(e.target.value)} placeholder="e.g., my-app-integration" /> +

    A unique name for this integration

    {configureIntegrationDefinition?.configuration && From 430644fea27abea5ddf540e316d150b6eef281c7 Mon Sep 17 00:00:00 2001 From: harxhist Date: Thu, 12 Feb 2026 18:30:07 +0530 Subject: [PATCH 099/160] feat: Add Cursor Integration (#2991) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #2618 ## Description This PR introduces the **Cursor** integration to SuperPlane, allowing users to build workflows utilizing Cursor's AI-powered capabilities. It includes the base integration setup and two starter components: 1. **Launch Cloud Agent (Action):** Triggers a Cursor Cloud Agent on a specific repository/branch and tracks the execution state to completion. It links to the Cloud Agent and PR in the output. This agent has no limits(except credit limit). 2. **Get Daily Usage Data (Action):** Fetches daily team usage metrics from the Cursor Admin API for reporting and cost tracking. ### Implementation Notes - **Authentication:** Connects via Cursor BasicAuth (Admin API and Cloud Agents API). - **Code Logic:** The `Launch Cloud Agent` implementation handles significant logic to track the agent's lifecycle (polling status, handling completion, etc.). The code is structured to robustly handle this weight to ensure reliable execution tracking. 
## Video Demo [Watch the Loom Video](https://www.loom.com/share/2f6f3f98ab6b47ce88444a15f93afe45) ## Checklist - [x] My code follows the style guidelines of this project - [x] I have performed a self-review of my own code - [x] I have commented my code, particularly in hard-to-understand areas - [x] I have made corresponding changes to the documentation (`make gen.components.docs`) - [x] I have added tests that prove my fix is effective or that my feature works - [x] New and existing unit tests pass locally with my changes - [x] I have signed off my commits (`git commit -s`) --------- Signed-off-by: Harsh Signed-off-by: Igor Šarčević Co-authored-by: Igor Šarčević Co-authored-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Cursor.mdx | 51 ------------------- pkg/integrations/cursor/client.go | 30 ----------- pkg/integrations/cursor/cursor.go | 1 - pkg/integrations/cursor/cursor_test.go | 3 +- .../pages/workflowv2/mappers/cursor/index.ts | 3 -- web_src/src/pages/workflowv2/mappers/index.ts | 8 +++ 6 files changed, 9 insertions(+), 87 deletions(-) diff --git a/docs/components/Cursor.mdx b/docs/components/Cursor.mdx index 37f746ed29..3c869807c7 100644 --- a/docs/components/Cursor.mdx +++ b/docs/components/Cursor.mdx @@ -10,7 +10,6 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; - @@ -87,56 +86,6 @@ The output includes per-user daily metrics: } ``` - - -## Get Last Message - -The Get Last Message component retrieves the last message from a Cursor Cloud Agent's conversation history. - -### Use Cases - -- **Message tracking**: Get the latest response or prompt from an agent conversation -- **Workflow automation**: Use the last message as input for downstream components -- **Status monitoring**: Check what the agent last communicated - -### How It Works - -1. Fetches the conversation history for the specified agent ID -2. Extracts the last message from the conversation -3. 
Returns the message details including ID, type (user_message or assistant_message), and text - -### Configuration - -- **Agent ID**: The unique identifier for the cloud agent (e.g., bc_abc123) - -### Output - -The output includes: -- **Agent ID**: The identifier of the agent -- **Message**: The last message object containing: - - **ID**: Unique message identifier - - **Type**: Either "user_message" or "assistant_message" - - **Text**: The message content - -### Notes - -- Requires a valid Cursor Cloud Agent API key configured in the integration -- If the agent has been deleted, the conversation cannot be accessed -- Returns nil if the conversation has no messages - -### Example Output - -```json -{ - "agentId": "bc_abc123", - "message": { - "id": "msg_005", - "text": "I've added a troubleshooting section to the README.", - "type": "assistant_message" - } -} -``` - ## Launch Cloud Agent diff --git a/pkg/integrations/cursor/client.go b/pkg/integrations/cursor/client.go index 4842cbb907..53e3e017dc 100644 --- a/pkg/integrations/cursor/client.go +++ b/pkg/integrations/cursor/client.go @@ -52,17 +52,6 @@ type ModelsResponse struct { Models []string `json:"models"` } -type ConversationMessage struct { - ID string `json:"id"` - Type string `json:"type"` - Text string `json:"text"` -} - -type ConversationResponse struct { - ID string `json:"id"` - Messages []ConversationMessage `json:"messages"` -} - func (c *Client) ListModels() ([]string, error) { if c.LaunchAgentKey == "" { return nil, fmt.Errorf("Cloud Agent API key is not configured") @@ -174,25 +163,6 @@ func (c *Client) CancelAgent(agentID string) error { return err } -func (c *Client) GetAgentConversation(agentID string) (*ConversationResponse, error) { - if c.LaunchAgentKey == "" { - return nil, fmt.Errorf("Cloud Agent API key is not configured") - } - - url := fmt.Sprintf("%s/v0/agents/%s/conversation", c.BaseURL, agentID) - responseBody, err := c.execRequest(http.MethodGet, url, nil, c.LaunchAgentKey) - if err 
!= nil { - return nil, err - } - - var response ConversationResponse - if err := json.Unmarshal(responseBody, &response); err != nil { - return nil, fmt.Errorf("failed to unmarshal conversation response: %w", err) - } - - return &response, nil -} - func (c *Client) execRequest(method, URL string, body io.Reader, apiKey string) ([]byte, error) { req, err := http.NewRequest(method, URL, body) if err != nil { diff --git a/pkg/integrations/cursor/cursor.go b/pkg/integrations/cursor/cursor.go index c24e3743b5..611ad102e7 100644 --- a/pkg/integrations/cursor/cursor.go +++ b/pkg/integrations/cursor/cursor.go @@ -96,7 +96,6 @@ func (i *Cursor) Components() []core.Component { return []core.Component{ &LaunchAgent{}, &GetDailyUsageData{}, - &GetLastMessage{}, } } diff --git a/pkg/integrations/cursor/cursor_test.go b/pkg/integrations/cursor/cursor_test.go index 21e038f4eb..bcb7a4a5d4 100644 --- a/pkg/integrations/cursor/cursor_test.go +++ b/pkg/integrations/cursor/cursor_test.go @@ -177,7 +177,7 @@ func Test__Cursor__Components(t *testing.T) { c := &Cursor{} components := c.Components() - assert.Len(t, components, 3) + assert.Len(t, components, 2) names := make([]string, len(components)) for i, comp := range components { @@ -186,7 +186,6 @@ func Test__Cursor__Components(t *testing.T) { assert.Contains(t, names, "cursor.launchAgent") assert.Contains(t, names, "cursor.getDailyUsageData") - assert.Contains(t, names, "cursor.getLastMessage") } func Test__Cursor__ListResources(t *testing.T) { diff --git a/web_src/src/pages/workflowv2/mappers/cursor/index.ts b/web_src/src/pages/workflowv2/mappers/cursor/index.ts index 455e80de74..d9ea382696 100644 --- a/web_src/src/pages/workflowv2/mappers/cursor/index.ts +++ b/web_src/src/pages/workflowv2/mappers/cursor/index.ts @@ -2,12 +2,10 @@ import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../typ import { buildActionStateRegistry } from "../utils"; import { launchAgentMapper } from "./launch_agent"; import { 
getDailyUsageDataMapper } from "./get_daily_usage_data"; -import { getLastMessageMapper } from "./get_last_message"; export const componentMappers: Record = { launchAgent: launchAgentMapper, getDailyUsageData: getDailyUsageDataMapper, - getLastMessage: getLastMessageMapper, }; export const triggerRenderers: Record = {}; @@ -15,5 +13,4 @@ export const triggerRenderers: Record = {}; export const eventStateRegistry: Record = { launchAgent: buildActionStateRegistry("completed"), getDailyUsageData: buildActionStateRegistry("completed"), - getLastMessage: buildActionStateRegistry("completed"), }; diff --git a/web_src/src/pages/workflowv2/mappers/index.ts b/web_src/src/pages/workflowv2/mappers/index.ts index 0717e3b68d..78eaf17ae6 100644 --- a/web_src/src/pages/workflowv2/mappers/index.ts +++ b/web_src/src/pages/workflowv2/mappers/index.ts @@ -106,6 +106,11 @@ import { triggerRenderers as claudeTriggerRenderers, eventStateRegistry as claudeEventStateRegistry, } from "./claude/index"; +import { + componentMappers as cursorComponentMappers, + triggerRenderers as cursorTriggerRenderers, + eventStateRegistry as cursorEventStateRegistry, +} from "./cursor/index"; import { componentMappers as dockerhubComponentMappers, customFieldRenderers as dockerhubCustomFieldRenderers, @@ -162,6 +167,7 @@ const appMappers: Record> = { openai: openaiComponentMappers, circleci: circleCIComponentMappers, claude: claudeComponentMappers, + cursor: cursorComponentMappers, dockerhub: dockerhubComponentMappers, }; @@ -184,6 +190,7 @@ const appTriggerRenderers: Record> = { openai: openaiTriggerRenderers, circleci: circleCITriggerRenderers, claude: claudeTriggerRenderers, + cursor: cursorTriggerRenderers, dockerhub: dockerhubTriggerRenderers, }; @@ -205,6 +212,7 @@ const appEventStateRegistries: Record circleci: circleCIEventStateRegistry, claude: claudeEventStateRegistry, aws: awsEventStateRegistry, + cursor: cursorEventStateRegistry, gitlab: gitlabEventStateRegistry, dockerhub: 
dockerhubEventStateRegistry, }; From a70f18a4dd41781e474bc5d466b25f3626bb4ed9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Thu, 12 Feb 2026 14:29:16 +0100 Subject: [PATCH 100/160] feat: Add new cursor.getLastMessage component (#3075) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Cursor.mdx | 51 +++++++++++++++++++ pkg/integrations/cursor/client.go | 30 +++++++++++ pkg/integrations/cursor/cursor.go | 1 + pkg/integrations/cursor/cursor_test.go | 3 +- .../pages/workflowv2/mappers/cursor/index.ts | 3 ++ 5 files changed, 87 insertions(+), 1 deletion(-) diff --git a/docs/components/Cursor.mdx b/docs/components/Cursor.mdx index 3c869807c7..37f746ed29 100644 --- a/docs/components/Cursor.mdx +++ b/docs/components/Cursor.mdx @@ -10,6 +10,7 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + @@ -86,6 +87,56 @@ The output includes per-user daily metrics: } ``` + + +## Get Last Message + +The Get Last Message component retrieves the last message from a Cursor Cloud Agent's conversation history. + +### Use Cases + +- **Message tracking**: Get the latest response or prompt from an agent conversation +- **Workflow automation**: Use the last message as input for downstream components +- **Status monitoring**: Check what the agent last communicated + +### How It Works + +1. Fetches the conversation history for the specified agent ID +2. Extracts the last message from the conversation +3. 
Returns the message details including ID, type (user_message or assistant_message), and text + +### Configuration + +- **Agent ID**: The unique identifier for the cloud agent (e.g., bc_abc123) + +### Output + +The output includes: +- **Agent ID**: The identifier of the agent +- **Message**: The last message object containing: + - **ID**: Unique message identifier + - **Type**: Either "user_message" or "assistant_message" + - **Text**: The message content + +### Notes + +- Requires a valid Cursor Cloud Agent API key configured in the integration +- If the agent has been deleted, the conversation cannot be accessed +- Returns nil if the conversation has no messages + +### Example Output + +```json +{ + "agentId": "bc_abc123", + "message": { + "id": "msg_005", + "text": "I've added a troubleshooting section to the README.", + "type": "assistant_message" + } +} +``` + ## Launch Cloud Agent diff --git a/pkg/integrations/cursor/client.go b/pkg/integrations/cursor/client.go index 53e3e017dc..4842cbb907 100644 --- a/pkg/integrations/cursor/client.go +++ b/pkg/integrations/cursor/client.go @@ -52,6 +52,17 @@ type ModelsResponse struct { Models []string `json:"models"` } +type ConversationMessage struct { + ID string `json:"id"` + Type string `json:"type"` + Text string `json:"text"` +} + +type ConversationResponse struct { + ID string `json:"id"` + Messages []ConversationMessage `json:"messages"` +} + func (c *Client) ListModels() ([]string, error) { if c.LaunchAgentKey == "" { return nil, fmt.Errorf("Cloud Agent API key is not configured") @@ -163,6 +174,25 @@ func (c *Client) CancelAgent(agentID string) error { return err } +func (c *Client) GetAgentConversation(agentID string) (*ConversationResponse, error) { + if c.LaunchAgentKey == "" { + return nil, fmt.Errorf("Cloud Agent API key is not configured") + } + + url := fmt.Sprintf("%s/v0/agents/%s/conversation", c.BaseURL, agentID) + responseBody, err := c.execRequest(http.MethodGet, url, nil, c.LaunchAgentKey) + if err 
!= nil { + return nil, err + } + + var response ConversationResponse + if err := json.Unmarshal(responseBody, &response); err != nil { + return nil, fmt.Errorf("failed to unmarshal conversation response: %w", err) + } + + return &response, nil +} + func (c *Client) execRequest(method, URL string, body io.Reader, apiKey string) ([]byte, error) { req, err := http.NewRequest(method, URL, body) if err != nil { diff --git a/pkg/integrations/cursor/cursor.go b/pkg/integrations/cursor/cursor.go index 611ad102e7..c24e3743b5 100644 --- a/pkg/integrations/cursor/cursor.go +++ b/pkg/integrations/cursor/cursor.go @@ -96,6 +96,7 @@ func (i *Cursor) Components() []core.Component { return []core.Component{ &LaunchAgent{}, &GetDailyUsageData{}, + &GetLastMessage{}, } } diff --git a/pkg/integrations/cursor/cursor_test.go b/pkg/integrations/cursor/cursor_test.go index bcb7a4a5d4..21e038f4eb 100644 --- a/pkg/integrations/cursor/cursor_test.go +++ b/pkg/integrations/cursor/cursor_test.go @@ -177,7 +177,7 @@ func Test__Cursor__Components(t *testing.T) { c := &Cursor{} components := c.Components() - assert.Len(t, components, 2) + assert.Len(t, components, 3) names := make([]string, len(components)) for i, comp := range components { @@ -186,6 +186,7 @@ func Test__Cursor__Components(t *testing.T) { assert.Contains(t, names, "cursor.launchAgent") assert.Contains(t, names, "cursor.getDailyUsageData") + assert.Contains(t, names, "cursor.getLastMessage") } func Test__Cursor__ListResources(t *testing.T) { diff --git a/web_src/src/pages/workflowv2/mappers/cursor/index.ts b/web_src/src/pages/workflowv2/mappers/cursor/index.ts index d9ea382696..455e80de74 100644 --- a/web_src/src/pages/workflowv2/mappers/cursor/index.ts +++ b/web_src/src/pages/workflowv2/mappers/cursor/index.ts @@ -2,10 +2,12 @@ import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../typ import { buildActionStateRegistry } from "../utils"; import { launchAgentMapper } from "./launch_agent"; import { 
getDailyUsageDataMapper } from "./get_daily_usage_data"; +import { getLastMessageMapper } from "./get_last_message"; export const componentMappers: Record = { launchAgent: launchAgentMapper, getDailyUsageData: getDailyUsageDataMapper, + getLastMessage: getLastMessageMapper, }; export const triggerRenderers: Record = {}; @@ -13,4 +15,5 @@ export const triggerRenderers: Record = {}; export const eventStateRegistry: Record = { launchAgent: buildActionStateRegistry("completed"), getDailyUsageData: buildActionStateRegistry("completed"), + getLastMessage: buildActionStateRegistry("completed"), }; From 742d74bfd4e739dc33f18569fd16e7c37d83d9c7 Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Thu, 12 Feb 2026 12:54:21 -0300 Subject: [PATCH 101/160] feat: aws.cloudwatch.onAlarm trigger (#3023) Bootstrapping the AWS CloudWatch integration, adding a `aws.cloudwatch.onAlarm` trigger to receive events when AWS CloudWatch alarms transitions to a different state. --------- Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- web_src/src/pages/workflowv2/mappers/aws/index.ts | 1 + web_src/src/ui/componentSidebar/integrationIcons.tsx | 1 + 2 files changed, 2 insertions(+) diff --git a/web_src/src/pages/workflowv2/mappers/aws/index.ts b/web_src/src/pages/workflowv2/mappers/aws/index.ts index 8e7e2c980b..e431554a64 100644 --- a/web_src/src/pages/workflowv2/mappers/aws/index.ts +++ b/web_src/src/pages/workflowv2/mappers/aws/index.ts @@ -14,6 +14,7 @@ import { deletePackageVersionsMapper } from "./codeartifact/delete_package_versi import { deleteRepositoryMapper } from "./codeartifact/delete_repository"; import { disposePackageVersionsMapper } from "./codeartifact/dispose_package_versions"; import { updatePackageVersionsStatusMapper } from "./codeartifact/update_package_versions_status"; +import { onAlarmTriggerRenderer } from "./cloudwatch/on_alarm"; export const componentMappers: Record = { "lambda.runFunction": runFunctionMapper, diff --git 
a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index 1391f8329e..31284132df 100644 --- a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -2,6 +2,7 @@ import { resolveIcon } from "@/lib/utils"; import React from "react"; import awsIcon from "@/assets/icons/integrations/aws.svg"; import awsLambdaIcon from "@/assets/icons/integrations/aws.lambda.svg"; +import circleciIcon from "@/assets/icons/integrations/circleci.svg"; import cloudflareIcon from "@/assets/icons/integrations/cloudflare.svg"; import dash0Icon from "@/assets/icons/integrations/dash0.svg"; import datadogIcon from "@/assets/icons/integrations/datadog.svg"; From 9e672de517863925573e9297352f5ecd191d5820 Mon Sep 17 00:00:00 2001 From: Manideep Chopperla <130681531+Manideepchopperla@users.noreply.github.com> Date: Thu, 12 Feb 2026 22:17:24 +0530 Subject: [PATCH 102/160] feat: Add rootly.updateIncident action (#2978) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Closes #2541 Adds `rootly.updateIncident` component that updates existing incidents in Rootly via their REST API (`PUT /v1/incidents/{id}`). 
### Backend - **UpdateIncident component** with 8 config fields: `incidentId` (required), `title`, `summary`, `status` (select), `severity` (integration resource), `services` (multi), `teams` (multi), `labels` (key-value list) - **UpdateIncident client method** with JSON:API request/response handling - **ListTeams client method** for team resource picker - **`incidentFromData()` helper** to eliminate field mapping duplication across constructors - Extended `Incident` struct with `sequential_id`, `slug`, `updated_at` fields - Added `team` resource type to `list_resources.go` - **9 tests** (7 Setup + 2 Execute) using `test/support/contexts` ### Frontend - `update_incident.ts` mapper with metadata display (incident ID + fields being updated) - Registered in `componentMappers` and `eventStateRegistry` - Extended `Incident` type with new fields - Added `Updated At` to `getDetailsForIncident` helper ### Docs - Auto-generated `docs/components/Rootly.mdx` ## Demo https://github.com/user-attachments/assets/7c691754-1ba3-4b9d-b0a1-6d2ef1bda4af --------- Signed-off-by: Manideep Co-authored-by: Pedro Leão <60622592+forestileao@users.noreply.github.com> Signed-off-by: Muhammad Fuzail Zubari --- pkg/integrations/rootly/client.go | 88 ++++++++----------- pkg/integrations/rootly/example.go | 10 +++ pkg/integrations/rootly/rootly.go | 1 + .../pages/workflowv2/mappers/rootly/base.ts | 20 +++++ .../workflowv2/mappers/rootly/create_event.ts | 23 +---- .../pages/workflowv2/mappers/rootly/index.ts | 3 + 6 files changed, 73 insertions(+), 72 deletions(-) diff --git a/pkg/integrations/rootly/client.go b/pkg/integrations/rootly/client.go index fd1a73e530..ad5688af2c 100644 --- a/pkg/integrations/rootly/client.go +++ b/pkg/integrations/rootly/client.go @@ -259,6 +259,42 @@ type IncidentEventResponse struct { Data IncidentEventData `json:"data"` } +// severityString extracts the severity slug from the API response. 
+// Rootly returns severity as a string (slug) or an object with slug/name fields. +func severityString(v any) string { + switch s := v.(type) { + case string: + return s + case map[string]any: + if slug, ok := s["slug"].(string); ok { + return slug + } + if name, ok := s["name"].(string); ok { + return name + } + } + + return "" +} + +// incidentFromData converts a JSON:API IncidentData to a flat Incident struct. +func incidentFromData(data IncidentData) *Incident { + return &Incident{ + ID: data.ID, + SequentialID: data.Attributes.SequentialID, + Title: data.Attributes.Title, + Slug: data.Attributes.Slug, + Summary: data.Attributes.Summary, + Status: data.Attributes.Status, + Severity: severityString(data.Attributes.Severity), + StartedAt: data.Attributes.StartedAt, + ResolvedAt: data.Attributes.ResolvedAt, + MitigatedAt: data.Attributes.MitigatedAt, + UpdatedAt: data.Attributes.UpdatedAt, + URL: data.Attributes.URL, + } +} + // CreateIncidentRequest represents the request to create an incident type CreateIncidentRequest struct { Data CreateIncidentData `json:"data"` @@ -359,58 +395,6 @@ func (c *Client) CreateIncidentEvent(incidentID, event, visibility string) (*Inc }, nil } -// CreateIncidentEventRequest represents the request to create an incident event. 
-type CreateIncidentEventRequest struct { - Data CreateIncidentEventData `json:"data"` -} - -type CreateIncidentEventData struct { - Type string `json:"type"` - Attributes CreateIncidentEventAttributes `json:"attributes"` -} - -type CreateIncidentEventAttributes struct { - Event string `json:"event"` - Visibility string `json:"visibility,omitempty"` -} - -func (c *Client) CreateIncidentEvent(incidentID, event, visibility string) (*IncidentEvent, error) { - request := CreateIncidentEventRequest{ - Data: CreateIncidentEventData{ - Type: "incident_events", - Attributes: CreateIncidentEventAttributes{ - Event: event, - Visibility: visibility, - }, - }, - } - - body, err := json.Marshal(request) - if err != nil { - return nil, fmt.Errorf("error marshaling request: %v", err) - } - - url := fmt.Sprintf("%s/incidents/%s/events", c.BaseURL, incidentID) - responseBody, err := c.execRequest(http.MethodPost, url, bytes.NewReader(body)) - if err != nil { - return nil, err - } - - var response IncidentEventResponse - err = json.Unmarshal(responseBody, &response) - if err != nil { - return nil, fmt.Errorf("error parsing response: %v", err) - } - - return &IncidentEvent{ - ID: response.Data.ID, - Event: response.Data.Attributes.Event, - Visibility: response.Data.Attributes.Visibility, - OccurredAt: response.Data.Attributes.OccurredAt, - CreatedAt: response.Data.Attributes.CreatedAt, - }, nil -} - func (c *Client) GetIncident(id string) (*Incident, error) { url := fmt.Sprintf("%s/incidents/%s", c.BaseURL, id) responseBody, err := c.execRequest(http.MethodGet, url, nil) diff --git a/pkg/integrations/rootly/example.go b/pkg/integrations/rootly/example.go index 21d8e4741b..89e88c19bf 100644 --- a/pkg/integrations/rootly/example.go +++ b/pkg/integrations/rootly/example.go @@ -19,6 +19,12 @@ var exampleOutputCreateEventBytes []byte var exampleOutputCreateEventOnce sync.Once var exampleOutputCreateEvent map[string]any +//go:embed example_output_update_incident.json +var 
exampleOutputUpdateIncidentBytes []byte + +var exampleOutputUpdateIncidentOnce sync.Once +var exampleOutputUpdateIncident map[string]any + //go:embed example_data_on_incident.json var exampleDataOnIncidentBytes []byte @@ -33,6 +39,10 @@ func (c *CreateEvent) ExampleOutput() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateEventOnce, exampleOutputCreateEventBytes, &exampleOutputCreateEvent) } +func (c *UpdateIncident) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputUpdateIncidentOnce, exampleOutputUpdateIncidentBytes, &exampleOutputUpdateIncident) +} + func (t *OnIncident) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleDataOnIncidentOnce, exampleDataOnIncidentBytes, &exampleDataOnIncident) } diff --git a/pkg/integrations/rootly/rootly.go b/pkg/integrations/rootly/rootly.go index ba96ead14e..cf32d97772 100644 --- a/pkg/integrations/rootly/rootly.go +++ b/pkg/integrations/rootly/rootly.go @@ -63,6 +63,7 @@ func (r *Rootly) Components() []core.Component { return []core.Component{ &CreateIncident{}, &CreateEvent{}, + &UpdateIncident{}, } } diff --git a/web_src/src/pages/workflowv2/mappers/rootly/base.ts b/web_src/src/pages/workflowv2/mappers/rootly/base.ts index 33f17f7711..92b5905dc3 100644 --- a/web_src/src/pages/workflowv2/mappers/rootly/base.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/base.ts @@ -1,5 +1,25 @@ +import { EventSection } from "@/ui/componentBase"; +import { getState, getTriggerRenderer } from ".."; +import { ExecutionInfo, NodeInfo } from "../types"; +import { formatTimeAgo } from "@/utils/date"; import { Incident, IncidentEvent } from "./types"; +export function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { + const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); + const { title } = 
rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent! }); + + return [ + { + receivedAt: new Date(execution.createdAt!), + eventTitle: title, + eventSubtitle: formatTimeAgo(new Date(execution.createdAt!)), + eventState: getState(componentName)(execution), + eventId: execution.rootEvent!.id!, + }, + ]; +} + export function getDetailsForIncident(incident: Incident): Record { const details: Record = {}; diff --git a/web_src/src/pages/workflowv2/mappers/rootly/create_event.ts b/web_src/src/pages/workflowv2/mappers/rootly/create_event.ts index 279a2cee89..c735804a10 100644 --- a/web_src/src/pages/workflowv2/mappers/rootly/create_event.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/create_event.ts @@ -1,11 +1,10 @@ -import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { ComponentBaseProps } from "@/ui/componentBase"; import { getBackgroundColorClass } from "@/utils/colors"; -import { getState, getStateMap, getTriggerRenderer } from ".."; +import { getStateMap } from ".."; import { ComponentBaseContext, ComponentBaseMapper, ExecutionDetailsContext, - ExecutionInfo, NodeInfo, OutputPayload, SubtitleContext, @@ -13,7 +12,7 @@ import { import { MetadataItem } from "@/ui/metadataList"; import rootlyIcon from "@/assets/icons/integrations/rootly.svg"; import { IncidentEvent } from "./types"; -import { getDetailsForIncidentEvent } from "./base"; +import { baseEventSections, getDetailsForIncidentEvent } from "./base"; import { formatTimeAgo } from "@/utils/date"; export const createEventMapper: ComponentBaseMapper = { @@ -66,19 +65,3 @@ function metadataList(node: NodeInfo): MetadataItem[] { return metadata; } - -function baseEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { - const rootTriggerNode = nodes.find((n) => n.id === execution.rootEvent?.nodeId); - const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!); - const { title } = 
rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); - - return [ - { - receivedAt: new Date(execution.createdAt!), - eventTitle: title, - eventSubtitle: formatTimeAgo(new Date(execution.createdAt!)), - eventState: getState(componentName)(execution), - eventId: execution.rootEvent!.id!, - }, - ]; -} diff --git a/web_src/src/pages/workflowv2/mappers/rootly/index.ts b/web_src/src/pages/workflowv2/mappers/rootly/index.ts index d4176f273d..a02e55f3fd 100644 --- a/web_src/src/pages/workflowv2/mappers/rootly/index.ts +++ b/web_src/src/pages/workflowv2/mappers/rootly/index.ts @@ -2,11 +2,13 @@ import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../typ import { onIncidentTriggerRenderer } from "./on_incident"; import { createIncidentMapper } from "./create_incident"; import { createEventMapper } from "./create_event"; +import { updateIncidentMapper } from "./update_incident"; import { buildActionStateRegistry } from "../utils"; export const componentMappers: Record = { createIncident: createIncidentMapper, createEvent: createEventMapper, + updateIncident: updateIncidentMapper, }; export const triggerRenderers: Record = { @@ -16,4 +18,5 @@ export const triggerRenderers: Record = { export const eventStateRegistry: Record = { createIncident: buildActionStateRegistry("created"), createEvent: buildActionStateRegistry("created"), + updateIncident: buildActionStateRegistry("updated"), }; From 08cfbce575866d7de87131eddb15636725496c00 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pedro=20Le=C3=A3o?= <60622592+forestileao@users.noreply.github.com> Date: Thu, 12 Feb 2026 13:58:50 -0300 Subject: [PATCH 103/160] feat: Prometheus Base integration (#3068) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Add a new **Prometheus** integration to SuperPlane with: - Prometheus connection setup (`baseURL` + `none/basic/bearer` API auth) - `On Alert` trigger from Alertmanager webhooks (with optional 
bearer-secret validation) - `Get Alert` action to fetch an alert by name/state from Prometheus ## What’s included - Backend integration (`pkg/integrations/prometheus/*`) - Prometheus API client (`/api/v1/alerts`, `/api/v1/query`) with auth support, response-size limits, and error handling - `On Alert` trigger pipeline: - Alertmanager webhook parsing - status + alert name filtering - one `prometheus.alert` event emitted per matching alert - Webhook auth validation using `Authorization: Bearer ` with constant-time token comparison - Frontend mappers + icon + registry wiring for Prometheus trigger/action - New docs page: `docs/components/Prometheus.mdx` - Server integration registration (`pkg/server/server.go`) - Integration state-description size increase to 1024 chars (to preserve longer sync/error messages) - Test coverage for client, sync, trigger, webhook handling, and action behavior https://github.com/user-attachments/assets/055a987b-c07c-4a16-a930-0caeaad2c21c image image --------- Signed-off-by: Pedro F. 
Leao Signed-off-by: Muhammad Fuzail Zubari --- web_src/src/pages/workflowv2/mappers/index.ts | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/web_src/src/pages/workflowv2/mappers/index.ts b/web_src/src/pages/workflowv2/mappers/index.ts index 78eaf17ae6..129d06bb10 100644 --- a/web_src/src/pages/workflowv2/mappers/index.ts +++ b/web_src/src/pages/workflowv2/mappers/index.ts @@ -106,6 +106,12 @@ import { triggerRenderers as claudeTriggerRenderers, eventStateRegistry as claudeEventStateRegistry, } from "./claude/index"; +import { + componentMappers as prometheusComponentMappers, + customFieldRenderers as prometheusCustomFieldRenderers, + triggerRenderers as prometheusTriggerRenderers, + eventStateRegistry as prometheusEventStateRegistry, +} from "./prometheus/index"; import { componentMappers as cursorComponentMappers, triggerRenderers as cursorTriggerRenderers, @@ -167,6 +173,7 @@ const appMappers: Record> = { openai: openaiComponentMappers, circleci: circleCIComponentMappers, claude: claudeComponentMappers, + prometheus: prometheusComponentMappers, cursor: cursorComponentMappers, dockerhub: dockerhubComponentMappers, }; @@ -190,6 +197,7 @@ const appTriggerRenderers: Record> = { openai: openaiTriggerRenderers, circleci: circleCITriggerRenderers, claude: claudeTriggerRenderers, + prometheus: prometheusTriggerRenderers, cursor: cursorTriggerRenderers, dockerhub: dockerhubTriggerRenderers, }; @@ -212,6 +220,7 @@ const appEventStateRegistries: Record circleci: circleCIEventStateRegistry, claude: claudeEventStateRegistry, aws: awsEventStateRegistry, + prometheus: prometheusEventStateRegistry, cursor: cursorEventStateRegistry, gitlab: gitlabEventStateRegistry, dockerhub: dockerhubEventStateRegistry, @@ -240,6 +249,7 @@ const customFieldRenderers: Record = { const appCustomFieldRenderers: Record> = { github: githubCustomFieldRenderers, + prometheus: prometheusCustomFieldRenderers, dockerhub: dockerhubCustomFieldRenderers, }; From 
90339f29949dd6bc7dd70d30904d07debad2c80b Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Thu, 12 Feb 2026 20:43:02 -0300 Subject: [PATCH 104/160] feat: GitLab triggers for merge requests, milestones, tags, releases and vulnerabilities (#3084) New GitLab triggers for multiple resources: - gitlab.onMergeRequest - for receiving events about merge requests - gitlab.onMilestone - for receiving events about milestones - gitlab.onTag - for receiving events about tags - gitlab.onRelease - for receiving events about releases - gitlab.onVulnerability - for receiving events about vulnerabilities --------- Signed-off-by: Cursor Agent Signed-off-by: Lucas Pinheiro Co-authored-by: Cursor Agent Signed-off-by: Muhammad Fuzail Zubari --- docs/components/GitLab.mdx | 391 ++++++++++++++++++ pkg/integrations/gitlab/example.go | 50 +++ pkg/integrations/gitlab/gitlab.go | 5 + pkg/integrations/gitlab/gitlab_test.go | 13 + pkg/integrations/gitlab/hooks.go | 6 + pkg/integrations/gitlab/webhook_handler.go | 4 + .../pages/workflowv2/mappers/gitlab/index.ts | 10 + 7 files changed, 479 insertions(+) diff --git a/docs/components/GitLab.mdx b/docs/components/GitLab.mdx index 3e56b7ae24..4264d7f665 100644 --- a/docs/components/GitLab.mdx +++ b/docs/components/GitLab.mdx @@ -8,6 +8,11 @@ Manage and react to changes in your GitLab repositories + + + + + import { CardGrid, LinkCard } from "@astrojs/starlight/components"; @@ -126,6 +131,392 @@ This trigger automatically sets up a GitLab webhook when configured. The webhook } ``` + + +## On Merge Request + +The On Merge Request trigger starts a workflow execution when merge request events occur in a GitLab project. + +### Configuration + +- **Project** (required): GitLab project to monitor +- **Actions** (required): Select which merge request actions to listen for (open, close, merge, etc.). Default: open. 
+ +### Outputs + +- **Default channel**: Emits merge request payload data with action, project, and object attributes + +### Example Data + +```json +{ + "data": { + "assignees": [ + { + "avatar_url": "https://www.gravatar.com/avatar/ab12cd34?s=80\u0026d=identicon", + "email": "jrivera@example.com", + "id": 4, + "name": "Jamie Rivera", + "username": "jrivera" + } + ], + "changes": { + "title": { + "current": "Add merge request trigger", + "previous": "Add trigger" + } + }, + "event_type": "merge_request", + "labels": [ + { + "id": 101, + "title": "backend" + } + ], + "object_attributes": { + "action": "open", + "description": "Adds support for additional GitLab webhook trigger types.", + "id": 93, + "iid": 12, + "state": "opened", + "title": "Add merge request trigger" + }, + "object_kind": "merge_request", + "project": { + "avatar_url": null, + "ci_config_path": null, + "default_branch": "main", + "description": "Project used to demonstrate merge request webhook payloads.", + "git_http_url": "https://gitlab.example.com/group/example.git", + "git_ssh_url": "ssh://git@gitlab.example.com:group/example.git", + "id": 1, + "name": "Example Project", + "namespace": "group", + "path_with_namespace": "group/example", + "visibility_level": 20, + "web_url": "https://gitlab.example.com/group/example" + }, + "repository": { + "description": "Project used to demonstrate merge request webhook payloads.", + "git_http_url": "https://gitlab.example.com/group/example.git", + "git_ssh_url": "ssh://git@gitlab.example.com:group/example.git", + "homepage": "https://gitlab.example.com/group/example", + "name": "Example Project", + "url": "ssh://git@gitlab.example.com/group/example.git", + "visibility_level": 20 + }, + "reviewers": [ + { + "avatar_url": "https://www.gravatar.com/avatar/ef56gh78?s=80\u0026d=identicon", + "email": "mlee@example.com", + "id": 6, + "name": "Morgan Lee", + "state": "unreviewed", + "username": "mlee" + } + ], + "user": { + "avatar_url": 
"https://www.gravatar.com/avatar/1a29da0ccd099482194440fac762f5ccb4ec53227761d1859979367644a889a5?s=80\u0026d=identicon", + "email": "agarcia@example.com", + "id": 1, + "name": "Alex Garcia", + "username": "agarcia" + } + }, + "timestamp": "2026-02-12T20:40:00.000000000Z", + "type": "gitlab.mergeRequest" +} +``` + + + +## On Milestone + +The On Milestone trigger starts a workflow execution when milestone events occur in a GitLab project. + +### Configuration + +- **Project** (required): GitLab project to monitor +- **Actions** (required): Select which milestone actions to listen for. Default: create. + +### Outputs + +- **Default channel**: Emits milestone payload data with action, project, and object attributes + +### Example Data + +```json +{ + "data": { + "action": "create", + "event_type": "milestone", + "object_attributes": { + "created_at": "2025-06-16 14:10:57 UTC", + "description": "First stable release", + "due_date": "2025-06-30", + "group_id": null, + "id": 61, + "iid": 10, + "project_id": 1, + "start_date": "2025-06-16", + "state": "active", + "title": "v1.0", + "updated_at": "2025-06-16 14:10:57 UTC" + }, + "object_kind": "milestone", + "project": { + "avatar_url": null, + "ci_config_path": null, + "default_branch": "master", + "description": "Aut reprehenderit ut est.", + "git_http_url": "http://example.com/gitlabhq/gitlab-test.git", + "git_ssh_url": "git@example.com:gitlabhq/gitlab-test.git", + "homepage": "http://example.com/gitlabhq/gitlab-test", + "http_url": "http://example.com/gitlabhq/gitlab-test.git", + "id": 1, + "name": "Gitlab Test", + "namespace": "GitlabHQ", + "path_with_namespace": "gitlabhq/gitlab-test", + "ssh_url": "git@example.com:gitlabhq/gitlab-test.git", + "url": "http://example.com/gitlabhq/gitlab-test.git", + "visibility_level": 20, + "web_url": "http://example.com/gitlabhq/gitlab-test" + } + }, + "timestamp": "2026-02-12T20:40:00.000000000Z", + "type": "gitlab.milestone" +} +``` + + + +## On Release + +The On Release trigger 
starts a workflow execution when release events occur in a GitLab project. + +### Configuration + +- **Project** (required): GitLab project to monitor +- **Actions** (required): Select which release actions to listen for. Default: create. + +### Outputs + +- **Default channel**: Emits release payload data with action and release metadata + +### Example Data + +```json +{ + "data": { + "action": "create", + "assets": { + "count": 2, + "links": [ + { + "id": 1, + "link_type": "other", + "name": "Changelog", + "url": "https://example.net/changelog" + } + ], + "sources": [ + { + "format": "zip", + "url": "https://example.com/gitlab-org/release-webhook-example/-/archive/v1.1/release-webhook-example-v1.1.zip" + }, + { + "format": "tar.gz", + "url": "https://example.com/gitlab-org/release-webhook-example/-/archive/v1.1/release-webhook-example-v1.1.tar.gz" + } + ] + }, + "commit": { + "author": { + "email": "user@example.com", + "name": "Example User" + }, + "id": "ee0a3fb31ac16e11b9dbb596ad16d4af654d08f8", + "message": "Release v1.1", + "timestamp": "2020-10-31T14:58:32+11:00", + "title": "Release v1.1", + "url": "https://example.com/gitlab-org/release-webhook-example/-/commit/ee0a3fb31ac16e11b9dbb596ad16d4af654d08f8" + }, + "created_at": "2020-11-02 12:55:12 UTC", + "description": "v1.1 has been released", + "id": 1, + "name": "v1.1", + "object_kind": "release", + "project": { + "avatar_url": null, + "ci_config_path": null, + "default_branch": "master", + "description": "", + "git_http_url": "https://example.com/gitlab-org/release-webhook-example.git", + "git_ssh_url": "ssh://git@example.com/gitlab-org/release-webhook-example.git", + "id": 1, + "name": "release-webhook-example", + "namespace": "Gitlab", + "path_with_namespace": "gitlab-org/release-webhook-example", + "visibility_level": 0, + "web_url": "https://example.com/gitlab-org/release-webhook-example" + }, + "released_at": "2020-11-02 12:55:12 UTC", + "tag": "v1.1", + "url": 
"https://example.com/gitlab-org/release-webhook-example/-/releases/v1.1" + }, + "timestamp": "2026-02-12T20:40:00.000000000Z", + "type": "gitlab.release" +} +``` + + + +## On Tag + +The On Tag trigger starts a workflow execution when tag push events occur in a GitLab project. + +### Configuration + +- **Project** (required): GitLab project to monitor +- **Tags** (required): Configure tag filters using predicates. You can match full refs (refs/tags/v1.0.0) or tag names (v1.0.0). + +### Outputs + +- **Default channel**: Emits tag push payload data including ref, before/after SHA, and project information + +### Example Data + +```json +{ + "data": { + "after": "82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", + "before": "0000000000000000000000000000000000000000", + "checkout_sha": "82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", + "commits": [], + "event_name": "tag_push", + "message": "Tag message", + "object_kind": "tag_push", + "project": { + "avatar_url": null, + "ci_config_path": null, + "default_branch": "master", + "description": "", + "git_http_url": "http://example.com/jsmith/example.git", + "git_ssh_url": "git@example.com:jsmith/example.git", + "id": 1, + "name": "Example", + "namespace": "Jsmith", + "path_with_namespace": "jsmith/example", + "visibility_level": 0, + "web_url": "http://example.com/jsmith/example" + }, + "push_options": {}, + "ref": "refs/tags/v1.0.0", + "ref_protected": true, + "repository": { + "description": "", + "git_http_url": "http://example.com/jsmith/example.git", + "git_ssh_url": "git@example.com:jsmith/example.git", + "homepage": "http://example.com/jsmith/example", + "name": "Example", + "url": "ssh://git@example.com/jsmith/example.git", + "visibility_level": 0 + }, + "total_commits_count": 0, + "user_email": "john@example.com", + "user_id": 1, + "user_name": "John Smith", + "user_username": "jsmith" + }, + "timestamp": "2026-02-12T20:40:00.000000000Z", + "type": "gitlab.tag" +} +``` + + + +## On Vulnerability + +The On Vulnerability 
trigger starts a workflow execution when vulnerability events occur in a GitLab project. + +### Configuration + +- **Project** (required): GitLab project to monitor + +### Outputs + +- **Default channel**: Emits vulnerability payload data including severity, state, location, and linked issues + +### Example Data + +```json +{ + "data": { + "object_attributes": { + "auto_resolved": false, + "confidence": "unknown", + "confidence_overridden": false, + "confirmed_at": "2025-01-08T00:46:14.413Z", + "confirmed_by_id": 1, + "created_at": "2025-01-08T00:46:14.413Z", + "cvss": [ + { + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H", + "vendor": "NVD" + } + ], + "dismissed_at": null, + "dismissed_by_id": null, + "identifiers": [ + { + "external_id": "29dce398-220a-4315-8c84-16cd8b6d9b05", + "external_type": "gemnasium", + "name": "Gemnasium-29dce398-220a-4315-8c84-16cd8b6d9b05", + "url": "https://gitlab.com/gitlab-org/security-products/gemnasium-db/-/blob/master/gem/rexml/CVE-2024-41123.yml" + }, + { + "external_id": "CVE-2024-41123", + "external_type": "cve", + "name": "CVE-2024-41123", + "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-41123" + } + ], + "issues": [ + { + "created_at": "2025-01-08T00:46:14.429Z", + "title": "REXML ReDoS vulnerability", + "updated_at": "2025-01-08T00:46:14.429Z", + "url": "https://example.com/flightjs/Flight/-/issues/1" + } + ], + "location": { + "dependency": { + "package": { + "name": "rexml" + }, + "version": "3.3.1" + }, + "file": "Gemfile.lock" + }, + "project_id": 1, + "report_type": "dependency_scanning", + "resolved_at": null, + "resolved_by_id": null, + "resolved_on_default_branch": false, + "severity": "high", + "severity_overridden": false, + "state": "confirmed", + "title": "REXML DoS vulnerability", + "updated_at": "2025-01-08T00:46:14.413Z", + "url": "https://example.com/flightjs/Flight/-/security/vulnerabilities/1" + }, + "object_kind": "vulnerability" + }, + "timestamp": 
"2026-02-12T20:40:00.000000000Z", + "type": "gitlab.vulnerability" +} +``` + ## Create Issue diff --git a/pkg/integrations/gitlab/example.go b/pkg/integrations/gitlab/example.go index f328477b63..397360118d 100644 --- a/pkg/integrations/gitlab/example.go +++ b/pkg/integrations/gitlab/example.go @@ -10,9 +10,59 @@ import ( //go:embed example_data_on_issue.json var exampleDataOnIssueBytes []byte +//go:embed example_data_on_merge_request.json +var exampleDataOnMergeRequestBytes []byte + +//go:embed example_data_on_milestone.json +var exampleDataOnMilestoneBytes []byte + +//go:embed example_data_on_release.json +var exampleDataOnReleaseBytes []byte + +//go:embed example_data_on_tag.json +var exampleDataOnTagBytes []byte + +//go:embed example_data_on_vulnerability.json +var exampleDataOnVulnerabilityBytes []byte + var exampleDataOnIssueOnce sync.Once var exampleDataOnIssue map[string]any +var exampleDataOnMergeRequestOnce sync.Once +var exampleDataOnMergeRequest map[string]any + +var exampleDataOnMilestoneOnce sync.Once +var exampleDataOnMilestone map[string]any + +var exampleDataOnReleaseOnce sync.Once +var exampleDataOnRelease map[string]any + +var exampleDataOnTagOnce sync.Once +var exampleDataOnTag map[string]any + +var exampleDataOnVulnerabilityOnce sync.Once +var exampleDataOnVulnerability map[string]any + func (i *OnIssue) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleDataOnIssueOnce, exampleDataOnIssueBytes, &exampleDataOnIssue) } + +func (m *OnMergeRequest) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnMergeRequestOnce, exampleDataOnMergeRequestBytes, &exampleDataOnMergeRequest) +} + +func (m *OnMilestone) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnMilestoneOnce, exampleDataOnMilestoneBytes, &exampleDataOnMilestone) +} + +func (r *OnRelease) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnReleaseOnce, 
exampleDataOnReleaseBytes, &exampleDataOnRelease) +} + +func (t *OnTag) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnTagOnce, exampleDataOnTagBytes, &exampleDataOnTag) +} + +func (v *OnVulnerability) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnVulnerabilityOnce, exampleDataOnVulnerabilityBytes, &exampleDataOnVulnerability) +} diff --git a/pkg/integrations/gitlab/gitlab.go b/pkg/integrations/gitlab/gitlab.go index 6b7f64d989..f54d2e94c1 100644 --- a/pkg/integrations/gitlab/gitlab.go +++ b/pkg/integrations/gitlab/gitlab.go @@ -175,6 +175,11 @@ func (g *GitLab) Components() []core.Component { func (g *GitLab) Triggers() []core.Trigger { return []core.Trigger{ &OnIssue{}, + &OnMergeRequest{}, + &OnMilestone{}, + &OnRelease{}, + &OnTag{}, + &OnVulnerability{}, } } diff --git a/pkg/integrations/gitlab/gitlab_test.go b/pkg/integrations/gitlab/gitlab_test.go index 6a5cc6e36a..9fc96fe415 100644 --- a/pkg/integrations/gitlab/gitlab_test.go +++ b/pkg/integrations/gitlab/gitlab_test.go @@ -387,3 +387,16 @@ func Test__GitLab__BaseURLNormalization(t *testing.T) { }) } } + +func gitlabHeaders(event, token string) http.Header { + headers := http.Header{} + if event != "" { + headers.Set("X-Gitlab-Event", event) + } + + if token != "" { + headers.Set("X-Gitlab-Token", token) + } + + return headers +} diff --git a/pkg/integrations/gitlab/hooks.go b/pkg/integrations/gitlab/hooks.go index e060f15131..887681221a 100644 --- a/pkg/integrations/gitlab/hooks.go +++ b/pkg/integrations/gitlab/hooks.go @@ -28,6 +28,8 @@ type Hook struct { WikiPageEvents bool `json:"wiki_page_events"` DeploymentEvents bool `json:"deployment_events"` ReleasesEvents bool `json:"releases_events"` + MilestoneEvents bool `json:"milestone_events"` + VulnerabilityEvents bool `json:"vulnerability_events"` } type HookEvents struct { @@ -41,6 +43,8 @@ type HookEvents struct { WikiPageEvents bool DeploymentEvents bool ReleasesEvents bool + 
MilestoneEvents bool + VulnerabilityEvents bool } func NewHooksClient(httpClient core.HTTPContext, ctx core.IntegrationContext) (*HooksClient, error) { @@ -84,6 +88,8 @@ func (c *HooksClient) CreateHook(projectID string, webhookURL string, secret str "wiki_page_events": events.WikiPageEvents, "deployment_events": events.DeploymentEvents, "releases_events": events.ReleasesEvents, + "milestone_events": events.MilestoneEvents, + "vulnerability_events": events.VulnerabilityEvents, } body, err := json.Marshal(payload) diff --git a/pkg/integrations/gitlab/webhook_handler.go b/pkg/integrations/gitlab/webhook_handler.go index a5055f2ec1..65c140aa29 100644 --- a/pkg/integrations/gitlab/webhook_handler.go +++ b/pkg/integrations/gitlab/webhook_handler.go @@ -68,6 +68,10 @@ func (h *GitLabWebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error events.PipelineEvents = true case "releases": events.ReleasesEvents = true + case "milestone": + events.MilestoneEvents = true + case "vulnerability": + events.VulnerabilityEvents = true } hook, err := hooksClient.CreateHook(config.ProjectID, ctx.Webhook.GetURL(), string(secret), events) diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/index.ts b/web_src/src/pages/workflowv2/mappers/gitlab/index.ts index 6d78e8e9a4..59c3c9c17d 100644 --- a/web_src/src/pages/workflowv2/mappers/gitlab/index.ts +++ b/web_src/src/pages/workflowv2/mappers/gitlab/index.ts @@ -2,6 +2,11 @@ import { ComponentBaseMapper, EventStateRegistry, TriggerRenderer } from "../typ import { buildActionStateRegistry } from "../utils"; import { createIssueMapper } from "./create_issue"; import { onIssueTriggerRenderer } from "./on_issue"; +import { onMergeRequestTriggerRenderer } from "./on_merge_request"; +import { onMilestoneTriggerRenderer } from "./on_milestone"; +import { onReleaseTriggerRenderer } from "./on_release"; +import { onTagTriggerRenderer } from "./on_tag"; +import { onVulnerabilityTriggerRenderer } from "./on_vulnerability"; export const 
eventStateRegistry: Record = { createIssue: buildActionStateRegistry("created"), @@ -13,4 +18,9 @@ export const componentMappers: Record = { export const triggerRenderers: Record = { onIssue: onIssueTriggerRenderer, + onMergeRequest: onMergeRequestTriggerRenderer, + onMilestone: onMilestoneTriggerRenderer, + onRelease: onReleaseTriggerRenderer, + onTag: onTagTriggerRenderer, + onVulnerability: onVulnerabilityTriggerRenderer, }; From 16c7573b08e385523bcb000cce7c0771f95f0a62 Mon Sep 17 00:00:00 2001 From: Muhammad Fuzail Zubari Date: Fri, 13 Feb 2026 11:19:02 +0500 Subject: [PATCH 105/160] GetURl unused method removed (#1945) Signed-off-by: Muhammad Fuzail Zubari --- pkg/core/trigger.go | 1 - pkg/integrations/sendgrid/on_email_event_test.go | 4 ---- pkg/workers/contexts/node_webhook_context.go | 8 -------- test/support/contexts/contexts.go | 6 ------ 4 files changed, 19 deletions(-) diff --git a/pkg/core/trigger.go b/pkg/core/trigger.go index b5c3fad91a..0e86340fa0 100644 --- a/pkg/core/trigger.go +++ b/pkg/core/trigger.go @@ -134,7 +134,6 @@ type WebhookRequestContext struct { type NodeWebhookContext interface { Setup() (string, error) - GetURL() (string, error) GetSecret() ([]byte, error) ResetSecret() ([]byte, []byte, error) GetBaseURL() string diff --git a/pkg/integrations/sendgrid/on_email_event_test.go b/pkg/integrations/sendgrid/on_email_event_test.go index a18f7aad48..7e0282d9c2 100644 --- a/pkg/integrations/sendgrid/on_email_event_test.go +++ b/pkg/integrations/sendgrid/on_email_event_test.go @@ -171,10 +171,6 @@ func (t *testNodeWebhookContext) Setup() (string, error) { return "", nil } -func (t *testNodeWebhookContext) GetURL() (string, error) { - return "", nil -} - func (t *testNodeWebhookContext) GetSecret() ([]byte, error) { return t.secret, nil } diff --git a/pkg/workers/contexts/node_webhook_context.go b/pkg/workers/contexts/node_webhook_context.go index a1c0e487be..a0b1f78b69 100644 --- a/pkg/workers/contexts/node_webhook_context.go +++ 
b/pkg/workers/contexts/node_webhook_context.go @@ -66,14 +66,6 @@ func (c *NodeWebhookContext) ResetSecret() ([]byte, []byte, error) { return []byte(plainKey), encryptedKey, nil } -func (c *NodeWebhookContext) GetURL() (string, error) { - if c.node.WebhookID == nil { - return "", fmt.Errorf("node does not have a webhook") - } - - return fmt.Sprintf("%s/webhooks/%s", c.GetBaseURL(), c.node.WebhookID.String()), nil -} - func (c *NodeWebhookContext) Setup() (string, error) { webhook, err := c.findOrCreateWebhook() if err != nil { diff --git a/test/support/contexts/contexts.go b/test/support/contexts/contexts.go index 45d86eb63c..f3e78e48ec 100644 --- a/test/support/contexts/contexts.go +++ b/test/support/contexts/contexts.go @@ -1,7 +1,6 @@ package contexts import ( - "fmt" "net/http" "time" @@ -49,11 +48,6 @@ func (w *WebhookContext) Setup() (string, error) { return id.String(), nil } -func (w *WebhookContext) GetURL() (string, error) { - id := uuid.New() - return fmt.Sprintf("%s/webhooks/%s", w.GetBaseURL(), id.String()), nil -} - func (w *WebhookContext) GetBaseURL() string { return "http://localhost:3000/api/v1" } From 161b1a652898b5d727240d293296619eb5145d44 Mon Sep 17 00:00:00 2001 From: Petar Perovic Date: Fri, 13 Feb 2026 13:03:35 +0100 Subject: [PATCH 106/160] chore: Multi instance integrations UI (#3094) Signed-off-by: Muhammad Fuzail Zubari --- .../assets/icons/integrations/circleci.svg | 14 +- .../organization/settings/Integrations.tsx | 222 +++++++++++------- .../src/ui/componentSidebar/SettingsTab.tsx | 3 +- 3 files changed, 156 insertions(+), 83 deletions(-) diff --git a/web_src/src/assets/icons/integrations/circleci.svg b/web_src/src/assets/icons/integrations/circleci.svg index c08ac7ffa4..6c5d6cd8c9 100644 --- a/web_src/src/assets/icons/integrations/circleci.svg +++ b/web_src/src/assets/icons/integrations/circleci.svg @@ -1,4 +1,12 @@ - - - + + + + + diff --git a/web_src/src/pages/organization/settings/Integrations.tsx 
b/web_src/src/pages/organization/settings/Integrations.tsx index 3bc2dc3ed4..cc95e14ab7 100644 --- a/web_src/src/pages/organization/settings/Integrations.tsx +++ b/web_src/src/pages/organization/settings/Integrations.tsx @@ -45,6 +45,90 @@ export function Integrations({ organizationId }: IntegrationsProps) { organizationIntegrations.map((integration) => integration.metadata?.name?.trim()).filter(Boolean) as string[], ); }, [organizationIntegrations]); + const connectedInstancesByProvider = useMemo(() => { + const groups = new Map(); + + organizationIntegrations.forEach((integration) => { + const provider = integration.spec?.integrationName; + if (!provider) return; + const current = groups.get(provider) || []; + current.push(integration); + groups.set(provider, current); + }); + + return groups; + }, [organizationIntegrations]); + const integrationCatalog = useMemo(() => { + const catalogByProvider = new Map< + string, + { + providerName: string; + providerLabel: string; + integrationDef: IntegrationsIntegrationDefinition | null; + instances: typeof organizationIntegrations; + } + >(); + + availableIntegrations.forEach((integrationDef) => { + const providerName = integrationDef.name || ""; + const providerLabel = + integrationDef.label || + getIntegrationTypeDisplayName(undefined, integrationDef.name) || + integrationDef.name || + "Integration"; + const instances = [...(connectedInstancesByProvider.get(providerName) || [])].sort((a, b) => + (a.metadata?.name || providerLabel).localeCompare(b.metadata?.name || providerLabel), + ); + + catalogByProvider.set(providerName, { + providerName, + providerLabel, + integrationDef, + instances, + }); + }); + + connectedInstancesByProvider.forEach((instances, providerName) => { + if (catalogByProvider.has(providerName)) { + return; + } + + const providerLabel = getIntegrationTypeDisplayName(undefined, providerName) || providerName || "Integration"; + const sortedInstances = [...instances].sort((a, b) => + (a.metadata?.name || 
providerLabel).localeCompare(b.metadata?.name || providerLabel), + ); + + catalogByProvider.set(providerName, { + providerName, + providerLabel, + integrationDef: null, + instances: sortedInstances, + }); + }); + + return [...catalogByProvider.values()].sort((a, b) => a.providerLabel.localeCompare(b.providerLabel)); + }, [availableIntegrations, connectedInstancesByProvider]); + const filteredIntegrationCatalog = useMemo(() => { + const normalizedQuery = filterQuery.trim().toLowerCase(); + if (!normalizedQuery) { + return integrationCatalog; + } + + return integrationCatalog.filter((item) => { + const providerText = [item.providerLabel, item.providerName, item.integrationDef?.description] + .filter(Boolean) + .join(" ") + .toLowerCase(); + + if (providerText.includes(normalizedQuery)) { + return true; + } + + return item.instances.some((instance) => + (instance.metadata?.name || instance.spec?.integrationName || "").toLowerCase().includes(normalizedQuery), + ); + }); + }, [filterQuery, integrationCatalog]); const selectedInstructions = useMemo(() => { return selectedIntegration?.instructions?.trim(); @@ -117,60 +201,48 @@ export function Integrations({ organizationId }: IntegrationsProps) { return (
    - {/* Integrations */} - {organizationIntegrations.length > 0 && ( -
    -

    Connected

    -
    - {[...organizationIntegrations] - .sort((a, b) => - (a.metadata?.name || a.spec?.integrationName || "").localeCompare( - b.metadata?.name || b.spec?.integrationName || "", - ), - ) - .map((integration) => { - const integrationDefinition = availableIntegrations.find( - (a) => a.name === integration.spec?.integrationName, - ); - const integrationLabel = - integrationDefinition?.label || - getIntegrationTypeDisplayName(undefined, integration.spec?.integrationName) || - integration.spec?.integrationName; - const integrationDisplayName = - integration.metadata?.name || - getIntegrationTypeDisplayName(undefined, integration.spec?.integrationName) || - integration.spec?.integrationName; - const integrationName = integrationDefinition?.name || integration.spec?.integrationName; - const statusLabel = integration.status?.state - ? integration.status.state.charAt(0).toUpperCase() + integration.status.state.slice(1) - : "Unknown"; - - return ( -
    -
    -
    - -
    -
    -

    - {integrationDisplayName} -

    - {integrationLabel && integrationDisplayName !== integrationLabel ? ( -

    Type: {integrationLabel}

    - ) : null} - {integrationDefinition?.description ? ( -

    - {integrationDefinition.description} -

    - ) : null} -
    +
    + + setFilterQuery(e.target.value)} + placeholder="Filter integrations..." + className="pl-9 pr-9" + /> + {filterQuery.length > 0 ? ( + + ) : null} +
    + {filteredIntegrationCatalog.length === 0 ? ( +
    + +

    + {integrationCatalog.length === 0 ? "No integrations available." : "No integrations match your filter."} +

    +
    + ) : ( +
    + {filteredIntegrationCatalog.map((item) => { + const connectedCount = item.instances.length; + + return ( +
    +
    +
    +
    +

    {item.providerLabel}

    @@ -181,31 +253,23 @@ export function Integrations({ organizationId }: IntegrationsProps) { ) : null}
    - ); - })} -
    -
    - )} - - {/* Available Integrations */} -
    -

    Available

    -
    - {availableIntegrations.length === 0 ? ( -
    - -

    No integrations available.

    -
    - ) : ( -
    - {[...availableIntegrations] - .sort((a, b) => (a.label || a.name || "").localeCompare(b.label || b.name || "")) - .map((app) => { - const appName = app.name; - return ( -
    + diff --git a/web_src/src/ui/componentSidebar/SettingsTab.tsx b/web_src/src/ui/componentSidebar/SettingsTab.tsx index 016e7967bc..aa16fb1adc 100644 --- a/web_src/src/ui/componentSidebar/SettingsTab.tsx +++ b/web_src/src/ui/componentSidebar/SettingsTab.tsx @@ -370,6 +370,7 @@ export function SettingsTab({
    {selectedIntegrationFull && ( <> +

    Connection

    {(() => { const hasIntegrationError = selectedIntegrationFull.status?.state === "error" && @@ -377,7 +378,7 @@ export function SettingsTab({ const integrationStatusCard = (
    Date: Fri, 13 Feb 2026 20:46:55 +0500 Subject: [PATCH 107/160] fix(grafana): restore integration icons and unblock dev startup (#1945) Signed-off-by: Muhammad Fuzail Zubari --- test/support/contexts/contexts.go | 1 + web_src/src/pages/workflowv2/mappers/index.ts | 5 +++++ web_src/src/ui/BuildingBlocksSidebar/index.tsx | 2 ++ web_src/src/ui/componentSidebar/integrationIcons.tsx | 3 +++ 4 files changed, 11 insertions(+) diff --git a/test/support/contexts/contexts.go b/test/support/contexts/contexts.go index f3e78e48ec..5fe47703d1 100644 --- a/test/support/contexts/contexts.go +++ b/test/support/contexts/contexts.go @@ -1,6 +1,7 @@ package contexts import ( + "fmt" "net/http" "time" diff --git a/web_src/src/pages/workflowv2/mappers/index.ts b/web_src/src/pages/workflowv2/mappers/index.ts index 129d06bb10..5de22c24c7 100644 --- a/web_src/src/pages/workflowv2/mappers/index.ts +++ b/web_src/src/pages/workflowv2/mappers/index.ts @@ -96,6 +96,11 @@ import { triggerRenderers as openaiTriggerRenderers, eventStateRegistry as openaiEventStateRegistry, } from "./openai/index"; +import { + componentMappers as grafanaComponentMappers, + triggerRenderers as grafanaTriggerRenderers, + eventStateRegistry as grafanaEventStateRegistry, +} from "./grafana/index"; import { componentMappers as circleCIComponentMappers, triggerRenderers as circleCITriggerRenderers, diff --git a/web_src/src/ui/BuildingBlocksSidebar/index.tsx b/web_src/src/ui/BuildingBlocksSidebar/index.tsx index 36499a1ac1..a8a7c2bc6a 100644 --- a/web_src/src/ui/BuildingBlocksSidebar/index.tsx +++ b/web_src/src/ui/BuildingBlocksSidebar/index.tsx @@ -406,6 +406,7 @@ function CategorySection({ discord: discordIcon, github: githubIcon, gitlab: gitlabIcon, + grafana: grafanaIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, @@ -484,6 +485,7 @@ function CategorySection({ discord: discordIcon, github: githubIcon, gitlab: gitlabIcon, + grafana: grafanaIcon, openai: openAiIcon, "open-ai": openAiIcon, claude: 
claudeIcon, diff --git a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index 31284132df..d22d7a7644 100644 --- a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -10,6 +10,7 @@ import daytonaIcon from "@/assets/icons/integrations/daytona.svg"; import discordIcon from "@/assets/icons/integrations/discord.svg"; import githubIcon from "@/assets/icons/integrations/github.svg"; import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; +import grafanaIcon from "@/assets/icons/integrations/grafana.svg"; import jiraIcon from "@/assets/icons/integrations/jira.svg"; import openAiIcon from "@/assets/icons/integrations/openai.svg"; import claudeIcon from "@/assets/icons/integrations/claude.svg"; @@ -35,6 +36,7 @@ export const INTEGRATION_APP_LOGO_MAP: Record = { discord: discordIcon, github: githubIcon, gitlab: gitlabIcon, + grafana: grafanaIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, @@ -61,6 +63,7 @@ export const APP_LOGO_MAP: Record> = { discord: discordIcon, github: githubIcon, gitlab: gitlabIcon, + grafana: grafanaIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, From 56a52f7a7e00ef6b497936e1ca7018e4ad8c086b Mon Sep 17 00:00:00 2001 From: Fadhili Juma <12300986+fadhilijuma@users.noreply.github.com> Date: Fri, 13 Feb 2026 22:33:50 +0300 Subject: [PATCH 108/160] feat: bootstrap AWS SNS integration components (#2963) This PR bootstraps AWS SNS integration support. 
Implemented SNS components: - `aws.sns.onTopicMessage` - `aws.sns.getTopic` - `aws.sns.getSubscription` - `aws.sns.createTopic` - `aws.sns.deleteTopic` - `aws.sns.publishMessage` --------- Signed-off-by: Fadhili Juma Signed-off-by: Lucas Pinheiro Co-authored-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- docs/components/AWS.mdx | 208 ++++++++ pkg/grpc/query_parser.go | 22 +- pkg/integrations/aws/aws.go | 9 +- pkg/integrations/aws/aws_test.go | 45 ++ pkg/integrations/aws/common/webhook.go | 19 + pkg/integrations/aws/resources.go | 7 + pkg/integrations/aws/sns/client.go | 402 +++++++++++++++ pkg/integrations/aws/sns/create_topic.go | 139 +++++ pkg/integrations/aws/sns/create_topic_test.go | 87 ++++ pkg/integrations/aws/sns/delete_topic.go | 135 +++++ pkg/integrations/aws/sns/delete_topic_test.go | 66 +++ pkg/integrations/aws/sns/example.go | 74 +++ .../sns/example_data_on_topic_message.json | 27 + .../aws/sns/example_output_create_topic.json | 17 + .../aws/sns/example_output_delete_topic.json | 8 + .../sns/example_output_get_subscription.json | 19 + .../aws/sns/example_output_get_topic.json | 17 + .../sns/example_output_publish_message.json | 8 + pkg/integrations/aws/sns/fields.go | 50 ++ pkg/integrations/aws/sns/get_subscription.go | 166 ++++++ .../aws/sns/get_subscription_test.go | 97 ++++ pkg/integrations/aws/sns/get_topic.go | 135 +++++ pkg/integrations/aws/sns/get_topic_test.go | 126 +++++ pkg/integrations/aws/sns/on_topic_message.go | 473 ++++++++++++++++++ .../aws/sns/on_topic_message_test.go | 320 ++++++++++++ pkg/integrations/aws/sns/publish_message.go | 214 ++++++++ .../aws/sns/publish_message_test.go | 180 +++++++ pkg/integrations/aws/sns/resources.go | 75 +++ pkg/integrations/aws/sns/types.go | 108 ++++ pkg/integrations/aws/sns/validation.go | 49 ++ pkg/integrations/aws/webhook_handler.go | 112 +++++ pkg/public/server.go | 1 - .../src/assets/icons/integrations/aws.sns.svg | 18 + .../src/pages/workflowv2/mappers/aws/index.ts | 17 + 
.../workflowv2/mappers/aws/sns/common.ts | 61 +++ .../mappers/aws/sns/create_topic.ts | 67 +++ .../mappers/aws/sns/delete_topic.ts | 60 +++ .../mappers/aws/sns/get_subscription.ts | 76 +++ .../workflowv2/mappers/aws/sns/get_topic.ts | 68 +++ .../mappers/aws/sns/on_topic_message.ts | 82 +++ .../mappers/aws/sns/publish_message.ts | 78 +++ .../src/ui/BuildingBlocksSidebar/index.tsx | 3 + .../ui/componentSidebar/integrationIcons.tsx | 3 + 43 files changed, 3940 insertions(+), 8 deletions(-) create mode 100644 pkg/integrations/aws/common/webhook.go create mode 100644 pkg/integrations/aws/sns/client.go create mode 100644 pkg/integrations/aws/sns/create_topic.go create mode 100644 pkg/integrations/aws/sns/create_topic_test.go create mode 100644 pkg/integrations/aws/sns/delete_topic.go create mode 100644 pkg/integrations/aws/sns/delete_topic_test.go create mode 100644 pkg/integrations/aws/sns/example.go create mode 100644 pkg/integrations/aws/sns/example_data_on_topic_message.json create mode 100644 pkg/integrations/aws/sns/example_output_create_topic.json create mode 100644 pkg/integrations/aws/sns/example_output_delete_topic.json create mode 100644 pkg/integrations/aws/sns/example_output_get_subscription.json create mode 100644 pkg/integrations/aws/sns/example_output_get_topic.json create mode 100644 pkg/integrations/aws/sns/example_output_publish_message.json create mode 100644 pkg/integrations/aws/sns/fields.go create mode 100644 pkg/integrations/aws/sns/get_subscription.go create mode 100644 pkg/integrations/aws/sns/get_subscription_test.go create mode 100644 pkg/integrations/aws/sns/get_topic.go create mode 100644 pkg/integrations/aws/sns/get_topic_test.go create mode 100644 pkg/integrations/aws/sns/on_topic_message.go create mode 100644 pkg/integrations/aws/sns/on_topic_message_test.go create mode 100644 pkg/integrations/aws/sns/publish_message.go create mode 100644 pkg/integrations/aws/sns/publish_message_test.go create mode 100644 
pkg/integrations/aws/sns/resources.go create mode 100644 pkg/integrations/aws/sns/types.go create mode 100644 pkg/integrations/aws/sns/validation.go create mode 100644 pkg/integrations/aws/webhook_handler.go create mode 100644 web_src/src/assets/icons/integrations/aws.sns.svg create mode 100644 web_src/src/pages/workflowv2/mappers/aws/sns/common.ts create mode 100644 web_src/src/pages/workflowv2/mappers/aws/sns/create_topic.ts create mode 100644 web_src/src/pages/workflowv2/mappers/aws/sns/delete_topic.ts create mode 100644 web_src/src/pages/workflowv2/mappers/aws/sns/get_subscription.ts create mode 100644 web_src/src/pages/workflowv2/mappers/aws/sns/get_topic.ts create mode 100644 web_src/src/pages/workflowv2/mappers/aws/sns/on_topic_message.ts create mode 100644 web_src/src/pages/workflowv2/mappers/aws/sns/publish_message.ts diff --git a/docs/components/AWS.mdx b/docs/components/AWS.mdx index 2204b13964..5f821fe4cd 100644 --- a/docs/components/AWS.mdx +++ b/docs/components/AWS.mdx @@ -11,6 +11,7 @@ Manage resources and execute AWS commands in workflows + import { CardGrid, LinkCard } from "@astrojs/starlight/components"; @@ -29,6 +30,11 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + + + + + ## Instructions @@ -262,6 +268,54 @@ Each image scan event includes: } ``` + + +## SNS • On Topic Message + +The On Topic Message trigger starts a workflow execution when a message is published to an AWS SNS topic. + +### Use Cases + +- **Event-driven automation**: React to messages published by external systems +- **Notification processing**: Handle SNS payloads in workflow steps +- **Routing and enrichment**: Trigger downstream workflows based on topic activity + +### How it works + +During setup, SuperPlane creates a webhook endpoint for this trigger and subscribes it to the selected SNS topic using HTTPS. SNS sends notification payloads to the webhook endpoint, which then emits workflow events. 
+ +### Example Data + +```json +{ + "data": { + "account": "123456789012", + "detail": { + "message": "{\"orderId\":\"ord_123\",\"status\":\"created\"}", + "messageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e", + "subject": "order.created", + "timestamp": "2026-01-10T10:00:00Z", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events" + }, + "message": "{\"orderId\":\"ord_123\",\"status\":\"created\"}", + "messageAttributes": { + "eventType": { + "Type": "String", + "Value": "order.created" + } + }, + "messageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e", + "region": "us-east-1", + "subject": "order.created", + "timestamp": "2026-01-10T10:00:00Z", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "type": "Notification" + }, + "timestamp": "2026-01-10T10:00:02.000000000Z", + "type": "aws.sns.topic.message" +} +``` + ## CodeArtifact • Copy Package Versions @@ -701,3 +755,157 @@ The Run Lambda component invokes a Lambda function. } ``` + + +## SNS • Create Topic + +The Create Topic component creates an AWS SNS topic and returns its metadata. + +### Use Cases + +- **Provisioning workflows**: Create topics as part of environment setup +- **Automation bootstrap**: Prepare topics before publishing messages +- **Self-service operations**: Provision messaging resources on demand + +### Example Output + +```json +{ + "data": { + "attributes": { + "DisplayName": "Orders Events", + "Owner": "123456789012", + "TopicArn": "arn:aws:sns:us-east-1:123456789012:orders-events" + }, + "contentBasedDeduplication": false, + "displayName": "Orders Events", + "fifoTopic": false, + "name": "orders-events", + "owner": "123456789012", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events" + }, + "timestamp": "2026-01-10T10:00:02.000000000Z", + "type": "aws.sns.topic" +} +``` + + + +## SNS • Delete Topic + +The Delete Topic component deletes an AWS SNS topic. 
+ +### Use Cases + +- **Cleanup workflows**: Remove temporary topics after execution +- **Lifecycle management**: Decommission unused messaging resources +- **Rollback automation**: Remove topics created in failed provisioning runs + +### Example Output + +```json +{ + "data": { + "deleted": true, + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events" + }, + "timestamp": "2026-01-10T10:00:02.000000000Z", + "type": "aws.sns.topic.deleted" +} +``` + + + +## SNS • Get Subscription + +The Get Subscription component retrieves metadata and attributes for an AWS SNS subscription. + +### Use Cases + +- **Subscription audits**: Inspect endpoint and delivery configuration +- **Workflow enrichment**: Load subscription metadata before downstream actions +- **Validation**: Confirm subscription existence and protocol + +### Example Output + +```json +{ + "data": { + "attributes": { + "Endpoint": "https://example.com/sns/events", + "Protocol": "https", + "RawMessageDelivery": "true", + "TopicArn": "arn:aws:sns:us-east-1:123456789012:orders-events" + }, + "endpoint": "https://example.com/sns/events", + "owner": "123456789012", + "pendingConfirmation": false, + "protocol": "https", + "rawMessageDelivery": true, + "subscriptionArn": "arn:aws:sns:us-east-1:123456789012:orders-events:7f8a3d50-f160-4d2d-8f8a-fb95d7f86a51", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events" + }, + "timestamp": "2026-01-10T10:00:02.000000000Z", + "type": "aws.sns.subscription" +} +``` + + + +## SNS • Get Topic + +The Get Topic component retrieves metadata and attributes for an AWS SNS topic. 
+ +### Use Cases + +- **Configuration audits**: Verify topic settings and attributes +- **Workflow enrichment**: Load topic metadata before downstream actions +- **Validation**: Confirm topic existence and ownership + +### Example Output + +```json +{ + "data": { + "attributes": { + "DisplayName": "Orders Events", + "Owner": "123456789012", + "TopicArn": "arn:aws:sns:us-east-1:123456789012:orders-events" + }, + "contentBasedDeduplication": false, + "displayName": "Orders Events", + "fifoTopic": false, + "name": "orders-events", + "owner": "123456789012", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events" + }, + "timestamp": "2026-01-10T10:00:02.000000000Z", + "type": "aws.sns.topic" +} +``` + + + +## SNS • Publish Message + +The Publish Message component sends a message to an AWS SNS topic. + +### Use Cases + +- **Event fan-out**: Broadcast workflow results to multiple subscribers +- **Notifications**: Send operational updates to users and systems +- **Automation**: Trigger downstream subscribers through SNS delivery + +### Example Output + +```json +{ + "data": { + "messageId": "a730a53a-a86d-5fcb-9ad1-ff72b8d0f104", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events" + }, + "timestamp": "2026-01-10T10:00:02.000000000Z", + "type": "aws.sns.message.published" +} +``` + diff --git a/pkg/grpc/query_parser.go b/pkg/grpc/query_parser.go index 3527b009ba..9fcb11d786 100644 --- a/pkg/grpc/query_parser.go +++ b/pkg/grpc/query_parser.go @@ -26,13 +26,23 @@ func (p *QueryParser) Parse(target proto.Message, values url.Values, filter *uti func populateListIntegrationResourcesParams(values url.Values, r *pb.ListIntegrationResourcesRequest) error { parameters := map[string]string{} - encodedParameters := values.Encode() - queryParams := strings.Split(encodedParameters, "&") - for _, queryParam := range queryParams { - parts := strings.Split(queryParam, "=") - if len(parts) == 2 { - parameters[parts[0]] = parts[1] + for key, vals := range values { + if 
len(vals) == 0 { + continue } + decodedVals := make([]string, 0, len(vals)) + for _, val := range vals { + decoded, err := url.QueryUnescape(val) + if err != nil { + decoded = val + } + decodedVals = append(decodedVals, decoded) + } + if len(decodedVals) == 1 { + parameters[key] = decodedVals[0] + continue + } + parameters[key] = strings.Join(decodedVals, ",") } r.Parameters = parameters diff --git a/pkg/integrations/aws/aws.go b/pkg/integrations/aws/aws.go index 135915f9a5..cec64c6c56 100644 --- a/pkg/integrations/aws/aws.go +++ b/pkg/integrations/aws/aws.go @@ -23,6 +23,7 @@ import ( "github.com/superplanehq/superplane/pkg/integrations/aws/eventbridge" "github.com/superplanehq/superplane/pkg/integrations/aws/iam" "github.com/superplanehq/superplane/pkg/integrations/aws/lambda" + "github.com/superplanehq/superplane/pkg/integrations/aws/sns" "github.com/superplanehq/superplane/pkg/registry" ) @@ -33,7 +34,7 @@ const ( ) func init() { - registry.RegisterIntegration("aws", &AWS{}) + registry.RegisterIntegrationWithWebhookHandler("aws", &AWS{}, &WebhookHandler{}) } type AWS struct{} @@ -138,6 +139,11 @@ func (a *AWS) Components() []core.Component { &codeartifact.DisposePackageVersions{}, &codeartifact.GetPackageVersion{}, &codeartifact.UpdatePackageVersionsStatus{}, + &sns.GetTopic{}, + &sns.GetSubscription{}, + &sns.CreateTopic{}, + &sns.DeleteTopic{}, + &sns.PublishMessage{}, &ecr.GetImage{}, &ecr.GetImageScanFindings{}, &ecr.ScanImage{}, @@ -151,6 +157,7 @@ func (a *AWS) Triggers() []core.Trigger { &codeartifact.OnPackageVersion{}, &ecr.OnImageScan{}, &ecr.OnImagePush{}, + &sns.OnTopicMessage{}, } } diff --git a/pkg/integrations/aws/aws_test.go b/pkg/integrations/aws/aws_test.go index 50970400be..1b3cd58af3 100644 --- a/pkg/integrations/aws/aws_test.go +++ b/pkg/integrations/aws/aws_test.go @@ -340,6 +340,51 @@ func Test__AWS__ListResources(t *testing.T) { require.Len(t, httpContext.Requests, 1) assert.Equal(t, "https://api.ecr.us-east-1.amazonaws.com/", 
httpContext.Requests[0].URL.String()) }) + + t.Run("sns.topic returns topics", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + + + + + arn:aws:sns:us-east-1:123456789012:orders-events + + + + + `)), + }, + }, + } + + integrationCtx := &contexts.IntegrationContext{ + Secrets: map[string]core.IntegrationSecret{ + "accessKeyId": {Name: "accessKeyId", Value: []byte("key")}, + "secretAccessKey": {Name: "secretAccessKey", Value: []byte("secret")}, + "sessionToken": {Name: "sessionToken", Value: []byte("token")}, + }, + } + + resources, err := a.ListResources("sns.topic", core.ListResourcesContext{ + Integration: integrationCtx, + Logger: logrus.NewEntry(logrus.New()), + HTTP: httpContext, + Parameters: map[string]string{"region": "us-east-1"}, + }) + + require.NoError(t, err) + require.Len(t, resources, 1) + assert.Equal(t, "sns.topic", resources[0].Type) + assert.Equal(t, "orders-events", resources[0].Name) + assert.Equal(t, "arn:aws:sns:us-east-1:123456789012:orders-events", resources[0].ID) + + require.Len(t, httpContext.Requests, 1) + assert.Equal(t, "https://sns.us-east-1.amazonaws.com/", httpContext.Requests[0].URL.String()) + }) } func stsResponse(token string, expiration string) string { diff --git a/pkg/integrations/aws/common/webhook.go b/pkg/integrations/aws/common/webhook.go new file mode 100644 index 0000000000..6f71b819d0 --- /dev/null +++ b/pkg/integrations/aws/common/webhook.go @@ -0,0 +1,19 @@ +package common + +const ( + WebhookTypeSNS = "sns" +) + +type WebhookConfiguration struct { + Region string `json:"region"` + Type string `json:"type"` + SNS *SNSWebhookConfiguration `json:"sns"` +} + +type SNSWebhookConfiguration struct { + TopicArn string `json:"topicArn"` +} + +type SNSWebhookMetadata struct { + SubscriptionArn string `json:"subscriptionArn"` +} diff --git a/pkg/integrations/aws/resources.go b/pkg/integrations/aws/resources.go 
index 7d0590db02..2b29f97732 100644 --- a/pkg/integrations/aws/resources.go +++ b/pkg/integrations/aws/resources.go @@ -5,6 +5,7 @@ import ( "github.com/superplanehq/superplane/pkg/integrations/aws/codeartifact" "github.com/superplanehq/superplane/pkg/integrations/aws/ecr" "github.com/superplanehq/superplane/pkg/integrations/aws/lambda" + "github.com/superplanehq/superplane/pkg/integrations/aws/sns" ) func (a *AWS) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { @@ -21,6 +22,12 @@ func (a *AWS) ListResources(resourceType string, ctx core.ListResourcesContext) case "codeartifact.domain": return codeartifact.ListDomains(ctx, resourceType) + case "sns.topic": + return sns.ListTopics(ctx, resourceType) + + case "sns.subscription": + return sns.ListSubscriptions(ctx, resourceType) + default: return []core.IntegrationResource{}, nil } diff --git a/pkg/integrations/aws/sns/client.go b/pkg/integrations/aws/sns/client.go new file mode 100644 index 0000000000..5f05804bc6 --- /dev/null +++ b/pkg/integrations/aws/sns/client.go @@ -0,0 +1,402 @@ +package sns + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "encoding/xml" + "fmt" + "io" + "net/http" + "net/url" + "sort" + "strconv" + "strings" + "time" + + "github.com/aws/aws-sdk-go-v2/aws" + v4 "github.com/aws/aws-sdk-go-v2/aws/signer/v4" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/integrations/aws/common" +) + +const ( + snsServiceName = "sns" + snsAPIVersion = "2010-03-31" + snsContentType = "application/x-www-form-urlencoded; charset=utf-8" +) + +// Client provides lightweight SNS API operations through signed HTTP requests. +type Client struct { + http core.HTTPContext + region string + endpoint string + credentials *aws.Credentials + signer *v4.Signer +} + +// NewClient creates a region-scoped SNS client. 
+func NewClient(httpCtx core.HTTPContext, credentials *aws.Credentials, region string) *Client { + normalizedRegion := strings.TrimSpace(region) + return &Client{ + http: httpCtx, + region: normalizedRegion, + endpoint: fmt.Sprintf("https://sns.%s.amazonaws.com/", normalizedRegion), + credentials: credentials, + signer: v4.NewSigner(), + } +} + +// GetTopic returns a topic with normalized attributes from GetTopicAttributes. +func (c *Client) GetTopic(topicArn string) (*Topic, error) { + params := map[string]string{ + "TopicArn": topicArn, + } + + var response getTopicAttributesResponse + if err := c.postForm("GetTopicAttributes", params, &response); err != nil { + return nil, fmt.Errorf("sns client: failed to get topic attributes for %q: %w", topicArn, err) + } + + attributes := attributeEntriesToMap(response.Entries) + return &Topic{ + TopicArn: topicArn, + Name: topicNameFromArn(topicArn), + DisplayName: strings.TrimSpace(attributes["DisplayName"]), + Owner: strings.TrimSpace(attributes["Owner"]), + KmsMasterKeyID: strings.TrimSpace(attributes["KmsMasterKeyId"]), + FifoTopic: boolAttribute(attributes, "FifoTopic"), + ContentBasedDeduplication: boolAttribute(attributes, "ContentBasedDeduplication"), + Attributes: attributes, + }, nil +} + +func (c *Client) CreateTopic(name string) (*Topic, error) { + params := map[string]string{ + "Name": name, + } + + var response createTopicResponse + if err := c.postForm("CreateTopic", params, &response); err != nil { + return nil, fmt.Errorf("sns client: failed to create topic %q: %w", name, err) + } + + topic, err := c.GetTopic(strings.TrimSpace(response.TopicArn)) + if err != nil { + return nil, fmt.Errorf("sns client: failed to load created topic %q: %w", name, err) + } + + return topic, nil +} + +// DeleteTopic deletes the topic associated with the provided ARN. 
+func (c *Client) DeleteTopic(topicArn string) error { + if err := c.postForm("DeleteTopic", map[string]string{ + "TopicArn": topicArn, + }, nil); err != nil { + return fmt.Errorf("sns client: failed to delete topic %q: %w", topicArn, err) + } + + return nil +} + +// PublishMessage publishes a message to a topic and returns publish metadata. +func (c *Client) PublishMessage(parameters PublishMessageParameters) (*PublishResult, error) { + params := map[string]string{ + "TopicArn": parameters.TopicArn, + "Message": parameters.Message, + } + + if subject := strings.TrimSpace(parameters.Subject); subject != "" { + params["Subject"] = subject + } + + for index, key := range sortedKeys(parameters.MessageAttributes) { + entry := strconv.Itoa(index + 1) + value := parameters.MessageAttributes[key] + params["MessageAttributes.entry."+entry+".Name"] = key + params["MessageAttributes.entry."+entry+".Value.DataType"] = "String" + params["MessageAttributes.entry."+entry+".Value.StringValue"] = value + } + + var response publishResponse + if err := c.postForm("Publish", params, &response); err != nil { + return nil, fmt.Errorf("sns client: failed to publish message to topic %q: %w", parameters.TopicArn, err) + } + + return &PublishResult{ + MessageID: strings.TrimSpace(response.MessageID), + SequenceNumber: strings.TrimSpace(response.SequenceNumber), + TopicArn: parameters.TopicArn, + }, nil +} + +// GetSubscription returns a subscription with normalized attributes. 
+func (c *Client) GetSubscription(subscriptionArn string) (*Subscription, error) { + params := map[string]string{ + "SubscriptionArn": subscriptionArn, + } + + var response getSubscriptionAttributesResponse + if err := c.postForm("GetSubscriptionAttributes", params, &response); err != nil { + return nil, fmt.Errorf("sns client: failed to get subscription attributes for %q: %w", subscriptionArn, err) + } + + attributes := attributeEntriesToMap(response.Entries) + return &Subscription{ + SubscriptionArn: subscriptionArn, + TopicArn: strings.TrimSpace(attributes["TopicArn"]), + Protocol: strings.TrimSpace(attributes["Protocol"]), + Endpoint: strings.TrimSpace(attributes["Endpoint"]), + Owner: strings.TrimSpace(attributes["Owner"]), + PendingConfirmation: boolAttribute(attributes, "PendingConfirmation"), + RawMessageDelivery: boolAttribute(attributes, "RawMessageDelivery"), + Attributes: attributes, + }, nil +} + +// Subscribe creates an SNS subscription and returns the resulting metadata. +func (c *Client) Subscribe(parameters SubscribeParameters) (*Subscription, error) { + params := map[string]string{ + "TopicArn": parameters.TopicArn, + "Protocol": parameters.Protocol, + "Endpoint": parameters.Endpoint, + } + + if parameters.ReturnSubscriptionARN { + params["ReturnSubscriptionArn"] = "true" + } + + for index, key := range sortedKeys(parameters.Attributes) { + entry := strconv.Itoa(index + 1) + params["Attributes.entry."+entry+".key"] = key + params["Attributes.entry."+entry+".value"] = parameters.Attributes[key] + } + + var response subscribeResponse + if err := c.postForm("Subscribe", params, &response); err != nil { + return nil, fmt.Errorf("sns client: failed to subscribe endpoint %q to topic %q: %w", parameters.Endpoint, parameters.TopicArn, err) + } + + subscriptionArn := strings.TrimSpace(response.SubscriptionArn) + if strings.EqualFold(subscriptionArn, "pending confirmation") { + return &Subscription{ + SubscriptionArn: subscriptionArn, + TopicArn: 
parameters.TopicArn, + Protocol: parameters.Protocol, + Endpoint: parameters.Endpoint, + PendingConfirmation: true, + Attributes: map[string]string{}, + }, nil + } + + subscription, err := c.GetSubscription(subscriptionArn) + if err != nil { + return nil, fmt.Errorf("sns client: failed to load subscription %q: %w", subscriptionArn, err) + } + + return subscription, nil +} + +// Unsubscribe removes a subscription identified by the provided ARN. +func (c *Client) Unsubscribe(subscriptionArn string) error { + if err := c.postForm("Unsubscribe", map[string]string{ + "SubscriptionArn": subscriptionArn, + }, nil); err != nil { + return fmt.Errorf("sns client: failed to unsubscribe %q: %w", subscriptionArn, err) + } + + return nil +} + +// ListTopics returns all topics in the configured region. +func (c *Client) ListTopics() ([]Topic, error) { + topics := []Topic{} + nextToken := "" + + for { + params := map[string]string{} + if nextToken != "" { + params["NextToken"] = nextToken + } + + var response listTopicsResponse + if err := c.postForm("ListTopics", params, &response); err != nil { + return nil, fmt.Errorf("failed to list topics in region %q: %w", c.region, err) + } + + for _, item := range response.Topics { + topicArn := strings.TrimSpace(item.TopicArn) + if topicArn == "" { + continue + } + + topics = append(topics, Topic{ + TopicArn: topicArn, + Name: topicNameFromArn(topicArn), + }) + } + + nextToken = strings.TrimSpace(response.NextToken) + if nextToken == "" { + return topics, nil + } + } +} + +func (c *Client) ListSubscriptionsByTopic(topicArn string) ([]Subscription, error) { + subscriptions := []Subscription{} + nextToken := "" + baseParams := map[string]string{ + "TopicArn": topicArn, + } + + for { + params := map[string]string{} + for key, value := range baseParams { + params[key] = value + } + if nextToken != "" { + params["NextToken"] = nextToken + } + + var response listSubscriptionsResponse + if err := c.postForm("ListSubscriptionsByTopic", params, 
&response); err != nil { + return nil, fmt.Errorf("failed to list subscriptions: %w", err) + } + + for _, item := range response.SubscriptionsTopic { + subscriptions = append(subscriptions, Subscription{ + SubscriptionArn: item.SubscriptionArn, + TopicArn: topicArn, + }) + } + + if response.NextToken == "" { + return subscriptions, nil + } + } +} + +// postForm sends a signed SNS query request and decodes XML responses. +func (c *Client) postForm(action string, params map[string]string, out any) error { + values := url.Values{} + values.Set("Action", action) + values.Set("Version", snsAPIVersion) + for key, value := range params { + values.Set(key, value) + } + + body := values.Encode() + request, err := http.NewRequest(http.MethodPost, c.endpoint, strings.NewReader(body)) + if err != nil { + return fmt.Errorf("sns client: failed to build %s request: %w", action, err) + } + + request.Header.Set("Content-Type", snsContentType) + + if err := c.signRequest(request, []byte(body)); err != nil { + return fmt.Errorf("sns client: failed to sign %s request: %w", action, err) + } + + response, err := c.http.Do(request) + if err != nil { + return fmt.Errorf("sns client: %s request failed: %w", action, err) + } + defer response.Body.Close() + + responseBody, err := io.ReadAll(response.Body) + if err != nil { + return fmt.Errorf("sns client: failed to read %s response body: %w", action, err) + } + + if response.StatusCode < http.StatusOK || response.StatusCode >= http.StatusMultipleChoices { + if awsErr := parseSNSError(responseBody); awsErr != nil { + return fmt.Errorf("sns client: %s request failed: %w", action, awsErr) + } + return fmt.Errorf("sns client: %s request failed with status %d: %s", action, response.StatusCode, string(responseBody)) + } + + if out == nil { + return nil + } + + if err := xml.Unmarshal(responseBody, out); err != nil { + return fmt.Errorf("sns client: failed to decode %s response: %w", action, err) + } + + return nil +} + +// signRequest signs a 
request using SigV4 for SNS. +func (c *Client) signRequest(request *http.Request, payload []byte) error { + hash := sha256.Sum256(payload) + payloadHash := hex.EncodeToString(hash[:]) + return c.signer.SignHTTP(context.Background(), *c.credentials, request, payloadHash, snsServiceName, c.region, time.Now()) +} + +// attributeEntriesToMap converts XML attribute entries into a normalized map. +func attributeEntriesToMap(entries []attributeEntry) map[string]string { + attributes := make(map[string]string, len(entries)) + for _, entry := range entries { + key := strings.TrimSpace(entry.Key) + if key == "" { + continue + } + attributes[key] = strings.TrimSpace(entry.Value) + } + return attributes +} + +// sortedKeys returns sorted keys for deterministic query-parameter generation. +func sortedKeys(values map[string]string) []string { + if len(values) == 0 { + return nil + } + + var keys []string + for key := range values { + keys = append(keys, key) + } + sort.Strings(keys) + return keys +} + +// parseSNSError extracts AWS error information from SNS XML responses. 
+func parseSNSError(body []byte) *common.Error { + var payload snsErrorPayload + if err := xml.Unmarshal(body, &payload); err != nil { + return nil + } + + code := strings.TrimSpace(payload.Error.Code) + message := strings.TrimSpace(payload.Error.Message) + if code == "" && message == "" { + return nil + } + + return &common.Error{Code: code, Message: message} +} + +func topicNameFromArn(topicArn string) string { + parts := strings.Split(strings.TrimSpace(topicArn), ":") + if len(parts) == 0 { + return strings.TrimSpace(topicArn) + } + + name := strings.TrimSpace(parts[len(parts)-1]) + if name == "" { + return strings.TrimSpace(topicArn) + } + + return name +} + +func boolAttribute(attributes map[string]string, key string) bool { + value, ok := attributes[key] + if !ok { + return false + } + + return strings.EqualFold(strings.TrimSpace(value), "true") +} diff --git a/pkg/integrations/aws/sns/create_topic.go b/pkg/integrations/aws/sns/create_topic.go new file mode 100644 index 0000000000..9f3dfdd924 --- /dev/null +++ b/pkg/integrations/aws/sns/create_topic.go @@ -0,0 +1,139 @@ +package sns + +import ( + "fmt" + "net/http" + "strings" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/integrations/aws/common" +) + +type CreateTopic struct{} + +type CreateTopicConfiguration struct { + Region string `json:"region" mapstructure:"region"` + Name string `json:"name" mapstructure:"name"` +} + +func (c *CreateTopic) Name() string { + return "aws.sns.createTopic" +} + +func (c *CreateTopic) Label() string { + return "SNS • Create Topic" +} + +func (c *CreateTopic) Description() string { + return "Create an AWS SNS topic" +} + +func (c *CreateTopic) Documentation() string { + return `The Create Topic component creates an AWS SNS topic and returns its metadata. 
+ +## Use Cases + +- **Provisioning workflows**: Create topics as part of environment setup +- **Automation bootstrap**: Prepare topics before publishing messages +- **Self-service operations**: Provision messaging resources on demand` +} + +func (c *CreateTopic) Icon() string { + return "aws" +} + +func (c *CreateTopic) Color() string { + return "gray" +} + +func (c *CreateTopic) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *CreateTopic) Configuration() []configuration.Field { + return []configuration.Field{ + regionField(), + { + Name: "name", + Label: "Topic Name", + Type: configuration.FieldTypeString, + Required: true, + Description: "Name of the SNS topic to create", + VisibilityConditions: []configuration.VisibilityCondition{ + { + Field: "region", + Values: []string{"*"}, + }, + }, + }, + } +} + +func (c *CreateTopic) Setup(ctx core.SetupContext) error { + var config CreateTopicConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode setup configuration: %w", err) + } + + if _, err := requireRegion(config.Region); err != nil { + return fmt.Errorf("invalid region: %w", err) + } + + name := strings.TrimSpace(config.Name) + if name == "" { + return fmt.Errorf("topic name is required") + } + + return nil +} + +func (c *CreateTopic) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *CreateTopic) Execute(ctx core.ExecutionContext) error { + var config CreateTopicConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode execution configuration: %w", err) + } + + credentials, err := common.CredentialsFromInstallation(ctx.Integration) + if err != nil { + return fmt.Errorf("failed to load AWS credentials from integration: %w", err) + } + + client := NewClient(ctx.HTTP, credentials, 
config.Region) + topic, err := client.CreateTopic(config.Name) + if err != nil { + return fmt.Errorf("failed to create topic %q: %w", config.Name, err) + } + + if err := ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, "aws.sns.topic", []any{topic}); err != nil { + return fmt.Errorf("failed to emit created topic payload: %w", err) + } + + return nil +} + +func (c *CreateTopic) Actions() []core.Action { + return []core.Action{} +} + +func (c *CreateTopic) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *CreateTopic) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *CreateTopic) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *CreateTopic) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/aws/sns/create_topic_test.go b/pkg/integrations/aws/sns/create_topic_test.go new file mode 100644 index 0000000000..f9921595ba --- /dev/null +++ b/pkg/integrations/aws/sns/create_topic_test.go @@ -0,0 +1,87 @@ +package sns + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__CreateTopic__Setup(t *testing.T) { + component := &CreateTopic{} + + t.Run("missing name -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "us-east-1", + }, + }) + require.ErrorContains(t, err, "topic name is required") + }) +} + +func Test__CreateTopic__Execute(t *testing.T) { + component := &CreateTopic{} + + t.Run("valid request -> emits created topic payload", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + + + arn:aws:sns:us-east-1:123456789012:orders-events + + + `)), + }, + 
{ + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + + + + DisplayNameOrders Events + + + + `)), + }, + }, + } + + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "name": "orders-events", + "attributes": map[string]any{ + "DisplayName": "Orders Events", + }, + }, + HTTP: httpContext, + ExecutionState: executionState, + Integration: &contexts.IntegrationContext{ + Secrets: map[string]core.IntegrationSecret{ + "accessKeyId": {Name: "accessKeyId", Value: []byte("key")}, + "secretAccessKey": {Name: "secretAccessKey", Value: []byte("secret")}, + "sessionToken": {Name: "sessionToken", Value: []byte("token")}, + }, + }, + }) + + require.NoError(t, err) + require.Len(t, executionState.Payloads, 1) + payload := executionState.Payloads[0].(map[string]any)["data"] + topic, ok := payload.(*Topic) + require.True(t, ok) + assert.Equal(t, "orders-events", topic.Name) + assert.Equal(t, "Orders Events", topic.DisplayName) + }) +} diff --git a/pkg/integrations/aws/sns/delete_topic.go b/pkg/integrations/aws/sns/delete_topic.go new file mode 100644 index 0000000000..a53eab8fb5 --- /dev/null +++ b/pkg/integrations/aws/sns/delete_topic.go @@ -0,0 +1,135 @@ +package sns + +import ( + "fmt" + "net/http" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/integrations/aws/common" +) + +type DeleteTopic struct{} + +type DeleteTopicConfiguration struct { + Region string `json:"region" mapstructure:"region"` + TopicArn string `json:"topicArn" mapstructure:"topicArn"` +} + +func (c *DeleteTopic) Name() string { + return "aws.sns.deleteTopic" +} + +func (c *DeleteTopic) Label() string { + return "SNS • Delete Topic" +} + +func (c *DeleteTopic) Description() string { + return 
"Delete an AWS SNS topic" +} + +func (c *DeleteTopic) Documentation() string { + return `The Delete Topic component deletes an AWS SNS topic. + +## Use Cases + +- **Cleanup workflows**: Remove temporary topics after execution +- **Lifecycle management**: Decommission unused messaging resources +- **Rollback automation**: Remove topics created in failed provisioning runs` +} + +func (c *DeleteTopic) Icon() string { + return "aws" +} + +func (c *DeleteTopic) Color() string { + return "gray" +} + +func (c *DeleteTopic) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *DeleteTopic) Configuration() []configuration.Field { + return []configuration.Field{ + regionField(), + topicField(), + } +} + +func (c *DeleteTopic) Setup(ctx core.SetupContext) error { + var config DeleteTopicConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("%s: failed to decode setup configuration: %w", c.Name(), err) + } + + if _, err := requireRegion(config.Region); err != nil { + return fmt.Errorf("invalid region: %w", err) + } + + if _, err := requireTopicArn(config.TopicArn); err != nil { + return fmt.Errorf("invalid topic ARN: %w", err) + } + + return nil +} + +func (c *DeleteTopic) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *DeleteTopic) Execute(ctx core.ExecutionContext) error { + var config DeleteTopicConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("%s: failed to decode execution configuration: %w", c.Name(), err) + } + + region, err := requireRegion(config.Region) + if err != nil { + return fmt.Errorf("invalid region: %w", err) + } + + topicArn, err := requireTopicArn(config.TopicArn) + if err != nil { + return fmt.Errorf("invalid topic ARN: %w", err) + } + + credentials, err := 
common.CredentialsFromInstallation(ctx.Integration) + if err != nil { + return fmt.Errorf("failed to load AWS credentials from integration: %w", err) + } + + client := NewClient(ctx.HTTP, credentials, region) + if err := client.DeleteTopic(topicArn); err != nil { + return fmt.Errorf("failed to delete topic %q: %w", topicArn, err) + } + + return ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, "aws.sns.topic.deleted", []any{ + map[string]any{ + "topicArn": topicArn, + "deleted": true, + }, + }) +} + +func (c *DeleteTopic) Actions() []core.Action { + return []core.Action{} +} + +func (c *DeleteTopic) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *DeleteTopic) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *DeleteTopic) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *DeleteTopic) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/aws/sns/delete_topic_test.go b/pkg/integrations/aws/sns/delete_topic_test.go new file mode 100644 index 0000000000..6305bd369a --- /dev/null +++ b/pkg/integrations/aws/sns/delete_topic_test.go @@ -0,0 +1,66 @@ +package sns + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__DeleteTopic__Setup(t *testing.T) { + component := &DeleteTopic{} + + t.Run("missing topic arn -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "us-east-1", + }, + }) + require.ErrorContains(t, err, "topic ARN is required") + }) +} + +func Test__DeleteTopic__Execute(t *testing.T) { + component := &DeleteTopic{} + + t.Run("valid request -> emits deleted payload", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + 
{ + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + + `)), + }, + }, + } + + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + HTTP: httpContext, + ExecutionState: executionState, + Integration: &contexts.IntegrationContext{ + Secrets: map[string]core.IntegrationSecret{ + "accessKeyId": {Name: "accessKeyId", Value: []byte("key")}, + "secretAccessKey": {Name: "secretAccessKey", Value: []byte("secret")}, + "sessionToken": {Name: "sessionToken", Value: []byte("token")}, + }, + }, + }) + + require.NoError(t, err) + require.Len(t, executionState.Payloads, 1) + payload := executionState.Payloads[0].(map[string]any)["data"].(map[string]any) + assert.Equal(t, "arn:aws:sns:us-east-1:123456789012:orders-events", payload["topicArn"]) + assert.Equal(t, true, payload["deleted"]) + }) +} diff --git a/pkg/integrations/aws/sns/example.go b/pkg/integrations/aws/sns/example.go new file mode 100644 index 0000000000..6f89459781 --- /dev/null +++ b/pkg/integrations/aws/sns/example.go @@ -0,0 +1,74 @@ +package sns + +import ( + _ "embed" + "sync" + + "github.com/superplanehq/superplane/pkg/utils" +) + +//go:embed example_data_on_topic_message.json +var exampleDataOnTopicMessageBytes []byte + +//go:embed example_output_get_topic.json +var exampleOutputGetTopicBytes []byte + +//go:embed example_output_get_subscription.json +var exampleOutputGetSubscriptionBytes []byte + +//go:embed example_output_create_topic.json +var exampleOutputCreateTopicBytes []byte + +//go:embed example_output_delete_topic.json +var exampleOutputDeleteTopicBytes []byte + +//go:embed example_output_publish_message.json +var exampleOutputPublishMessageBytes []byte + +var exampleDataOnTopicMessageOnce sync.Once +var exampleDataOnTopicMessage map[string]any + +var exampleOutputGetTopicOnce 
sync.Once +var exampleOutputGetTopic map[string]any + +var exampleOutputGetSubscriptionOnce sync.Once +var exampleOutputGetSubscription map[string]any + +var exampleOutputCreateTopicOnce sync.Once +var exampleOutputCreateTopic map[string]any + +var exampleOutputDeleteTopicOnce sync.Once +var exampleOutputDeleteTopic map[string]any + +var exampleOutputPublishMessageOnce sync.Once +var exampleOutputPublishMessage map[string]any + +// ExampleData returns an example payload for OnTopicMessage events. +func (t *OnTopicMessage) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnTopicMessageOnce, exampleDataOnTopicMessageBytes, &exampleDataOnTopicMessage) +} + +// ExampleOutput returns an example payload for GetTopic. +func (c *GetTopic) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputGetTopicOnce, exampleOutputGetTopicBytes, &exampleOutputGetTopic) +} + +// ExampleOutput returns an example payload for GetSubscription. +func (c *GetSubscription) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputGetSubscriptionOnce, exampleOutputGetSubscriptionBytes, &exampleOutputGetSubscription) +} + +// ExampleOutput returns an example payload for CreateTopic. +func (c *CreateTopic) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateTopicOnce, exampleOutputCreateTopicBytes, &exampleOutputCreateTopic) +} + +// ExampleOutput returns an example payload for DeleteTopic. +func (c *DeleteTopic) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputDeleteTopicOnce, exampleOutputDeleteTopicBytes, &exampleOutputDeleteTopic) +} + +// ExampleOutput returns an example payload for PublishMessage. 
+func (c *PublishMessage) ExampleOutput() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleOutputPublishMessageOnce, exampleOutputPublishMessageBytes, &exampleOutputPublishMessage) +} diff --git a/pkg/integrations/aws/sns/example_data_on_topic_message.json b/pkg/integrations/aws/sns/example_data_on_topic_message.json new file mode 100644 index 0000000000..5612e78e20 --- /dev/null +++ b/pkg/integrations/aws/sns/example_data_on_topic_message.json @@ -0,0 +1,27 @@ +{ + "data": { + "type": "Notification", + "messageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "subject": "order.created", + "message": "{\"orderId\":\"ord_123\",\"status\":\"created\"}", + "timestamp": "2026-01-10T10:00:00Z", + "region": "us-east-1", + "account": "123456789012", + "messageAttributes": { + "eventType": { + "Type": "String", + "Value": "order.created" + } + }, + "detail": { + "messageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "subject": "order.created", + "message": "{\"orderId\":\"ord_123\",\"status\":\"created\"}", + "timestamp": "2026-01-10T10:00:00Z" + } + }, + "timestamp": "2026-01-10T10:00:02.000000000Z", + "type": "aws.sns.topic.message" +} diff --git a/pkg/integrations/aws/sns/example_output_create_topic.json b/pkg/integrations/aws/sns/example_output_create_topic.json new file mode 100644 index 0000000000..f33324b819 --- /dev/null +++ b/pkg/integrations/aws/sns/example_output_create_topic.json @@ -0,0 +1,17 @@ +{ + "data": { + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "name": "orders-events", + "displayName": "Orders Events", + "owner": "123456789012", + "fifoTopic": false, + "contentBasedDeduplication": false, + "attributes": { + "DisplayName": "Orders Events", + "Owner": "123456789012", + "TopicArn": "arn:aws:sns:us-east-1:123456789012:orders-events" + } + }, + "timestamp": "2026-01-10T10:00:02.000000000Z", + 
"type": "aws.sns.topic" +} diff --git a/pkg/integrations/aws/sns/example_output_delete_topic.json b/pkg/integrations/aws/sns/example_output_delete_topic.json new file mode 100644 index 0000000000..0aae69870b --- /dev/null +++ b/pkg/integrations/aws/sns/example_output_delete_topic.json @@ -0,0 +1,8 @@ +{ + "data": { + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "deleted": true + }, + "timestamp": "2026-01-10T10:00:02.000000000Z", + "type": "aws.sns.topic.deleted" +} diff --git a/pkg/integrations/aws/sns/example_output_get_subscription.json b/pkg/integrations/aws/sns/example_output_get_subscription.json new file mode 100644 index 0000000000..1cc20eb667 --- /dev/null +++ b/pkg/integrations/aws/sns/example_output_get_subscription.json @@ -0,0 +1,19 @@ +{ + "data": { + "subscriptionArn": "arn:aws:sns:us-east-1:123456789012:orders-events:7f8a3d50-f160-4d2d-8f8a-fb95d7f86a51", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "protocol": "https", + "endpoint": "https://example.com/sns/events", + "owner": "123456789012", + "pendingConfirmation": false, + "rawMessageDelivery": true, + "attributes": { + "TopicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "Protocol": "https", + "Endpoint": "https://example.com/sns/events", + "RawMessageDelivery": "true" + } + }, + "timestamp": "2026-01-10T10:00:02.000000000Z", + "type": "aws.sns.subscription" +} diff --git a/pkg/integrations/aws/sns/example_output_get_topic.json b/pkg/integrations/aws/sns/example_output_get_topic.json new file mode 100644 index 0000000000..f33324b819 --- /dev/null +++ b/pkg/integrations/aws/sns/example_output_get_topic.json @@ -0,0 +1,17 @@ +{ + "data": { + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "name": "orders-events", + "displayName": "Orders Events", + "owner": "123456789012", + "fifoTopic": false, + "contentBasedDeduplication": false, + "attributes": { + "DisplayName": "Orders Events", + "Owner": "123456789012", + "TopicArn": 
"arn:aws:sns:us-east-1:123456789012:orders-events" + } + }, + "timestamp": "2026-01-10T10:00:02.000000000Z", + "type": "aws.sns.topic" +} diff --git a/pkg/integrations/aws/sns/example_output_publish_message.json b/pkg/integrations/aws/sns/example_output_publish_message.json new file mode 100644 index 0000000000..76437454be --- /dev/null +++ b/pkg/integrations/aws/sns/example_output_publish_message.json @@ -0,0 +1,8 @@ +{ + "data": { + "messageId": "a730a53a-a86d-5fcb-9ad1-ff72b8d0f104", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events" + }, + "timestamp": "2026-01-10T10:00:02.000000000Z", + "type": "aws.sns.message.published" +} diff --git a/pkg/integrations/aws/sns/fields.go b/pkg/integrations/aws/sns/fields.go new file mode 100644 index 0000000000..60c91e6975 --- /dev/null +++ b/pkg/integrations/aws/sns/fields.go @@ -0,0 +1,50 @@ +package sns + +import ( + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/integrations/aws/common" +) + +func regionField() configuration.Field { + return configuration.Field{ + Name: "region", + Label: "Region", + Type: configuration.FieldTypeSelect, + Required: true, + Default: "us-east-1", + TypeOptions: &configuration.TypeOptions{ + Select: &configuration.SelectTypeOptions{ + Options: common.AllRegions, + }, + }, + } +} + +func topicField() configuration.Field { + return configuration.Field{ + Name: "topicArn", + Label: "Topic", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + Description: "ARN of the SNS topic", + VisibilityConditions: []configuration.VisibilityCondition{ + { + Field: "region", + Values: []string{"*"}, + }, + }, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "sns.topic", + Parameters: []configuration.ParameterRef{ + { + Name: "region", + ValueFrom: &configuration.ParameterValueFrom{ + Field: "region", + }, + }, + }, + }, + }, + } +} diff --git 
a/pkg/integrations/aws/sns/get_subscription.go b/pkg/integrations/aws/sns/get_subscription.go new file mode 100644 index 0000000000..7ee1017b37 --- /dev/null +++ b/pkg/integrations/aws/sns/get_subscription.go @@ -0,0 +1,166 @@ +package sns + +import ( + "fmt" + "net/http" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/integrations/aws/common" +) + +type GetSubscription struct{} + +type GetSubscriptionConfiguration struct { + Region string `json:"region" mapstructure:"region"` + TopicArn string `json:"topicArn" mapstructure:"topicArn"` + SubscriptionArn string `json:"subscriptionArn" mapstructure:"subscriptionArn"` +} + +func (c *GetSubscription) Name() string { + return "aws.sns.getSubscription" +} + +func (c *GetSubscription) Label() string { + return "SNS • Get Subscription" +} + +func (c *GetSubscription) Description() string { + return "Get an AWS SNS subscription by ARN" +} + +func (c *GetSubscription) Documentation() string { + return `The Get Subscription component retrieves metadata and attributes for an AWS SNS subscription. 
+ +## Use Cases + +- **Subscription audits**: Inspect endpoint and delivery configuration +- **Workflow enrichment**: Load subscription metadata before downstream actions +- **Validation**: Confirm subscription existence and protocol` +} + +func (c *GetSubscription) Icon() string { + return "aws" +} + +func (c *GetSubscription) Color() string { + return "gray" +} + +func (c *GetSubscription) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *GetSubscription) Configuration() []configuration.Field { + return []configuration.Field{ + regionField(), + topicField(), + { + Name: "subscriptionArn", + Label: "Subscription", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + Description: "ARN of the SNS subscription", + VisibilityConditions: []configuration.VisibilityCondition{ + { + Field: "region", + Values: []string{"*"}, + }, + { + Field: "topicArn", + Values: []string{"*"}, + }, + }, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "sns.subscription", + Parameters: []configuration.ParameterRef{ + { + Name: "region", + ValueFrom: &configuration.ParameterValueFrom{ + Field: "region", + }, + }, + { + Name: "topicArn", + ValueFrom: &configuration.ParameterValueFrom{ + Field: "topicArn", + }, + }, + }, + }, + }, + }, + } +} + +func (c *GetSubscription) Setup(ctx core.SetupContext) error { + var config GetSubscriptionConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode setup configuration: %w", err) + } + + if config.Region == "" { + return fmt.Errorf("region is required") + } + + if config.TopicArn == "" { + return fmt.Errorf("topic ARN is required") + } + + if config.SubscriptionArn == "" { + return fmt.Errorf("subscription ARN is required") + } + + return nil +} + +func (c *GetSubscription) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, 
error) { + return ctx.DefaultProcessing() +} + +func (c *GetSubscription) Execute(ctx core.ExecutionContext) error { + var config GetSubscriptionConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("%s: failed to decode execution configuration: %w", c.Name(), err) + } + + credentials, err := common.CredentialsFromInstallation(ctx.Integration) + if err != nil { + return fmt.Errorf("%s: failed to load AWS credentials from integration: %w", c.Name(), err) + } + + client := NewClient(ctx.HTTP, credentials, config.Region) + subscription, err := client.GetSubscription(config.SubscriptionArn) + if err != nil { + return fmt.Errorf("failed to get subscription %q: %w", config.SubscriptionArn, err) + } + + if err := ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, "aws.sns.subscription", []any{subscription}); err != nil { + return fmt.Errorf("%s: failed to emit subscription payload: %w", c.Name(), err) + } + + return nil +} + +func (c *GetSubscription) Actions() []core.Action { + return []core.Action{} +} + +func (c *GetSubscription) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *GetSubscription) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *GetSubscription) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *GetSubscription) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/aws/sns/get_subscription_test.go b/pkg/integrations/aws/sns/get_subscription_test.go new file mode 100644 index 0000000000..d38573a8e6 --- /dev/null +++ b/pkg/integrations/aws/sns/get_subscription_test.go @@ -0,0 +1,97 @@ +package sns + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func 
Test__GetSubscription__Setup(t *testing.T) { + component := &GetSubscription{} + + t.Run("missing region -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "subscriptionArn": "arn:aws:sns:us-east-1:123456789012:orders-events:sub-123", + }, + }) + require.ErrorContains(t, err, "region is required") + }) + + t.Run("missing topic arn -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "us-east-1", + }, + }) + require.ErrorContains(t, err, "topic ARN is required") + }) + + t.Run("missing subscription arn -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + }) + require.ErrorContains(t, err, "subscription ARN is required") + }) +} + +func Test__GetSubscription__Execute(t *testing.T) { + component := &GetSubscription{} + + t.Run("valid request -> emits subscription payload", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + + + + TopicArnarn:aws:sns:us-east-1:123456789012:orders-events + Protocolhttps + Endpointhttps://example.com/hook + RawMessageDeliverytrue + + + + `)), + }, + }, + } + + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "subscriptionArn": "arn:aws:sns:us-east-1:123456789012:orders-events:sub", + }, + HTTP: httpContext, + ExecutionState: executionState, + Integration: &contexts.IntegrationContext{ + Secrets: map[string]core.IntegrationSecret{ + "accessKeyId": {Name: "accessKeyId", Value: []byte("key")}, + "secretAccessKey": {Name: "secretAccessKey", Value: []byte("secret")}, + "sessionToken": {Name: "sessionToken", Value: 
[]byte("token")}, + }, + }, + }) + + require.NoError(t, err) + require.Len(t, executionState.Payloads, 1) + payload := executionState.Payloads[0].(map[string]any)["data"] + subscription, ok := payload.(*Subscription) + require.True(t, ok) + assert.Equal(t, "https", subscription.Protocol) + assert.Equal(t, "https://example.com/hook", subscription.Endpoint) + assert.True(t, subscription.RawMessageDelivery) + }) +} diff --git a/pkg/integrations/aws/sns/get_topic.go b/pkg/integrations/aws/sns/get_topic.go new file mode 100644 index 0000000000..30dec0afd8 --- /dev/null +++ b/pkg/integrations/aws/sns/get_topic.go @@ -0,0 +1,135 @@ +package sns + +import ( + "fmt" + "net/http" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/integrations/aws/common" +) + +type GetTopic struct{} + +type GetTopicConfiguration struct { + Region string `json:"region" mapstructure:"region"` + TopicArn string `json:"topicArn" mapstructure:"topicArn"` +} + +func (c *GetTopic) Name() string { + return "aws.sns.getTopic" +} + +func (c *GetTopic) Label() string { + return "SNS • Get Topic" +} + +func (c *GetTopic) Description() string { + return "Get an AWS SNS topic by ARN" +} + +func (c *GetTopic) Documentation() string { + return `The Get Topic component retrieves metadata and attributes for an AWS SNS topic. 
+ +## Use Cases + +- **Configuration audits**: Verify topic settings and attributes +- **Workflow enrichment**: Load topic metadata before downstream actions +- **Validation**: Confirm topic existence and ownership` +} + +func (c *GetTopic) Icon() string { + return "aws" +} + +func (c *GetTopic) Color() string { + return "gray" +} + +func (c *GetTopic) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *GetTopic) Configuration() []configuration.Field { + return []configuration.Field{ + regionField(), + topicField(), + } +} + +func (c *GetTopic) Setup(ctx core.SetupContext) error { + var config GetTopicConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("%s: failed to decode setup configuration: %w", c.Name(), err) + } + + if _, err := requireRegion(config.Region); err != nil { + return fmt.Errorf("%s: invalid region: %w", c.Name(), err) + } + + if _, err := requireTopicArn(config.TopicArn); err != nil { + return fmt.Errorf("%s: invalid topic ARN: %w", c.Name(), err) + } + + return nil +} + +func (c *GetTopic) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *GetTopic) Execute(ctx core.ExecutionContext) error { + var config GetTopicConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("%s: failed to decode execution configuration: %w", c.Name(), err) + } + + region, err := requireRegion(config.Region) + if err != nil { + return fmt.Errorf("%s: invalid region: %w", c.Name(), err) + } + + topicArn, err := requireTopicArn(config.TopicArn) + if err != nil { + return fmt.Errorf("%s: invalid topic ARN: %w", c.Name(), err) + } + + credentials, err := common.CredentialsFromInstallation(ctx.Integration) + if err != nil { + return fmt.Errorf("%s: failed to load AWS credentials from integration: %w", c.Name(), err) + } + + 
client := NewClient(ctx.HTTP, credentials, region) + topic, err := client.GetTopic(topicArn) + if err != nil { + return fmt.Errorf("%s: failed to get topic %q: %w", c.Name(), topicArn, err) + } + + if err := ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, "aws.sns.topic", []any{topic}); err != nil { + return fmt.Errorf("%s: failed to emit topic payload: %w", c.Name(), err) + } + + return nil +} + +func (c *GetTopic) Actions() []core.Action { + return []core.Action{} +} + +func (c *GetTopic) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *GetTopic) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *GetTopic) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *GetTopic) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/aws/sns/get_topic_test.go b/pkg/integrations/aws/sns/get_topic_test.go new file mode 100644 index 0000000000..42d4369b6f --- /dev/null +++ b/pkg/integrations/aws/sns/get_topic_test.go @@ -0,0 +1,126 @@ +package sns + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__GetTopic__Setup(t *testing.T) { + component := &GetTopic{} + + t.Run("missing region -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": " ", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + }) + require.ErrorContains(t, err, "region is required") + }) + + t.Run("invalid region -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "invalid-region", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + }) + require.ErrorContains(t, err, "invalid AWS region") + }) + + 
t.Run("missing topic arn -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "us-east-1", + }, + }) + require.ErrorContains(t, err, "topic ARN is required") + }) + + t.Run("invalid topic arn format -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "invalid-arn", + }, + }) + require.ErrorContains(t, err, "invalid topic ARN format") + }) + + t.Run("valid configuration -> success", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + }) + require.NoError(t, err) + }) + + t.Run("valid china partition topic arn -> success", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "cn-north-1", + "topicArn": "arn:aws-cn:sns:cn-north-1:123456789012:orders-events", + }, + }) + require.NoError(t, err) + }) +} + +func Test__GetTopic__Execute(t *testing.T) { + component := &GetTopic{} + + t.Run("valid request -> emits topic payload", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + + + + DisplayNameOrders Events + Owner123456789012 + + + + `)), + }, + }, + } + + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + HTTP: httpContext, + ExecutionState: executionState, + Integration: &contexts.IntegrationContext{ + Secrets: map[string]core.IntegrationSecret{ + "accessKeyId": {Name: "accessKeyId", Value: []byte("key")}, + "secretAccessKey": {Name: "secretAccessKey", Value: []byte("secret")}, + 
"sessionToken": {Name: "sessionToken", Value: []byte("token")}, + }, + }, + }) + + require.NoError(t, err) + require.Len(t, executionState.Payloads, 1) + payload := executionState.Payloads[0].(map[string]any)["data"] + topic, ok := payload.(*Topic) + require.True(t, ok) + assert.Equal(t, "orders-events", topic.Name) + assert.Equal(t, "Orders Events", topic.DisplayName) + assert.Equal(t, "https://sns.us-east-1.amazonaws.com/", httpContext.Requests[0].URL.String()) + }) +} diff --git a/pkg/integrations/aws/sns/on_topic_message.go b/pkg/integrations/aws/sns/on_topic_message.go new file mode 100644 index 0000000000..39799b6cad --- /dev/null +++ b/pkg/integrations/aws/sns/on_topic_message.go @@ -0,0 +1,473 @@ +package sns + +import ( + "crypto" + "crypto/rsa" + "crypto/sha1" + "crypto/sha256" + "crypto/x509" + "encoding/base64" + "encoding/json" + "encoding/pem" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "time" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/integrations/aws/common" +) + +type OnTopicMessageConfiguration struct { + Region string `json:"region" mapstructure:"region"` + TopicArn string `json:"topicArn" mapstructure:"topicArn"` +} + +type OnTopicMessageMetadata struct { + Region string `json:"region" mapstructure:"region"` + TopicArn string `json:"topicArn" mapstructure:"topicArn"` +} + +type OnTopicMessage struct{} + +func (t *OnTopicMessage) Name() string { + return "aws.sns.onTopicMessage" +} + +func (t *OnTopicMessage) Label() string { + return "SNS • On Topic Message" +} + +func (t *OnTopicMessage) Description() string { + return "Listen to AWS SNS topic notifications" +} + +func (t *OnTopicMessage) Documentation() string { + return `The On Topic Message trigger starts a workflow execution when a message is published to an AWS SNS topic. 
+ +## Use Cases + +- **Event-driven automation**: React to messages published by external systems +- **Notification processing**: Handle SNS payloads in workflow steps +- **Routing and enrichment**: Trigger downstream workflows based on topic activity + +## How it works + +During setup, SuperPlane creates a webhook endpoint for this trigger and subscribes it to the selected SNS topic using HTTPS. SNS sends notification payloads to the webhook endpoint, which then emits workflow events.` +} + +func (t *OnTopicMessage) Icon() string { + return "aws" +} + +func (t *OnTopicMessage) Color() string { + return "gray" +} + +func (t *OnTopicMessage) Configuration() []configuration.Field { + return []configuration.Field{ + regionField(), + topicField(), + } +} + +func (t *OnTopicMessage) Setup(ctx core.TriggerContext) error { + var config OnTopicMessageConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode trigger configuration: %w", err) + } + + var metadata OnTopicMessageMetadata + if err := mapstructure.Decode(ctx.Metadata.Get(), &metadata); err != nil { + return fmt.Errorf("failed to decode trigger metadata: %w", err) + } + + region, err := requireRegion(config.Region) + if err != nil { + return fmt.Errorf("invalid region: %w", err) + } + + topicArn, err := requireTopicArn(config.TopicArn) + if err != nil { + return fmt.Errorf("invalid topic ARN: %w", err) + } + + if metadata.Region == region && metadata.TopicArn == topicArn { + return nil + } + + credentials, err := common.CredentialsFromInstallation(ctx.Integration) + if err != nil { + return fmt.Errorf("failed to load AWS credentials from integration: %w", err) + } + + client := NewClient(ctx.HTTP, credentials, region) + topic, err := client.GetTopic(topicArn) + if err != nil { + return fmt.Errorf("failed to get topic %q in region %q: %w", topicArn, region, err) + } + + err = ctx.Metadata.Set(OnTopicMessageMetadata{ + Region: region, + TopicArn: 
topicArn, + }) + + if err != nil { + return fmt.Errorf("failed to persist trigger metadata: %w", err) + } + + return ctx.Integration.RequestWebhook(common.WebhookConfiguration{ + Region: region, + Type: common.WebhookTypeSNS, + SNS: &common.SNSWebhookConfiguration{ + TopicArn: topic.TopicArn, + }, + }) +} + +func (t *OnTopicMessage) Actions() []core.Action { + return []core.Action{} +} + +func (t *OnTopicMessage) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + return nil, nil +} + +type SubscriptionMessage struct { + Type string `json:"Type"` + MessageID string `json:"MessageId"` + TopicArn string `json:"TopicArn"` + Subject string `json:"Subject"` + Message string `json:"Message"` + Timestamp string `json:"Timestamp"` + SignatureVersion string `json:"SignatureVersion"` + Signature string `json:"Signature"` + SigningCertURL string `json:"SigningCertURL"` + UnsubscribeURL string `json:"UnsubscribeURL"` + SubscribeURL string `json:"SubscribeURL"` + Token string `json:"Token"` + MessageAttributes map[string]MessageAttribute `json:"MessageAttributes"` +} + +type MessageAttribute struct { + Type string `json:"Type"` + Value string `json:"Value"` +} + +func (t *OnTopicMessage) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + var config OnTopicMessageConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode trigger configuration: %w", err) + } + + var message SubscriptionMessage + if err := json.Unmarshal(ctx.Body, &message); err != nil { + return http.StatusBadRequest, fmt.Errorf("failed to decode SNS webhook payload: %w", err) + } + + if err := t.verifyMessageSignature(ctx, message); err != nil { + ctx.Logger.Errorf("failed to verify SNS signature: %v", err) + return http.StatusBadRequest, fmt.Errorf("invalid SNS message signature: %w", err) + } + + switch message.Type { + case "SubscriptionConfirmation": + return 
t.confirmSubscription(ctx, config, message) + + case "Notification": + return t.emitTopicNotification(ctx, message, config) + + case "UnsubscribeConfirmation": + return http.StatusOK, nil + + default: + return http.StatusBadRequest, fmt.Errorf("unsupported SNS message type %q", message.Type) + } +} + +func (t *OnTopicMessage) confirmSubscription(ctx core.WebhookRequestContext, config OnTopicMessageConfiguration, message SubscriptionMessage) (int, error) { + if strings.TrimSpace(message.TopicArn) != config.TopicArn { + ctx.Logger.Infof("message topic ARN %s does not match configured topic ARN %s, ignoring", message.TopicArn, config.TopicArn) + return http.StatusOK, nil + } + + if message.SubscribeURL == "" { + ctx.Logger.Errorf("missing SubscribeURL") + return http.StatusBadRequest, fmt.Errorf("missing SubscribeURL") + } + + subscribeURL, err := url.Parse(message.SubscribeURL) + if err != nil { + ctx.Logger.Errorf("invalid SubscribeURL: %v", err) + return http.StatusBadRequest, fmt.Errorf("invalid SubscribeURL: %w", err) + } + + if subscribeURL.Scheme != "https" { + ctx.Logger.Errorf("SubscribeURL must use https") + return http.StatusBadRequest, fmt.Errorf("SubscribeURL must use https") + } + + host := strings.ToLower(subscribeURL.Hostname()) + if host == "" { + ctx.Logger.Errorf("SubscribeURL host is required") + return http.StatusBadRequest, fmt.Errorf("SubscribeURL host is required") + } + + if !strings.HasSuffix(host, ".amazonaws.com") && !strings.HasSuffix(host, ".amazonaws.com.cn") { + ctx.Logger.Errorf("SubscribeURL host must be an AWS SNS domain") + return http.StatusBadRequest, fmt.Errorf("SubscribeURL host must be an AWS SNS domain") + } + + req, err := http.NewRequest(http.MethodGet, subscribeURL.String(), nil) + if err != nil { + ctx.Logger.Errorf("failed to create request to confirm subscription: %v", err) + return http.StatusInternalServerError, fmt.Errorf("failed to create request: %w", err) + } + + response, err := ctx.HTTP.Do(req) + if err != nil { 
+ ctx.Logger.Errorf("failed to confirm SNS subscription: %v", err) + return http.StatusInternalServerError, fmt.Errorf("failed to confirm SNS subscription: %w", err) + } + + defer response.Body.Close() + + if response.StatusCode < http.StatusOK || response.StatusCode >= http.StatusMultipleChoices { + responseBody, readErr := io.ReadAll(response.Body) + if readErr != nil { + ctx.Logger.Errorf("failed to read response body: %v", readErr) + return http.StatusInternalServerError, fmt.Errorf( + "SNS subscription confirmation failed with status %d and unreadable body: %v", + response.StatusCode, + readErr, + ) + } + + ctx.Logger.Errorf("SNS subscription confirmation failed with status %d: %s", response.StatusCode, strings.TrimSpace(string(responseBody))) + return http.StatusInternalServerError, fmt.Errorf( + "SNS subscription confirmation failed with status %d: %s", + response.StatusCode, + strings.TrimSpace(string(responseBody)), + ) + } + + ctx.Logger.Info("Subscription confirmation was successful") + return http.StatusOK, nil +} + +func (t *OnTopicMessage) emitTopicNotification(ctx core.WebhookRequestContext, message SubscriptionMessage, config OnTopicMessageConfiguration) (int, error) { + topicArn := strings.TrimSpace(message.TopicArn) + if topicArn == "" { + ctx.Logger.Errorf("missing TopicArn in SNS notification payload") + return http.StatusBadRequest, fmt.Errorf("missing TopicArn in SNS notification payload") + } + + if topicArn != config.TopicArn { + ctx.Logger.Infof("message topic ARN %s does not match configured topic ARN %s, ignoring", topicArn, config.TopicArn) + return http.StatusOK, nil + } + + if err := ctx.Events.Emit("aws.sns.topic.message", message); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to emit topic message event: %w", err) + } + + return http.StatusOK, nil +} + +func (t *OnTopicMessage) Cleanup(ctx core.TriggerContext) error { + return nil +} + +/* + * Verifies that the message comes from AWS SNS. 
+ * See: https://docs.aws.amazon.com/sns/latest/dg/sns-verify-signature-of-message-verify-message-signature.html + */ +func (t *OnTopicMessage) verifyMessageSignature(ctx core.WebhookRequestContext, message SubscriptionMessage) error { + signature, err := base64.StdEncoding.DecodeString(message.Signature) + if err != nil { + return fmt.Errorf("failed to decode signature: %w", err) + } + + stringToSign, err := t.buildStringToSign(message) + if err != nil { + return fmt.Errorf("failed to build string to sign: %w", err) + } + + // + // TODO: it would be good to not fetch the certificate every time. + // + cert, err := t.fetchSigningCertificate(ctx, message.SigningCertURL) + if err != nil { + return fmt.Errorf("failed to fetch signing certificate: %w", err) + } + + hash, digest, err := t.getHashAndDigest(message.SignatureVersion, stringToSign) + if err != nil { + return fmt.Errorf("failed to get hash and digest: %w", err) + } + + publicKey, ok := cert.PublicKey.(*rsa.PublicKey) + if !ok { + return fmt.Errorf("unsupported signing certificate key type %T", cert.PublicKey) + } + + if err := rsa.VerifyPKCS1v15(publicKey, hash, digest, signature); err != nil { + return fmt.Errorf("signature verification failed: %w", err) + } + + return nil +} + +type SignableField struct { + name string + value string +} + +func (t *OnTopicMessage) buildStringToSign(message SubscriptionMessage) (string, error) { + signableFields, err := t.getSignableFields(message) + if err != nil { + return "", err + } + + for _, field := range signableFields { + if field.value == "" { + return "", fmt.Errorf("missing %s for SNS signature verification", field.name) + } + } + + var builder strings.Builder + for _, field := range signableFields { + builder.WriteString(field.name) + builder.WriteString("\n") + builder.WriteString(field.value) + builder.WriteString("\n") + } + + return builder.String(), nil +} + +func (t *OnTopicMessage) getSignableFields(message SubscriptionMessage) ([]SignableField, error) { 
+ var fields []SignableField + + switch message.Type { + case "Notification": + fields = append(fields, SignableField{"Message", message.Message}) + fields = append(fields, SignableField{"MessageId", message.MessageID}) + if message.Subject != "" { + fields = append(fields, SignableField{"Subject", message.Subject}) + } + fields = append(fields, SignableField{"Timestamp", message.Timestamp}) + fields = append(fields, SignableField{"TopicArn", message.TopicArn}) + fields = append(fields, SignableField{"Type", message.Type}) + return fields, nil + + case "SubscriptionConfirmation", "UnsubscribeConfirmation": + fields = append(fields, SignableField{"Message", message.Message}) + fields = append(fields, SignableField{"MessageId", message.MessageID}) + fields = append(fields, SignableField{"SubscribeURL", message.SubscribeURL}) + fields = append(fields, SignableField{"Timestamp", message.Timestamp}) + fields = append(fields, SignableField{"Token", message.Token}) + fields = append(fields, SignableField{"TopicArn", message.TopicArn}) + fields = append(fields, SignableField{"Type", message.Type}) + return fields, nil + + default: + return nil, fmt.Errorf("unsupported SNS message type %q", message.Type) + } +} + +func (t *OnTopicMessage) fetchSigningCertificate(ctx core.WebhookRequestContext, signingCertURL string) (*x509.Certificate, error) { + parsedURL, err := url.Parse(signingCertURL) + if err != nil { + return nil, fmt.Errorf("invalid SigningCertURL: %w", err) + } + + if parsedURL.Scheme != "https" { + return nil, fmt.Errorf("SigningCertURL must use https") + } + + host := strings.ToLower(parsedURL.Hostname()) + if host == "" { + return nil, fmt.Errorf("SigningCertURL host is required") + } + + if !strings.HasPrefix(host, "sns.") { + return nil, fmt.Errorf("SigningCertURL host must start with sns") + } + + if !strings.HasSuffix(host, ".amazonaws.com") && !strings.HasSuffix(host, ".amazonaws.com.cn") { + return nil, fmt.Errorf("SigningCertURL host must be an AWS SNS 
domain") + } + + req, err := http.NewRequest(http.MethodGet, parsedURL.String(), nil) + if err != nil { + return nil, fmt.Errorf("failed to create certificate request: %w", err) + } + + response, err := ctx.HTTP.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to download signing certificate: %w", err) + } + + defer response.Body.Close() + + if response.StatusCode < http.StatusOK || response.StatusCode >= http.StatusMultipleChoices { + responseBody, readErr := io.ReadAll(response.Body) + if readErr != nil { + return nil, fmt.Errorf("failed to download signing certificate: status %d with unreadable body: %w", response.StatusCode, readErr) + } + return nil, fmt.Errorf("failed to download signing certificate: status %d: %s", response.StatusCode, strings.TrimSpace(string(responseBody))) + } + + certBytes, err := io.ReadAll(response.Body) + if err != nil { + return nil, fmt.Errorf("failed to read signing certificate: %w", err) + } + + var block *pem.Block + rest := certBytes + for { + block, rest = pem.Decode(rest) + if block == nil { + break + } + if block.Type == "CERTIFICATE" { + break + } + } + + if block == nil || block.Type != "CERTIFICATE" { + return nil, fmt.Errorf("SigningCertURL did not return a certificate") + } + + cert, err := x509.ParseCertificate(block.Bytes) + if err != nil { + return nil, fmt.Errorf("failed to parse signing certificate: %w", err) + } + + now := time.Now() + if now.Before(cert.NotBefore) || now.After(cert.NotAfter) { + return nil, fmt.Errorf("signing certificate is not currently valid") + } + + return cert, nil +} + +func (t *OnTopicMessage) getHashAndDigest(signatureVersion, stringToSign string) (crypto.Hash, []byte, error) { + switch signatureVersion { + case "1": + sum := sha1.Sum([]byte(stringToSign)) + return crypto.SHA1, sum[:], nil + + case "2": + sum := sha256.Sum256([]byte(stringToSign)) + return crypto.SHA256, sum[:], nil + + default: + return 0, nil, fmt.Errorf("unsupported SignatureVersion %q", signatureVersion) + } 
+} diff --git a/pkg/integrations/aws/sns/on_topic_message_test.go b/pkg/integrations/aws/sns/on_topic_message_test.go new file mode 100644 index 0000000000..7038d14b2e --- /dev/null +++ b/pkg/integrations/aws/sns/on_topic_message_test.go @@ -0,0 +1,320 @@ +package sns + +import ( + "bytes" + "crypto" + "crypto/rand" + "crypto/rsa" + "crypto/sha256" + "crypto/x509" + "encoding/base64" + "encoding/json" + "encoding/pem" + "io" + "math/big" + "net/http" + "strings" + "testing" + "time" + + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/integrations/aws/common" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__OnTopicMessage__Setup(t *testing.T) { + trigger := &OnTopicMessage{} + + t.Run("valid configuration -> requests webhook endpoint", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + + + + DisplayNameOrders Events + + + + `)), + }, + }, + } + + metadataContext := &contexts.MetadataContext{} + integration := &contexts.IntegrationContext{ + Secrets: map[string]core.IntegrationSecret{ + "accessKeyId": {Name: "accessKeyId", Value: []byte("key")}, + "secretAccessKey": {Name: "secretAccessKey", Value: []byte("secret")}, + "sessionToken": {Name: "sessionToken", Value: []byte("token")}, + }, + } + + err := trigger.Setup(core.TriggerContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + HTTP: httpContext, + Metadata: metadataContext, + Integration: integration, + }) + + require.NoError(t, err) + require.Len(t, httpContext.Requests, 1) + require.Len(t, integration.WebhookRequests, 1) + + metadata, ok := metadataContext.Metadata.(OnTopicMessageMetadata) + require.True(t, ok) + assert.Equal(t, 
"us-east-1", metadata.Region) + assert.Equal(t, "arn:aws:sns:us-east-1:123456789012:orders-events", metadata.TopicArn) + + webhookConfig, ok := integration.WebhookRequests[0].(common.WebhookConfiguration) + require.True(t, ok) + assert.Equal(t, "us-east-1", webhookConfig.Region) + assert.Equal(t, common.WebhookTypeSNS, webhookConfig.Type) + require.NotNil(t, webhookConfig.SNS) + assert.Equal(t, "arn:aws:sns:us-east-1:123456789012:orders-events", webhookConfig.SNS.TopicArn) + }) + + t.Run("existing matching metadata -> no subscribe call", func(t *testing.T) { + httpContext := &contexts.HTTPContext{} + + metadataContext := &contexts.MetadataContext{ + Metadata: OnTopicMessageMetadata{ + Region: "us-east-1", + TopicArn: "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + } + + integration := &contexts.IntegrationContext{ + Secrets: map[string]core.IntegrationSecret{ + "accessKeyId": {Name: "accessKeyId", Value: []byte("key")}, + "secretAccessKey": {Name: "secretAccessKey", Value: []byte("secret")}, + "sessionToken": {Name: "sessionToken", Value: []byte("token")}, + }, + } + err := trigger.Setup(core.TriggerContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + HTTP: httpContext, + Metadata: metadataContext, + Integration: integration, + }) + + require.NoError(t, err) + require.Len(t, httpContext.Requests, 0) + require.Len(t, integration.WebhookRequests, 0) + }) +} + +func Test__OnTopicMessage__HandleWebhook(t *testing.T) { + trigger := &OnTopicMessage{} + + t.Run("notification for configured topic -> emits event", func(t *testing.T) { + privateKey, certPEM := createTestSigningCert(t) + message := signTestMessage(t, trigger, SubscriptionMessage{ + Type: "Notification", + MessageID: "msg-123", + TopicArn: "arn:aws:sns:us-east-1:123456789012:orders-events", + Subject: "order.created", + Message: "{\"orderId\":\"ord_123\"}", + Timestamp: "2026-01-10T10:00:00Z", + SigningCertURL: 
testSigningCertURL, + SignatureVersion: "2", + }, privateKey) + + body, err := json.Marshal(message) + require.NoError(t, err) + + eventContext := &contexts.EventContext{} + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{{ + StatusCode: http.StatusOK, + Body: io.NopCloser(bytes.NewReader(certPEM)), + }}, + } + + status, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + Events: eventContext, + HTTP: httpContext, + Logger: log.NewEntry(log.New()), + }) + + require.NoError(t, err) + assert.Equal(t, http.StatusOK, status) + require.Len(t, eventContext.Payloads, 1) + assert.Equal(t, "aws.sns.topic.message", eventContext.Payloads[0].Type) + + payload, ok := eventContext.Payloads[0].Data.(SubscriptionMessage) + require.True(t, ok) + assert.Equal(t, "arn:aws:sns:us-east-1:123456789012:orders-events", payload.TopicArn) + assert.Equal(t, "order.created", payload.Subject) + assert.Equal(t, "{\"orderId\":\"ord_123\"}", payload.Message) + assert.Equal(t, "2026-01-10T10:00:00Z", payload.Timestamp) + }) + + t.Run("subscription confirmation for different topic -> ignored", func(t *testing.T) { + privateKey, certPEM := createTestSigningCert(t) + message := signTestMessage(t, trigger, SubscriptionMessage{ + Type: "SubscriptionConfirmation", + MessageID: "msg-456", + TopicArn: "arn:aws:sns:us-east-1:123456789012:different-topic", + Message: "confirm", + SubscribeURL: "https://sns.us-east-1.amazonaws.com/?Action=ConfirmSubscription", + Timestamp: "2026-01-10T10:00:00Z", + Token: "token-123", + SigningCertURL: testSigningCertURL, + SignatureVersion: "2", + }, privateKey) + + body, err := json.Marshal(message) + require.NoError(t, err) + + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{{ + StatusCode: http.StatusOK, + Body: io.NopCloser(bytes.NewReader(certPEM)), + }}, + } + + status, err := 
trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + Events: &contexts.EventContext{}, + HTTP: httpContext, + Logger: log.NewEntry(log.New()), + }) + + require.NoError(t, err) + assert.Equal(t, http.StatusOK, status) + }) + + t.Run("confirmation for configured topic -> confirms subscription", func(t *testing.T) { + privateKey, certPEM := createTestSigningCert(t) + message := signTestMessage(t, trigger, SubscriptionMessage{ + Type: "SubscriptionConfirmation", + MessageID: "msg-789", + TopicArn: "arn:aws:sns:us-east-1:123456789012:orders-events", + Message: "confirm", + SubscribeURL: "https://sns.us-east-1.amazonaws.com/?Action=ConfirmSubscription", + Timestamp: "2026-01-10T10:00:00Z", + Token: "token-456", + SigningCertURL: testSigningCertURL, + SignatureVersion: "2", + }, privateKey) + + body, err := json.Marshal(message) + require.NoError(t, err) + + httpCtx := &contexts.HTTPContext{ + Responses: []*http.Response{{ + StatusCode: http.StatusOK, + Body: io.NopCloser(bytes.NewReader(certPEM)), + }, { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(``)), + }}, + } + + status, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: body, + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + HTTP: httpCtx, + Events: &contexts.EventContext{}, + Logger: log.NewEntry(log.New()), + }) + + require.NoError(t, err) + assert.Equal(t, http.StatusOK, status) + require.Len(t, httpCtx.Requests, 2) + assert.Equal(t, testSigningCertURL, httpCtx.Requests[0].URL.String()) + assert.Equal(t, "https://sns.us-east-1.amazonaws.com/?Action=ConfirmSubscription", httpCtx.Requests[1].URL.String()) + }) + + t.Run("unsupported message type -> bad request", func(t *testing.T) { + status, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Body: 
[]byte(`{ + "Type": "UnknownType", + "TopicArn": "arn:aws:sns:us-east-1:123456789012:orders-events" + }`), + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + Events: &contexts.EventContext{}, + Logger: log.NewEntry(log.New()), + }) + + require.Error(t, err) + assert.Equal(t, http.StatusBadRequest, status) + }) +} + +const testSigningCertURL = "https://sns.us-east-1.amazonaws.com/test.pem" + +func createTestSigningCert(t *testing.T) (*rsa.PrivateKey, []byte) { + t.Helper() + + privateKey, err := rsa.GenerateKey(rand.Reader, 2048) + require.NoError(t, err) + + now := time.Now() + serialNumber, err := rand.Int(rand.Reader, new(big.Int).Lsh(big.NewInt(1), 128)) + require.NoError(t, err) + + template := x509.Certificate{ + SerialNumber: serialNumber, + NotBefore: now.Add(-time.Minute), + NotAfter: now.Add(time.Hour), + KeyUsage: x509.KeyUsageDigitalSignature, + BasicConstraintsValid: true, + IsCA: true, + } + + derBytes, err := x509.CreateCertificate(rand.Reader, &template, &template, &privateKey.PublicKey, privateKey) + require.NoError(t, err) + + certPEM := pem.EncodeToMemory(&pem.Block{ + Type: "CERTIFICATE", + Bytes: derBytes, + }) + + return privateKey, certPEM +} + +func signTestMessage(t *testing.T, trigger *OnTopicMessage, message SubscriptionMessage, privateKey *rsa.PrivateKey) SubscriptionMessage { + t.Helper() + + if message.SignatureVersion == "" { + message.SignatureVersion = "2" + } + + stringToSign, err := trigger.buildStringToSign(message) + require.NoError(t, err) + + sum := sha256.Sum256([]byte(stringToSign)) + signature, err := rsa.SignPKCS1v15(rand.Reader, privateKey, crypto.SHA256, sum[:]) + require.NoError(t, err) + + message.Signature = base64.StdEncoding.EncodeToString(signature) + return message +} diff --git a/pkg/integrations/aws/sns/publish_message.go b/pkg/integrations/aws/sns/publish_message.go new file mode 100644 index 0000000000..1ee53986bc --- /dev/null +++ 
b/pkg/integrations/aws/sns/publish_message.go @@ -0,0 +1,214 @@ +package sns + +import ( + "encoding/json" + "fmt" + "net/http" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/integrations/aws/common" +) + +const ( + PublishMessageFormatJSON = "json" + PublishMessageFormatText = "text" +) + +type PublishMessage struct{} + +type PublishMessageConfiguration struct { + Region string `json:"region" mapstructure:"region"` + TopicArn string `json:"topicArn" mapstructure:"topicArn"` + Format string `json:"format" mapstructure:"format"` + JSON *any `json:"json" mapstructure:"json"` + Text *string `json:"text" mapstructure:"text"` +} + +func (c *PublishMessage) Name() string { + return "aws.sns.publishMessage" +} + +func (c *PublishMessage) Label() string { + return "SNS • Publish Message" +} + +func (c *PublishMessage) Description() string { + return "Publish a message to an AWS SNS topic" +} + +func (c *PublishMessage) Documentation() string { + return `The Publish Message component sends a message to an AWS SNS topic. 
+ +## Use Cases + +- **Event fan-out**: Broadcast workflow results to multiple subscribers +- **Notifications**: Send operational updates to users and systems +- **Automation**: Trigger downstream subscribers through SNS delivery` +} + +func (c *PublishMessage) Icon() string { + return "aws" +} + +func (c *PublishMessage) Color() string { + return "gray" +} + +func (c *PublishMessage) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *PublishMessage) Configuration() []configuration.Field { + return []configuration.Field{ + regionField(), + topicField(), + { + Name: "format", + Label: "Message Format", + Type: configuration.FieldTypeSelect, + Required: true, + Default: PublishMessageFormatJSON, + TypeOptions: &configuration.TypeOptions{ + Select: &configuration.SelectTypeOptions{ + Options: []configuration.FieldOption{ + {Value: PublishMessageFormatJSON, Label: "JSON"}, + {Value: PublishMessageFormatText, Label: "Text"}, + }, + }, + }, + }, + { + Name: "json", + Label: "JSON Message", + Type: configuration.FieldTypeObject, + Required: false, + Default: map[string]any{}, + VisibilityConditions: []configuration.VisibilityCondition{ + { + Field: "topicArn", + Values: []string{"*"}, + }, + { + Field: "format", + Values: []string{PublishMessageFormatJSON}, + }, + }, + }, + { + Name: "text", + Label: "Text Message", + Type: configuration.FieldTypeText, + Required: false, + VisibilityConditions: []configuration.VisibilityCondition{ + { + Field: "format", + Values: []string{PublishMessageFormatText}, + }, + }, + }, + } +} + +func (c *PublishMessage) Setup(ctx core.SetupContext) error { + var config PublishMessageConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode setup configuration: %w", err) + } + + if _, err := requireRegion(config.Region); err != nil { + return fmt.Errorf("invalid region: %w", err) + } + + if _, err := 
requireTopicArn(config.TopicArn); err != nil { + return fmt.Errorf("invalid topic ARN: %w", err) + } + + if config.Format == "" { + return fmt.Errorf("format is required") + } + + if config.Format == PublishMessageFormatJSON && config.JSON == nil { + return fmt.Errorf("JSON message is required") + } + + if config.Format == PublishMessageFormatText && config.Text == nil { + return fmt.Errorf("text message is required") + } + + return nil +} + +func (c *PublishMessage) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *PublishMessage) Execute(ctx core.ExecutionContext) error { + var config PublishMessageConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode execution configuration: %w", err) + } + + credentials, err := common.CredentialsFromInstallation(ctx.Integration) + if err != nil { + return fmt.Errorf("failed to load AWS credentials from integration: %w", err) + } + + params, err := c.buildPublishMessageParameters(config) + if err != nil { + return fmt.Errorf("failed to build publish message parameters: %w", err) + } + + client := NewClient(ctx.HTTP, credentials, config.Region) + result, err := client.PublishMessage(*params) + if err != nil { + return fmt.Errorf("failed to publish message to topic %q: %w", config.TopicArn, err) + } + + if err := ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, "aws.sns.message.published", []any{result}); err != nil { + return fmt.Errorf("failed to emit published message payload: %w", err) + } + + return nil +} + +func (c *PublishMessage) Actions() []core.Action { + return []core.Action{} +} + +func (c *PublishMessage) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *PublishMessage) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *PublishMessage) Cancel(ctx core.ExecutionContext) error { + return nil +} + 
+func (c *PublishMessage) Cleanup(ctx core.SetupContext) error { + return nil +} + +func (c *PublishMessage) buildPublishMessageParameters(config PublishMessageConfiguration) (*PublishMessageParameters, error) { + if config.Format == PublishMessageFormatText { + return &PublishMessageParameters{ + TopicArn: config.TopicArn, + Message: *config.Text, + }, nil + } + + message, err := json.Marshal(config.JSON) + if err != nil { + return nil, fmt.Errorf("failed to marshal JSON message: %w", err) + } + + return &PublishMessageParameters{ + TopicArn: config.TopicArn, + Message: string(message), + }, nil +} diff --git a/pkg/integrations/aws/sns/publish_message_test.go b/pkg/integrations/aws/sns/publish_message_test.go new file mode 100644 index 0000000000..04663501b0 --- /dev/null +++ b/pkg/integrations/aws/sns/publish_message_test.go @@ -0,0 +1,180 @@ +package sns + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__PublishMessage__Setup(t *testing.T) { + component := &PublishMessage{} + + t.Run("missing region -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{}, + }) + require.ErrorContains(t, err, "region is required") + }) + + t.Run("missing topic arn -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "us-east-1", + }, + }) + require.ErrorContains(t, err, "topic ARN is required") + }) + + t.Run("missing format -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + }, + }) + require.ErrorContains(t, err, "format is required") + }) + + t.Run("missing json message -> error", func(t *testing.T) { 
+ err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "format": "json", + }, + }) + require.ErrorContains(t, err, "JSON message is required") + }) + + t.Run("missing text message -> error", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "format": "text", + }, + }) + require.ErrorContains(t, err, "text message is required") + }) + + t.Run("valid json message -> success", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "format": "json", + "json": map[string]any{"message": "hello world"}, + }, + }) + require.NoError(t, err) + }) + + t.Run("valid text message -> success", func(t *testing.T) { + err := component.Setup(core.SetupContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "format": "text", + "text": "hello world", + }, + }) + require.NoError(t, err) + }) +} + +func Test__PublishMessage__Execute(t *testing.T) { + component := &PublishMessage{} + + t.Run("valid text message request -> emits publish payload", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + + + msg-123 + + + `)), + }, + }, + } + + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "format": "text", + "text": "hello world", + }, + HTTP: httpContext, + ExecutionState: executionState, + Integration: 
&contexts.IntegrationContext{ + Secrets: map[string]core.IntegrationSecret{ + "accessKeyId": {Name: "accessKeyId", Value: []byte("key")}, + "secretAccessKey": {Name: "secretAccessKey", Value: []byte("secret")}, + "sessionToken": {Name: "sessionToken", Value: []byte("token")}, + }, + }, + }) + + require.NoError(t, err) + require.Len(t, executionState.Payloads, 1) + payload := executionState.Payloads[0].(map[string]any)["data"] + result, ok := payload.(*PublishResult) + require.True(t, ok) + assert.Equal(t, "msg-123", result.MessageID) + }) + + t.Run("valid json message request -> emits publish payload", func(t *testing.T) { + httpContext := &contexts.HTTPContext{ + Responses: []*http.Response{ + { + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(` + + + msg-123 + + + `)), + }, + }, + } + + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "region": "us-east-1", + "topicArn": "arn:aws:sns:us-east-1:123456789012:orders-events", + "format": "json", + "json": map[string]any{"message": "hello world"}, + }, + HTTP: httpContext, + ExecutionState: executionState, + Integration: &contexts.IntegrationContext{ + Secrets: map[string]core.IntegrationSecret{ + "accessKeyId": {Name: "accessKeyId", Value: []byte("key")}, + "secretAccessKey": {Name: "secretAccessKey", Value: []byte("secret")}, + "sessionToken": {Name: "sessionToken", Value: []byte("token")}, + }, + }, + }) + + require.NoError(t, err) + require.Len(t, executionState.Payloads, 1) + payload := executionState.Payloads[0].(map[string]any)["data"] + result, ok := payload.(*PublishResult) + require.True(t, ok) + assert.Equal(t, "msg-123", result.MessageID) + }) + +} diff --git a/pkg/integrations/aws/sns/resources.go b/pkg/integrations/aws/sns/resources.go new file mode 100644 index 0000000000..9767394915 --- /dev/null +++ b/pkg/integrations/aws/sns/resources.go @@ -0,0 +1,75 @@ +package sns + 
+import ( + "fmt" + "strings" + + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/integrations/aws/common" +) + +func ListTopics(ctx core.ListResourcesContext, resourceType string) ([]core.IntegrationResource, error) { + region := strings.TrimSpace(ctx.Parameters["region"]) + if region == "" { + return nil, fmt.Errorf("list SNS topics: region is required") + } + + credentials, err := common.CredentialsFromInstallation(ctx.Integration) + if err != nil { + return nil, fmt.Errorf("list SNS topics: failed to load AWS credentials from integration: %w", err) + } + + client := NewClient(ctx.HTTP, credentials, region) + topics, err := client.ListTopics() + if err != nil { + return nil, fmt.Errorf("list SNS topics: failed to list topics in region %q: %w", region, err) + } + + var resources []core.IntegrationResource + for _, topic := range topics { + resources = append(resources, core.IntegrationResource{ + Type: resourceType, + Name: topic.Name, + ID: topic.TopicArn, + }) + } + + return resources, nil +} + +func ListSubscriptions(ctx core.ListResourcesContext, resourceType string) ([]core.IntegrationResource, error) { + region := ctx.Parameters["region"] + if region == "" { + return nil, fmt.Errorf("region is required") + } + + topicArn := ctx.Parameters["topicArn"] + if topicArn == "" { + return nil, fmt.Errorf("topic ARN is required") + } + + ctx.Logger.Infof("listing subscriptions for topic %q in region %q", topicArn, region) + + credentials, err := common.CredentialsFromInstallation(ctx.Integration) + if err != nil { + return nil, fmt.Errorf("failed to load AWS credentials from integration: %w", err) + } + + client := NewClient(ctx.HTTP, credentials, region) + subscriptions, err := client.ListSubscriptionsByTopic(topicArn) + if err != nil { + return nil, fmt.Errorf("failed to list subscriptions in region %q: %w", region, err) + } + + var resources []core.IntegrationResource + for _, subscription := range subscriptions { + parts 
:= strings.Split(subscription.SubscriptionArn, ":") + resources = append(resources, core.IntegrationResource{ + Type: resourceType, + Name: parts[len(parts)-1], + ID: subscription.SubscriptionArn, + }) + } + + return resources, nil +} diff --git a/pkg/integrations/aws/sns/types.go b/pkg/integrations/aws/sns/types.go new file mode 100644 index 0000000000..ae230328c9 --- /dev/null +++ b/pkg/integrations/aws/sns/types.go @@ -0,0 +1,108 @@ +package sns + +// Topic models an AWS SNS topic payload returned by SNS API operations. +type Topic struct { + TopicArn string `json:"topicArn" mapstructure:"topicArn"` + Name string `json:"name" mapstructure:"name"` + DisplayName string `json:"displayName,omitempty" mapstructure:"displayName"` + Owner string `json:"owner,omitempty" mapstructure:"owner"` + KmsMasterKeyID string `json:"kmsMasterKeyId,omitempty" mapstructure:"kmsMasterKeyId"` + FifoTopic bool `json:"fifoTopic" mapstructure:"fifoTopic"` + ContentBasedDeduplication bool `json:"contentBasedDeduplication" mapstructure:"contentBasedDeduplication"` + Attributes map[string]string `json:"attributes,omitempty" mapstructure:"attributes"` +} + +// Subscription models an AWS SNS subscription payload returned by SNS API operations. +type Subscription struct { + SubscriptionArn string `json:"subscriptionArn" mapstructure:"subscriptionArn"` + TopicArn string `json:"topicArn,omitempty" mapstructure:"topicArn"` + Protocol string `json:"protocol,omitempty" mapstructure:"protocol"` + Endpoint string `json:"endpoint,omitempty" mapstructure:"endpoint"` + Owner string `json:"owner,omitempty" mapstructure:"owner"` + PendingConfirmation bool `json:"pendingConfirmation" mapstructure:"pendingConfirmation"` + RawMessageDelivery bool `json:"rawMessageDelivery" mapstructure:"rawMessageDelivery"` + Attributes map[string]string `json:"attributes,omitempty" mapstructure:"attributes"` +} + +// PublishResult models the response emitted after publishing an SNS message. 
+type PublishResult struct { + MessageID string `json:"messageId" mapstructure:"messageId"` + SequenceNumber string `json:"sequenceNumber,omitempty" mapstructure:"sequenceNumber"` + TopicArn string `json:"topicArn" mapstructure:"topicArn"` +} + +// PublishMessageParameters defines the arguments for a publish operation. +type PublishMessageParameters struct { + TopicArn string + Message string + Subject string + MessageAttributes map[string]string +} + +// SubscribeParameters defines the arguments for a subscribe operation. +type SubscribeParameters struct { + TopicArn string + Protocol string + Endpoint string + Attributes map[string]string + ReturnSubscriptionARN bool +} + +type attributeEntry struct { + Key string `xml:"key"` + Value string `xml:"value"` +} + +type getTopicAttributesResponse struct { + Entries []attributeEntry `xml:"GetTopicAttributesResult>Attributes>entry"` +} + +type createTopicResponse struct { + TopicArn string `xml:"CreateTopicResult>TopicArn"` +} + +type publishResponse struct { + MessageID string `xml:"PublishResult>MessageId"` + SequenceNumber string `xml:"PublishResult>SequenceNumber"` +} + +type getSubscriptionAttributesResponse struct { + Entries []attributeEntry `xml:"GetSubscriptionAttributesResult>Attributes>entry"` +} + +type subscribeResponse struct { + SubscriptionArn string `xml:"SubscribeResult>SubscriptionArn"` +} + +type listTopicMember struct { + TopicArn string `xml:"TopicArn"` +} + +type listTopicsResponse struct { + Topics []listTopicMember `xml:"ListTopicsResult>Topics>member"` + NextToken string `xml:"ListTopicsResult>NextToken"` +} + +type listSubscriptionMember struct { + SubscriptionArn string `xml:"SubscriptionArn"` + TopicArn string `xml:"TopicArn"` + Protocol string `xml:"Protocol"` + Endpoint string `xml:"Endpoint"` + Owner string `xml:"Owner"` +} + +type listSubscriptionsResponse struct { + Subscriptions []listSubscriptionMember `xml:"ListSubscriptionsResult>Subscriptions>member"` + SubscriptionsTopic 
[]listSubscriptionMember `xml:"ListSubscriptionsByTopicResult>Subscriptions>member"` + NextToken string `xml:"ListSubscriptionsResult>NextToken"` + NextTokenTopic string `xml:"ListSubscriptionsByTopicResult>NextToken"` +} + +type snsErrorDetail struct { + Code string `xml:"Code"` + Message string `xml:"Message"` +} + +type snsErrorPayload struct { + Error snsErrorDetail `xml:"Error"` +} diff --git a/pkg/integrations/aws/sns/validation.go b/pkg/integrations/aws/sns/validation.go new file mode 100644 index 0000000000..93943df7e8 --- /dev/null +++ b/pkg/integrations/aws/sns/validation.go @@ -0,0 +1,49 @@ +package sns + +import ( + "fmt" + "strings" + + "github.com/superplanehq/superplane/pkg/integrations/aws/common" +) + +// requireRegion validates and normalizes region values. +func requireRegion(region string) (string, error) { + normalized := strings.TrimSpace(region) + if normalized == "" { + return "", fmt.Errorf("region is required") + } + + // Validate against known AWS regions + for _, r := range common.AllRegions { + if r.Value == normalized { + return normalized, nil + } + } + + return "", fmt.Errorf("invalid AWS region: %s", normalized) +} + +// requireTopicArn validates and normalizes topic ARNs. 
+func requireTopicArn(topicArn string) (string, error) { + normalized := strings.TrimSpace(topicArn) + if normalized == "" { + return "", fmt.Errorf("topic ARN is required") + } + + // Validate ARN format: arn:<partition>:sns:<region>:<account-id>:<topic-name> + if !strings.HasPrefix(normalized, "arn:") { + return "", fmt.Errorf("invalid topic ARN format: must start with 'arn:'") + } + + parts := strings.Split(normalized, ":") + if len(parts) < 6 { + return "", fmt.Errorf("invalid topic ARN format: expected arn:<partition>:sns:<region>:<account-id>:<topic-name>") + } + + if parts[2] != "sns" { + return "", fmt.Errorf("invalid topic ARN format: expected SNS service ARN") + } + + return normalized, nil +} diff --git a/pkg/integrations/aws/webhook_handler.go b/pkg/integrations/aws/webhook_handler.go new file mode 100644 index 0000000000..9da9ffaa9b --- /dev/null +++ b/pkg/integrations/aws/webhook_handler.go @@ -0,0 +1,112 @@ +package aws + +import ( + "fmt" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/integrations/aws/common" + "github.com/superplanehq/superplane/pkg/integrations/aws/sns" +) + +type WebhookHandler struct{} + +func (h *WebhookHandler) Setup(ctx core.WebhookHandlerContext) (any, error) { + var config common.WebhookConfiguration + if err := mapstructure.Decode(ctx.Webhook.GetConfiguration(), &config); err != nil { + return nil, fmt.Errorf("failed to decode SNS webhook configuration: %w", err) + } + + switch config.Type { + case common.WebhookTypeSNS: + return h.setupSNS(ctx, config) + } + + return nil, fmt.Errorf("setup: unsupported webhook type: %s", config.Type) +} + +func (h *WebhookHandler) setupSNS(ctx core.WebhookHandlerContext, config common.WebhookConfiguration) (any, error) { + credentials, err := common.CredentialsFromInstallation(ctx.Integration) + if err != nil { + return nil, fmt.Errorf("failed to load AWS credentials from integration: %w", err) + } + + client := sns.NewClient(ctx.HTTP, 
credentials, config.Region) + subscription, err := client.Subscribe(sns.SubscribeParameters{ + TopicArn: config.SNS.TopicArn, + Protocol: "https", + Endpoint: ctx.Webhook.GetURL(), + ReturnSubscriptionARN: true, + }) + + if err != nil { + return nil, fmt.Errorf("failed to subscribe to SNS topic %q in region %q: %w", config.SNS.TopicArn, config.Region, err) + } + + return common.SNSWebhookMetadata{ + SubscriptionArn: subscription.SubscriptionArn, + }, nil +} + +func (h *WebhookHandler) Cleanup(ctx core.WebhookHandlerContext) error { + var config common.WebhookConfiguration + if err := mapstructure.Decode(ctx.Webhook.GetConfiguration(), &config); err != nil { + return fmt.Errorf("failed to decode SNS webhook configuration: %w", err) + } + + switch config.Type { + case common.WebhookTypeSNS: + return h.cleanupSNS(ctx, config.Region) + default: + return fmt.Errorf("cleanup: unsupported webhook type: %s", config.Type) + } +} + +func (h *WebhookHandler) cleanupSNS(ctx core.WebhookHandlerContext, region string) error { + metadata := common.SNSWebhookMetadata{} + if err := mapstructure.Decode(ctx.Webhook.GetMetadata(), &metadata); err != nil { + return fmt.Errorf("failed to decode SNS webhook metadata: %w", err) + } + + credentials, err := common.CredentialsFromInstallation(ctx.Integration) + if err != nil { + return fmt.Errorf("cleanup SNS: failed to load AWS credentials from integration: %w", err) + } + + client := sns.NewClient(ctx.HTTP, credentials, region) + err = client.Unsubscribe(metadata.SubscriptionArn) + if err != nil && !common.IsNotFoundErr(err) { + return fmt.Errorf("cleanup SNS: failed to unsubscribe existing subscription %q in region %q: %w", metadata.SubscriptionArn, region, err) + } + + return nil +} + +func (h *WebhookHandler) CompareConfig(a, b any) (bool, error) { + configA := common.WebhookConfiguration{} + configB := common.WebhookConfiguration{} + + err := mapstructure.Decode(a, &configA) + if err != nil { + return false, err + } + + err = 
mapstructure.Decode(b, &configB) + if err != nil { + return false, err + } + + if configA.Type != configB.Type { + return false, nil + } + + if configA.Type == common.WebhookTypeSNS { + return configA.SNS.TopicArn == configB.SNS.TopicArn, nil + } + + return false, nil +} + +func (h *WebhookHandler) Merge(current, requested any) (any, bool, error) { + return current, false, nil +} diff --git a/pkg/public/server.go b/pkg/public/server.go index 4ddd80c222..663fbc1262 100644 --- a/pkg/public/server.go +++ b/pkg/public/server.go @@ -409,7 +409,6 @@ func (s *Server) InitRouter(additionalMiddlewares ...mux.MiddlewareFunc) { // publicRoute. HandleFunc(s.BasePath+"/webhooks/{webhookID}", s.HandleWebhook). - HeadersRegexp("Content-Type", `^application/json(?:;\s*charset=utf-8)?$`). Methods("POST") // diff --git a/web_src/src/assets/icons/integrations/aws.sns.svg b/web_src/src/assets/icons/integrations/aws.sns.svg new file mode 100644 index 0000000000..6cb54adab2 --- /dev/null +++ b/web_src/src/assets/icons/integrations/aws.sns.svg @@ -0,0 +1,18 @@ + + + + Icon-Architecture/64/Arch_AWS-Simple-Notification-Service_64 + Created with Sketch. 
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/web_src/src/pages/workflowv2/mappers/aws/index.ts b/web_src/src/pages/workflowv2/mappers/aws/index.ts index e431554a64..215540d6e8 100644 --- a/web_src/src/pages/workflowv2/mappers/aws/index.ts +++ b/web_src/src/pages/workflowv2/mappers/aws/index.ts @@ -15,6 +15,12 @@ import { deleteRepositoryMapper } from "./codeartifact/delete_repository"; import { disposePackageVersionsMapper } from "./codeartifact/dispose_package_versions"; import { updatePackageVersionsStatusMapper } from "./codeartifact/update_package_versions_status"; import { onAlarmTriggerRenderer } from "./cloudwatch/on_alarm"; +import { onTopicMessageTriggerRenderer } from "./sns/on_topic_message"; +import { createTopicMapper } from "./sns/create_topic"; +import { deleteTopicMapper } from "./sns/delete_topic"; +import { getSubscriptionMapper } from "./sns/get_subscription"; +import { getTopicMapper } from "./sns/get_topic"; +import { publishMessageMapper } from "./sns/publish_message"; export const componentMappers: Record = { "lambda.runFunction": runFunctionMapper, @@ -28,6 +34,11 @@ export const componentMappers: Record = { "codeArtifact.disposePackageVersions": disposePackageVersionsMapper, "codeArtifact.getPackageVersion": getPackageVersionMapper, "codeArtifact.updatePackageVersionsStatus": updatePackageVersionsStatusMapper, + "sns.getTopic": getTopicMapper, + "sns.getSubscription": getSubscriptionMapper, + "sns.createTopic": createTopicMapper, + "sns.deleteTopic": deleteTopicMapper, + "sns.publishMessage": publishMessageMapper, }; export const triggerRenderers: Record = { @@ -35,6 +46,7 @@ export const triggerRenderers: Record = { "codeArtifact.onPackageVersion": onPackageVersionTriggerRenderer, "ecr.onImagePush": onImagePushTriggerRenderer, "ecr.onImageScan": onImageScanTriggerRenderer, + "sns.onTopicMessage": onTopicMessageTriggerRenderer, }; export const eventStateRegistry: Record = { @@ -48,4 +60,9 @@ export const 
eventStateRegistry: Record = { "codeArtifact.disposePackageVersions": buildActionStateRegistry("disposed"), "codeArtifact.getPackageVersion": buildActionStateRegistry("retrieved"), "codeArtifact.updatePackageVersionsStatus": buildActionStateRegistry("updated"), + "sns.getTopic": buildActionStateRegistry("retrieved"), + "sns.getSubscription": buildActionStateRegistry("retrieved"), + "sns.createTopic": buildActionStateRegistry("created"), + "sns.deleteTopic": buildActionStateRegistry("deleted"), + "sns.publishMessage": buildActionStateRegistry("published"), }; diff --git a/web_src/src/pages/workflowv2/mappers/aws/sns/common.ts b/web_src/src/pages/workflowv2/mappers/aws/sns/common.ts new file mode 100644 index 0000000000..dc9c29a177 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/aws/sns/common.ts @@ -0,0 +1,61 @@ +import { ComponentBaseContext, ExecutionInfo, NodeInfo, SubtitleContext } from "../../types"; +import { ComponentBaseProps, EventSection } from "@/ui/componentBase"; +import { getBackgroundColorClass, getColorClass } from "@/utils/colors"; +import { getState, getStateMap, getTriggerRenderer } from "../.."; +import awsSnsIcon from "@/assets/icons/integrations/aws.sns.svg"; +import { formatTimeAgo } from "@/utils/date"; +import { MetadataItem } from "@/ui/metadataList"; + +export function buildSnsProps(context: ComponentBaseContext, metadata: MetadataItem[]): ComponentBaseProps { + const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null; + const componentName = context.componentDefinition.name || "unknown"; + + return { + title: context.node.name || context.componentDefinition.label || "Unnamed component", + iconSrc: awsSnsIcon, + iconColor: getColorClass(context.componentDefinition.color), + collapsedBackground: getBackgroundColorClass(context.componentDefinition.color), + collapsed: context.node.isCollapsed, + eventSections: lastExecution ? 
buildEventSections(context.nodes, lastExecution, componentName) : undefined, + includeEmptyState: !lastExecution, + metadata, + eventStateMap: getStateMap(componentName), + }; +} + +export function buildSubtitle(context: SubtitleContext): string { + if (!context.execution.createdAt) { + return ""; + } + + return formatTimeAgo(new Date(context.execution.createdAt)); +} + +export function buildEventSections(nodes: NodeInfo[], execution: ExecutionInfo, componentName: string): EventSection[] { + if (!execution.createdAt || !execution.rootEvent?.id) { + return []; + } + + const rootTriggerNode = nodes.find((node) => node.id === execution.rootEvent?.nodeId); + const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName || ""); + const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent }); + + return [ + { + receivedAt: new Date(execution.createdAt), + eventTitle: title, + eventSubtitle: formatTimeAgo(new Date(execution.createdAt)), + eventState: getState(componentName)(execution), + eventId: execution.rootEvent.id, + }, + ]; +} + +export function extractArnResourceName(arn?: string): string | undefined { + if (!arn) { + return undefined; + } + + const name = arn.split(":").at(-1); + return name || undefined; +} diff --git a/web_src/src/pages/workflowv2/mappers/aws/sns/create_topic.ts b/web_src/src/pages/workflowv2/mappers/aws/sns/create_topic.ts new file mode 100644 index 0000000000..dfdcffca44 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/aws/sns/create_topic.ts @@ -0,0 +1,67 @@ +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + OutputPayload, + SubtitleContext, + NodeInfo, +} from "../../types"; +import { MetadataItem } from "@/ui/metadataList"; +import { stringOrDash } from "../../utils"; +import { buildSnsProps, buildSubtitle } from "./common"; + +interface CreateTopicConfiguration { + region?: string; + name?: string; +} + +interface TopicData { + topicArn?: string; + 
name?: string; + displayName?: string; + owner?: string; + fifoTopic?: boolean; + contentBasedDeduplication?: boolean; +} + +export const createTopicMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext) { + return buildSnsProps(context, buildMetadata(context.node)); + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const result = outputs?.default?.[0]?.data as TopicData | undefined; + if (!result) { + return {}; + } + + return { + "Topic ARN": stringOrDash(result.topicArn), + Name: stringOrDash(result.name), + "Display Name": stringOrDash(result.displayName), + Owner: stringOrDash(result.owner), + "FIFO Topic": stringOrDash(result.fifoTopic), + "Content-based Deduplication": stringOrDash(result.contentBasedDeduplication), + }; + }, + + subtitle(context: SubtitleContext): string { + return buildSubtitle(context); + }, +}; + +function buildMetadata(node: NodeInfo): MetadataItem[] { + const configuration = node.configuration as CreateTopicConfiguration | undefined; + const items: MetadataItem[] = []; + + if (configuration?.region) { + items.push({ icon: "map", label: configuration.region }); + } + + if (configuration?.name) { + items.push({ icon: "tag", label: configuration.name }); + } + + return items; +} diff --git a/web_src/src/pages/workflowv2/mappers/aws/sns/delete_topic.ts b/web_src/src/pages/workflowv2/mappers/aws/sns/delete_topic.ts new file mode 100644 index 0000000000..5befc7f4db --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/aws/sns/delete_topic.ts @@ -0,0 +1,60 @@ +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + OutputPayload, + SubtitleContext, + NodeInfo, +} from "../../types"; +import { MetadataItem } from "@/ui/metadataList"; +import { stringOrDash } from "../../utils"; +import { buildSnsProps, buildSubtitle, extractArnResourceName } from "./common"; + +interface 
DeleteTopicConfiguration { + region?: string; + topicArn?: string; +} + +interface DeleteTopicData { + topicArn?: string; + deleted?: boolean; +} + +export const deleteTopicMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext) { + return buildSnsProps(context, buildMetadata(context.node)); + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const result = outputs?.default?.[0]?.data as DeleteTopicData | undefined; + if (!result) { + return {}; + } + + return { + "Topic ARN": stringOrDash(result.topicArn), + Deleted: stringOrDash(result.deleted), + }; + }, + + subtitle(context: SubtitleContext): string { + return buildSubtitle(context); + }, +}; + +function buildMetadata(node: NodeInfo): MetadataItem[] { + const configuration = node.configuration as DeleteTopicConfiguration | undefined; + const topicName = extractArnResourceName(configuration?.topicArn); + const items: MetadataItem[] = []; + + if (configuration?.region) { + items.push({ icon: "map", label: configuration.region }); + } + + if (topicName) { + items.push({ icon: "hash", label: topicName }); + } + + return items; +} diff --git a/web_src/src/pages/workflowv2/mappers/aws/sns/get_subscription.ts b/web_src/src/pages/workflowv2/mappers/aws/sns/get_subscription.ts new file mode 100644 index 0000000000..ae26562cc8 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/aws/sns/get_subscription.ts @@ -0,0 +1,76 @@ +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + OutputPayload, + SubtitleContext, + NodeInfo, +} from "../../types"; +import { MetadataItem } from "@/ui/metadataList"; +import { stringOrDash } from "../../utils"; +import { buildSnsProps, buildSubtitle, extractArnResourceName } from "./common"; + +interface GetSubscriptionConfiguration { + region?: string; + topicArn?: string; + subscriptionArn?: string; +} + +interface SubscriptionData 
{ + subscriptionArn?: string; + topicArn?: string; + protocol?: string; + endpoint?: string; + owner?: string; + pendingConfirmation?: boolean; + rawMessageDelivery?: boolean; +} + +export const getSubscriptionMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext) { + return buildSnsProps(context, buildMetadata(context.node)); + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const result = outputs?.default?.[0]?.data as SubscriptionData | undefined; + if (!result) { + return {}; + } + + return { + "Subscription ARN": stringOrDash(result.subscriptionArn), + "Topic ARN": stringOrDash(result.topicArn), + Protocol: stringOrDash(result.protocol), + Endpoint: stringOrDash(result.endpoint), + Owner: stringOrDash(result.owner), + "Pending Confirmation": stringOrDash(result.pendingConfirmation), + "Raw Message Delivery": stringOrDash(result.rawMessageDelivery), + }; + }, + + subtitle(context: SubtitleContext): string { + return buildSubtitle(context); + }, +}; + +function buildMetadata(node: NodeInfo): MetadataItem[] { + const configuration = node.configuration as GetSubscriptionConfiguration | undefined; + const items: MetadataItem[] = []; + + if (configuration?.region) { + items.push({ icon: "map", label: configuration.region }); + } + + const topicName = extractArnResourceName(configuration?.topicArn); + if (topicName) { + items.push({ icon: "hash", label: topicName }); + } + + const subscriptionArn = extractArnResourceName(configuration?.subscriptionArn); + if (subscriptionArn) { + items.push({ icon: "link", label: subscriptionArn }); + } + + return items; +} diff --git a/web_src/src/pages/workflowv2/mappers/aws/sns/get_topic.ts b/web_src/src/pages/workflowv2/mappers/aws/sns/get_topic.ts new file mode 100644 index 0000000000..83f3547ba6 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/aws/sns/get_topic.ts @@ -0,0 +1,68 @@ +import { 
+ ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + OutputPayload, + SubtitleContext, + NodeInfo, +} from "../../types"; +import { MetadataItem } from "@/ui/metadataList"; +import { stringOrDash } from "../../utils"; +import { buildSnsProps, buildSubtitle, extractArnResourceName } from "./common"; + +interface GetTopicConfiguration { + region?: string; + topicArn?: string; +} + +interface TopicData { + topicArn?: string; + name?: string; + displayName?: string; + owner?: string; + fifoTopic?: boolean; + contentBasedDeduplication?: boolean; +} + +export const getTopicMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext) { + return buildSnsProps(context, buildMetadata(context.node)); + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const result = outputs?.default?.[0]?.data as TopicData | undefined; + if (!result) { + return {}; + } + + return { + "Topic ARN": stringOrDash(result.topicArn), + Name: stringOrDash(result.name), + "Display Name": stringOrDash(result.displayName), + Owner: stringOrDash(result.owner), + "FIFO Topic": stringOrDash(result.fifoTopic), + "Content-based Deduplication": stringOrDash(result.contentBasedDeduplication), + }; + }, + + subtitle(context: SubtitleContext): string { + return buildSubtitle(context); + }, +}; + +function buildMetadata(node: NodeInfo): MetadataItem[] { + const configuration = node.configuration as GetTopicConfiguration | undefined; + const topicName = extractArnResourceName(configuration?.topicArn); + const items: MetadataItem[] = []; + + if (configuration?.region) { + items.push({ icon: "map", label: configuration.region }); + } + + if (topicName) { + items.push({ icon: "hash", label: topicName }); + } + + return items; +} diff --git a/web_src/src/pages/workflowv2/mappers/aws/sns/on_topic_message.ts b/web_src/src/pages/workflowv2/mappers/aws/sns/on_topic_message.ts 
new file mode 100644 index 0000000000..fd24507a27 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/aws/sns/on_topic_message.ts @@ -0,0 +1,82 @@ +import { getBackgroundColorClass } from "@/utils/colors"; +import { TriggerEventContext, TriggerRenderer, TriggerRendererContext } from "../../types"; +import { TriggerProps } from "@/ui/trigger"; +import awsSnsIcon from "@/assets/icons/integrations/aws.sns.svg"; +import { formatTimeAgo } from "@/utils/date"; +import { stringOrDash } from "../../utils"; + +interface OnTopicMessageConfiguration { + region?: string; + topicArn?: string; +} + +interface OnTopicMessageMetadata { + region?: string; + topicArn?: string; +} + +interface TopicMessageEvent { + Type?: string; + Message?: string; + MessageId?: string; + TopicArn?: string; + Subject?: string; + Timestamp?: string; + SignatureVersion?: string; + Signature?: string; + SigningCertURL?: string; + UnsubscribeURL?: string; + SubscribeURL?: string; + Token?: string; + MessageAttributes?: Record; +} + +export const onTopicMessageTriggerRenderer: TriggerRenderer = { + getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { + const eventData = context.event?.data as TopicMessageEvent; + const title = eventData?.MessageId ? eventData.MessageId : "SNS topic message"; + const subtitle = context.event?.createdAt ? 
formatTimeAgo(new Date(context.event.createdAt)) : ""; + + return { title, subtitle }; + }, + + getRootEventValues: (context: TriggerEventContext): Record => { + const eventData = context.event?.data as TopicMessageEvent; + + return { + "Message ID": stringOrDash(eventData?.MessageId), + Message: stringOrDash(eventData?.Message), + "Topic ARN": stringOrDash(eventData?.TopicArn), + Timestamp: stringOrDash(eventData?.Timestamp), + Subject: stringOrDash(eventData?.Subject), + }; + }, + + getTriggerProps: (context: TriggerRendererContext) => { + const { node, definition, lastEvent } = context; + const metadata = node.metadata as OnTopicMessageMetadata | undefined; + const configuration = node.configuration as OnTopicMessageConfiguration | undefined; + const topicArn = metadata?.topicArn || configuration?.topicArn; + const topicName = topicArn ? topicArn.split(":").at(-1) : undefined; + + const props: TriggerProps = { + title: node.name || definition.label || "Unnamed trigger", + iconSrc: awsSnsIcon, + collapsedBackground: getBackgroundColorClass(definition.color), + metadata: topicName ? 
[{ icon: "hash", label: topicName }] : [], + }; + + if (lastEvent) { + const { title, subtitle } = onTopicMessageTriggerRenderer.getTitleAndSubtitle({ event: lastEvent }); + props.lastEventData = { + title, + subtitle, + receivedAt: new Date(lastEvent.createdAt), + state: "triggered", + eventId: lastEvent.id, + }; + } + + return props; + }, +}; diff --git a/web_src/src/pages/workflowv2/mappers/aws/sns/publish_message.ts b/web_src/src/pages/workflowv2/mappers/aws/sns/publish_message.ts new file mode 100644 index 0000000000..cd75d4f331 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/aws/sns/publish_message.ts @@ -0,0 +1,78 @@ +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + OutputPayload, + SubtitleContext, + NodeInfo, +} from "../../types"; +import { MetadataItem } from "@/ui/metadataList"; +import { stringOrDash } from "../../utils"; +import { buildSnsProps, buildSubtitle, extractArnResourceName } from "./common"; + +interface PublishMessageConfiguration { + region?: string; + topicArn?: string; + format?: string; +} + +interface PublishMessageData { + messageId?: string; + topicArn?: string; +} + +export const publishMessageMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext) { + return buildSnsProps(context, buildMetadata(context.node)); + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + const result = outputs?.default?.[0]?.data as PublishMessageData | undefined; + if (!result) { + return {}; + } + + return { + "Message ID": stringOrDash(result.messageId), + "Topic ARN": stringOrDash(result.topicArn), + }; + }, + + subtitle(context: SubtitleContext): string { + return buildSubtitle(context); + }, +}; + +function buildMetadata(node: NodeInfo): MetadataItem[] { + const configuration = node.configuration as PublishMessageConfiguration | undefined; + const metadata: MetadataItem[] = []; + + 
const topicName = extractArnResourceName(configuration?.topicArn); + if (topicName) { + metadata.push({ icon: "hash", label: topicName }); + } + + const formatLabel = formatPublishMessageFormat(configuration?.format); + if (formatLabel) { + metadata.push({ icon: "message-square", label: formatLabel }); + } + + return metadata.slice(0, 2); +} + +function formatPublishMessageFormat(format?: string): string | undefined { + if (!format) { + return undefined; + } + + if (format.toLowerCase() === "json") { + return "JSON"; + } + + if (format.toLowerCase() === "text") { + return "Text"; + } + + return format; +} diff --git a/web_src/src/ui/BuildingBlocksSidebar/index.tsx b/web_src/src/ui/BuildingBlocksSidebar/index.tsx index a8a7c2bc6a..acddb64866 100644 --- a/web_src/src/ui/BuildingBlocksSidebar/index.tsx +++ b/web_src/src/ui/BuildingBlocksSidebar/index.tsx @@ -33,6 +33,7 @@ import awsLambdaIcon from "@/assets/icons/integrations/aws.lambda.svg"; import awsEcrIcon from "@/assets/icons/integrations/aws.ecr.svg"; import awsCodeArtifactIcon from "@/assets/icons/integrations/aws.codeartifact.svg"; import awsCloudwatchIcon from "@/assets/icons/integrations/aws.cloudwatch.svg"; +import awsSnsIcon from "@/assets/icons/integrations/aws.sns.svg"; import rootlyIcon from "@/assets/icons/integrations/rootly.svg"; import SemaphoreLogo from "@/assets/semaphore-logo-sign-black.svg"; import sendgridIcon from "@/assets/icons/integrations/sendgrid.svg"; @@ -425,6 +426,7 @@ function CategorySection({ cloudwatch: awsCloudwatchIcon, lambda: awsLambdaIcon, ecr: awsEcrIcon, + sns: awsSnsIcon, }, }; @@ -503,6 +505,7 @@ function CategorySection({ cloudwatch: awsCloudwatchIcon, ecr: awsEcrIcon, lambda: awsLambdaIcon, + sns: awsSnsIcon, }, }; const appLogo = nameParts[0] ? 
appLogoMap[nameParts[0]] : undefined; diff --git a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index d22d7a7644..9d194f7af9 100644 --- a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -3,6 +3,8 @@ import React from "react"; import awsIcon from "@/assets/icons/integrations/aws.svg"; import awsLambdaIcon from "@/assets/icons/integrations/aws.lambda.svg"; import circleciIcon from "@/assets/icons/integrations/circleci.svg"; +import awsCloudwatchIcon from "@/assets/icons/integrations/aws.cloudwatch.svg"; +import awsSnsIcon from "@/assets/icons/integrations/aws.sns.svg"; import cloudflareIcon from "@/assets/icons/integrations/cloudflare.svg"; import dash0Icon from "@/assets/icons/integrations/dash0.svg"; import datadogIcon from "@/assets/icons/integrations/datadog.svg"; @@ -80,6 +82,7 @@ export const APP_LOGO_MAP: Record> = { aws: { cloudwatch: awsCloudwatchIcon, lambda: awsLambdaIcon, + sns: awsSnsIcon, }, }; From f687e84d28f538d84a6bc2e8fe020d8f1a3038d4 Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Fri, 13 Feb 2026 17:44:59 -0300 Subject: [PATCH 109/160] feat: new components for GitLab pipelines (#3109) Three new GitLab components: - `gitlab.getPipeline` - fetch information about a pipeline - `gitlab.getLatestPipeline` - fetch the latest pipeline on a ref - `gitlab.getTestReportSummary` - fetch the test report summary for a pipeline --------- Signed-off-by: Lucas Pinheiro Co-authored-by: Cursor Agent Signed-off-by: Muhammad Fuzail Zubari --- docs/components/GitLab.mdx | 176 +++++++++++++++ pkg/integrations/gitlab/client.go | 185 ++++++++++++++++ pkg/integrations/gitlab/client_test.go | 208 ++++++++++++++++++ pkg/integrations/gitlab/common.go | 13 ++ .../example_output_get_latest_pipeline.json | 39 ++++ .../gitlab/example_output_get_pipeline.json | 39 ++++ ...xample_output_get_test_report_summary.json | 39 ++++ 
.../gitlab/get_latest_pipeline.go | 137 ++++++++++++ .../gitlab/get_latest_pipeline_test.go | 58 +++++ pkg/integrations/gitlab/get_pipeline.go | 154 +++++++++++++ pkg/integrations/gitlab/get_pipeline_test.go | 58 +++++ .../gitlab/get_test_report_summary.go | 150 +++++++++++++ .../gitlab/get_test_report_summary_test.go | 54 +++++ pkg/integrations/gitlab/gitlab.go | 3 + pkg/integrations/gitlab/list_resources.go | 1 + .../pages/workflowv2/mappers/gitlab/index.ts | 7 + .../mappers/gitlab/pipeline_actions.ts | 111 ++++++++++ 17 files changed, 1432 insertions(+) create mode 100644 pkg/integrations/gitlab/example_output_get_latest_pipeline.json create mode 100644 pkg/integrations/gitlab/example_output_get_pipeline.json create mode 100644 pkg/integrations/gitlab/example_output_get_test_report_summary.json create mode 100644 pkg/integrations/gitlab/get_latest_pipeline.go create mode 100644 pkg/integrations/gitlab/get_latest_pipeline_test.go create mode 100644 pkg/integrations/gitlab/get_pipeline.go create mode 100644 pkg/integrations/gitlab/get_pipeline_test.go create mode 100644 pkg/integrations/gitlab/get_test_report_summary.go create mode 100644 pkg/integrations/gitlab/get_test_report_summary_test.go create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/pipeline_actions.ts diff --git a/docs/components/GitLab.mdx b/docs/components/GitLab.mdx index 4264d7f665..114529bec7 100644 --- a/docs/components/GitLab.mdx +++ b/docs/components/GitLab.mdx @@ -21,6 +21,9 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + + + ## Instructions @@ -633,3 +636,176 @@ The component outputs the created issue object, including: } ``` + + +## Get Latest Pipeline + +The Get Latest Pipeline component retrieves the newest pipeline for a GitLab project. 
+ +### Configuration + +- **Project** (required): The GitLab project to query +- **Ref** (optional): Branch or tag to scope the latest pipeline search + +### Example Output + +```json +{ + "data": { + "before_sha": "f4f6c5a0d2e5ad34be4c17c3f166f4d2ff8b0a55", + "committed_at": "2026-02-13T19:20:45.000Z", + "coverage": "87.1", + "created_at": "2026-02-13T19:21:00.000Z", + "detailed_status": { + "group": "success", + "has_details": true, + "icon": "status_success", + "label": "passed", + "text": "passed", + "tooltip": "passed" + }, + "duration": 268, + "finished_at": "2026-02-13T19:25:43.000Z", + "id": 457882200, + "iid": 9822, + "project_id": 123456, + "queued_duration": 12.6, + "ref": "main", + "sha": "afce89e8d28741d4f65ec71ad0a4174a801122cd", + "source": "merge_request_event", + "started_at": "2026-02-13T19:21:15.000Z", + "status": "success", + "tag": false, + "updated_at": "2026-02-13T19:25:43.000Z", + "user": { + "avatar_url": "https://www.gravatar.com/avatar/ef56gh78", + "id": 18, + "name": "Alex Garcia", + "username": "agarcia" + }, + "web_url": "https://gitlab.com/group/example-project/-/pipelines/457882200", + "yaml_errors": null + }, + "timestamp": "2026-02-13T19:25:43.000Z", + "type": "gitlab.pipeline" +} +``` + + + +## Get Pipeline + +The Get Pipeline component retrieves details for a specific GitLab pipeline. + +### Configuration + +- **Project** (required): The GitLab project containing the pipeline +- **Pipeline** (required): Select a pipeline from the selected project + +### Output + +Returns pipeline data including status, ref, SHA, and pipeline URL. 
+ +### Example Output + +```json +{ + "data": { + "before_sha": "0000000000000000000000000000000000000000", + "committed_at": "2026-02-13T17:59:22.000Z", + "coverage": null, + "created_at": "2026-02-13T18:00:00.000Z", + "detailed_status": { + "group": "running", + "has_details": true, + "icon": "status_running", + "label": "running", + "text": "running", + "tooltip": "running" + }, + "duration": 0, + "finished_at": null, + "id": 457882113, + "iid": 9821, + "project_id": 123456, + "queued_duration": 8.2, + "ref": "main", + "sha": "f4f6c5a0d2e5ad34be4c17c3f166f4d2ff8b0a55", + "source": "push", + "started_at": "2026-02-13T18:00:12.000Z", + "status": "running", + "tag": false, + "updated_at": "2026-02-13T18:00:10.000Z", + "user": { + "avatar_url": "https://www.gravatar.com/avatar/abc123", + "id": 22, + "name": "Jamie Rivera", + "username": "jrivera" + }, + "web_url": "https://gitlab.com/group/example-project/-/pipelines/457882113", + "yaml_errors": null + }, + "timestamp": "2026-02-13T18:00:10.000Z", + "type": "gitlab.pipeline" +} +``` + + + +## Get Test Report Summary + +The Get Test Report Summary component fetches the test report summary for a GitLab pipeline. 
+ +### Configuration + +- **Project** (required): The GitLab project containing the pipeline +- **Pipeline** (required): Select a pipeline from the selected project + +### Example Output + +```json +{ + "data": { + "test_suites": [ + { + "build_ids": [ + 8934210 + ], + "error_count": 0, + "failed_count": 1, + "name": "backend-rspec", + "skipped_count": 0, + "success_count": 247, + "suite_error": null, + "total_count": 248, + "total_time": 81.27 + }, + { + "build_ids": [ + 8934211 + ], + "error_count": 0, + "failed_count": 1, + "name": "frontend-jest", + "skipped_count": 1, + "success_count": 162, + "suite_error": null, + "total_count": 164, + "total_time": 71.19 + } + ], + "total": { + "count": 412, + "error": 0, + "failed": 2, + "skipped": 1, + "success": 409, + "suite_error": null, + "time": 152.46 + } + }, + "timestamp": "2026-02-13T19:26:01.000Z", + "type": "gitlab.testReportSummary" +} +``` + diff --git a/pkg/integrations/gitlab/client.go b/pkg/integrations/gitlab/client.go index dc79dcd1b0..0346c846fa 100644 --- a/pkg/integrations/gitlab/client.go +++ b/pkg/integrations/gitlab/client.go @@ -5,6 +5,7 @@ import ( "context" "encoding/json" "fmt" + "io" "net/http" "net/url" @@ -248,3 +249,187 @@ func (c *Client) FetchIntegrationData() (*User, []Project, error) { return user, projects, nil } + +type PipelineVariable struct { + Key string `json:"key"` + Value string `json:"value"` + VariableType string `json:"variable_type,omitempty"` +} + +type CreatePipelineRequest struct { + Ref string `json:"ref"` + Variables []PipelineVariable `json:"variables,omitempty"` + Inputs []PipelineInput `json:"inputs,omitempty"` +} + +type PipelineInput struct { + Name string `json:"name"` + Value string `json:"value"` +} + +type Pipeline struct { + ID int `json:"id"` + IID int `json:"iid"` + ProjectID int `json:"project_id"` + Status string `json:"status"` + Source string `json:"source,omitempty"` + Ref string `json:"ref"` + SHA string `json:"sha"` + BeforeSHA string 
`json:"before_sha,omitempty"` + Tag bool `json:"tag,omitempty"` + YamlErrors *string `json:"yaml_errors,omitempty"` + WebURL string `json:"web_url"` + URL string `json:"url,omitempty"` + CreatedAt string `json:"created_at"` + UpdatedAt string `json:"updated_at"` + StartedAt string `json:"started_at,omitempty"` + FinishedAt string `json:"finished_at,omitempty"` + CommittedAt string `json:"committed_at,omitempty"` + Duration float64 `json:"duration,omitempty"` + QueuedDuration float64 `json:"queued_duration,omitempty"` + Coverage string `json:"coverage,omitempty"` + User map[string]any `json:"user,omitempty"` + DetailedStatus map[string]any `json:"detailed_status,omitempty"` +} + +type PipelineTestReportSummary struct { + Total map[string]any `json:"total"` + TestSuites []map[string]any `json:"test_suites"` +} + +func (c *Client) CreatePipeline(ctx context.Context, projectID string, req *CreatePipelineRequest) (*Pipeline, error) { + apiURL := fmt.Sprintf("%s/api/%s/projects/%s/pipeline", c.baseURL, apiVersion, url.PathEscape(projectID)) + + body, err := json.Marshal(req) + if err != nil { + return nil, fmt.Errorf("failed to marshal request: %v", err) + } + + httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, apiURL, bytes.NewBuffer(body)) + if err != nil { + return nil, err + } + httpReq.Header.Set("Content-Type", "application/json") + + resp, err := c.do(httpReq) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusCreated { + return nil, fmt.Errorf("failed to create pipeline: status %d, response: %s", resp.StatusCode, readResponseBody(resp)) + } + + var pipeline Pipeline + if err := json.NewDecoder(resp.Body).Decode(&pipeline); err != nil { + return nil, fmt.Errorf("failed to decode pipeline: %v", err) + } + + if pipeline.WebURL == "" && pipeline.URL != "" { + pipeline.WebURL = pipeline.URL + } + + return &pipeline, nil +} + +func (c *Client) GetPipeline(projectID string, pipelineID int) (*Pipeline, 
error) { + apiURL := fmt.Sprintf("%s/api/%s/projects/%s/pipelines/%d", c.baseURL, apiVersion, url.PathEscape(projectID), pipelineID) + req, err := http.NewRequest(http.MethodGet, apiURL, nil) + if err != nil { + return nil, err + } + + resp, err := c.do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("failed to get pipeline: status %d, response: %s", resp.StatusCode, readResponseBody(resp)) + } + + var pipeline Pipeline + if err := json.NewDecoder(resp.Body).Decode(&pipeline); err != nil { + return nil, fmt.Errorf("failed to decode pipeline: %v", err) + } + + if pipeline.WebURL == "" && pipeline.URL != "" { + pipeline.WebURL = pipeline.URL + } + + return &pipeline, nil +} + +func (c *Client) GetLatestPipeline(projectID, ref string) (*Pipeline, error) { + apiURL := fmt.Sprintf("%s/api/%s/projects/%s/pipelines/latest", c.baseURL, apiVersion, url.PathEscape(projectID)) + if ref != "" { + apiURL += fmt.Sprintf("?ref=%s", url.QueryEscape(ref)) + } + + req, err := http.NewRequest(http.MethodGet, apiURL, nil) + if err != nil { + return nil, err + } + + resp, err := c.do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("failed to get latest pipeline: status %d, response: %s", resp.StatusCode, readResponseBody(resp)) + } + + var pipeline Pipeline + if err := json.NewDecoder(resp.Body).Decode(&pipeline); err != nil { + return nil, fmt.Errorf("failed to decode pipeline: %v", err) + } + + if pipeline.WebURL == "" && pipeline.URL != "" { + pipeline.WebURL = pipeline.URL + } + + return &pipeline, nil +} + +func (c *Client) GetPipelineTestReportSummary(projectID string, pipelineID int) (*PipelineTestReportSummary, error) { + apiURL := fmt.Sprintf("%s/api/%s/projects/%s/pipelines/%d/test_report_summary", c.baseURL, apiVersion, url.PathEscape(projectID), pipelineID) + req, err := 
http.NewRequest(http.MethodGet, apiURL, nil) + if err != nil { + return nil, err + } + + resp, err := c.do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("failed to get pipeline test report summary: status %d, response: %s", resp.StatusCode, readResponseBody(resp)) + } + + var summary PipelineTestReportSummary + if err := json.NewDecoder(resp.Body).Decode(&summary); err != nil { + return nil, fmt.Errorf("failed to decode pipeline test report summary: %v", err) + } + + return &summary, nil +} + +func (c *Client) ListPipelines(projectID string) ([]Pipeline, error) { + return fetchAllResources[Pipeline](c, func(page int) string { + return fmt.Sprintf("%s/api/%s/projects/%s/pipelines?per_page=100&page=%d", c.baseURL, apiVersion, url.PathEscape(projectID), page) + }) +} + +func readResponseBody(resp *http.Response) string { + body, err := io.ReadAll(io.LimitReader(resp.Body, 4096)) + if err != nil { + return "" + } + return string(body) +} diff --git a/pkg/integrations/gitlab/client_test.go b/pkg/integrations/gitlab/client_test.go index 2bef640f3b..d759b689c4 100644 --- a/pkg/integrations/gitlab/client_test.go +++ b/pkg/integrations/gitlab/client_test.go @@ -2,7 +2,9 @@ package gitlab import ( "context" + "io" "net/http" + "strings" "testing" "github.com/stretchr/testify/assert" @@ -376,3 +378,209 @@ func Test__Client__ListMilestones(t *testing.T) { assert.Contains(t, err.Error(), "status 404") }) } + +func Test__Client__CreatePipeline(t *testing.T) { + t.Run("success", func(t *testing.T) { + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusCreated, `{ + "id": 12345, + "iid": 321, + "project_id": 456, + "status": "pending", + "ref": "main", + "sha": "abc123", + "web_url": "https://gitlab.com/group/project/-/pipelines/12345" + }`), + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "token", + authType: 
AuthTypePersonalAccessToken, + groupID: "123", + httpClient: mockClient, + } + + pipeline, err := client.CreatePipeline(context.Background(), "456", &CreatePipelineRequest{ + Ref: "main", + Inputs: []PipelineInput{ + {Name: "target_env", Value: "dev"}, + }, + Variables: []PipelineVariable{ + {Key: "ENV", Value: "dev"}, + }, + }) + require.NoError(t, err) + require.NotNil(t, pipeline) + assert.Equal(t, 12345, pipeline.ID) + assert.Equal(t, "pending", pipeline.Status) + assert.Equal(t, "main", pipeline.Ref) + + require.Len(t, mockClient.Requests, 1) + assert.Equal(t, http.MethodPost, mockClient.Requests[0].Method) + assert.Equal(t, "https://gitlab.com/api/v4/projects/456/pipeline", mockClient.Requests[0].URL.String()) + assert.Equal(t, "token", mockClient.Requests[0].Header.Get("PRIVATE-TOKEN")) + + body, readErr := io.ReadAll(mockClient.Requests[0].Body) + require.NoError(t, readErr) + bodyString := string(body) + assert.True(t, strings.Contains(bodyString, `"ref":"main"`)) + assert.True(t, strings.Contains(bodyString, `"inputs":[{"name":"target_env","value":"dev"}]`)) + assert.True(t, strings.Contains(bodyString, `"variables":[{"key":"ENV","value":"dev"}]`)) + }) +} + +func Test__Client__GetPipeline(t *testing.T) { + t.Run("success", func(t *testing.T) { + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{ + "id": 12345, + "iid": 321, + "project_id": 456, + "status": "running", + "ref": "main", + "sha": "abc123", + "web_url": "https://gitlab.com/group/project/-/pipelines/12345" + }`), + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "token", + authType: AuthTypePersonalAccessToken, + groupID: "123", + httpClient: mockClient, + } + + pipeline, err := client.GetPipeline("456", 12345) + require.NoError(t, err) + require.NotNil(t, pipeline) + assert.Equal(t, 12345, pipeline.ID) + assert.Equal(t, "running", pipeline.Status) + assert.Equal(t, "main", pipeline.Ref) + + require.Len(t, 
mockClient.Requests, 1) + assert.Equal(t, http.MethodGet, mockClient.Requests[0].Method) + assert.Equal(t, "https://gitlab.com/api/v4/projects/456/pipelines/12345", mockClient.Requests[0].URL.String()) + }) +} + +func Test__Client__GetLatestPipeline(t *testing.T) { + t.Run("success with ref", func(t *testing.T) { + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{ + "id": 12346, + "iid": 322, + "project_id": 456, + "status": "success", + "ref": "main", + "sha": "def456", + "web_url": "https://gitlab.com/group/project/-/pipelines/12346" + }`), + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "token", + authType: AuthTypePersonalAccessToken, + groupID: "123", + httpClient: mockClient, + } + + pipeline, err := client.GetLatestPipeline("456", "main") + require.NoError(t, err) + require.NotNil(t, pipeline) + assert.Equal(t, 12346, pipeline.ID) + assert.Equal(t, "success", pipeline.Status) + assert.Equal(t, "main", pipeline.Ref) + + require.Len(t, mockClient.Requests, 1) + assert.Equal(t, http.MethodGet, mockClient.Requests[0].Method) + assert.Equal(t, "https://gitlab.com/api/v4/projects/456/pipelines/latest?ref=main", mockClient.Requests[0].URL.String()) + }) +} + +func Test__Client__GetPipelineTestReportSummary(t *testing.T) { + t.Run("success", func(t *testing.T) { + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{ + "total": { + "time": 12.34, + "count": 40, + "success": 39, + "failed": 1, + "skipped": 0, + "error": 0 + }, + "test_suites": [ + { + "name": "rspec", + "total_time": 12.34, + "total_count": 40, + "success_count": 39, + "failed_count": 1 + } + ] + }`), + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "token", + authType: AuthTypePersonalAccessToken, + groupID: "123", + httpClient: mockClient, + } + + summary, err := client.GetPipelineTestReportSummary("456", 12345) + require.NoError(t, err) + 
require.NotNil(t, summary) + assert.Equal(t, 40.0, summary.Total["count"]) + require.Len(t, summary.TestSuites, 1) + assert.Equal(t, "rspec", summary.TestSuites[0]["name"]) + + require.Len(t, mockClient.Requests, 1) + assert.Equal(t, http.MethodGet, mockClient.Requests[0].Method) + assert.Equal(t, "https://gitlab.com/api/v4/projects/456/pipelines/12345/test_report_summary", mockClient.Requests[0].URL.String()) + }) +} + +func Test__Client__ListPipelines(t *testing.T) { + t.Run("success", func(t *testing.T) { + mockClient := &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `[ + {"id": 1001, "status": "running", "ref": "main"}, + {"id": 1000, "status": "success", "ref": "release/v1.0"} + ]`), + }, + } + + client := &Client{ + baseURL: "https://gitlab.com", + token: "token", + authType: AuthTypePersonalAccessToken, + groupID: "123", + httpClient: mockClient, + } + + pipelines, err := client.ListPipelines("456") + require.NoError(t, err) + require.Len(t, pipelines, 2) + assert.Equal(t, 1001, pipelines[0].ID) + assert.Equal(t, "running", pipelines[0].Status) + assert.Equal(t, "main", pipelines[0].Ref) + + require.Len(t, mockClient.Requests, 1) + assert.Equal(t, http.MethodGet, mockClient.Requests[0].Method) + assert.Equal(t, "https://gitlab.com/api/v4/projects/456/pipelines?per_page=100&page=1", mockClient.Requests[0].URL.String()) + }) +} diff --git a/pkg/integrations/gitlab/common.go b/pkg/integrations/gitlab/common.go index de215e180e..3c99aa5e39 100644 --- a/pkg/integrations/gitlab/common.go +++ b/pkg/integrations/gitlab/common.go @@ -5,6 +5,7 @@ import ( "fmt" "net/http" "slices" + "strings" "github.com/mitchellh/mapstructure" "github.com/superplanehq/superplane/pkg/core" @@ -114,3 +115,15 @@ func ensureProjectInMetadata(ctx core.MetadataContext, app core.IntegrationConte Project: &appMetadata.Projects[repoIndex], }) } + +func normalizePipelineRef(ref string) string { + if strings.HasPrefix(ref, "refs/heads/") { + return 
strings.TrimPrefix(ref, "refs/heads/") + } + + if strings.HasPrefix(ref, "refs/tags/") { + return strings.TrimPrefix(ref, "refs/tags/") + } + + return ref +} diff --git a/pkg/integrations/gitlab/example_output_get_latest_pipeline.json b/pkg/integrations/gitlab/example_output_get_latest_pipeline.json new file mode 100644 index 0000000000..2c28521b53 --- /dev/null +++ b/pkg/integrations/gitlab/example_output_get_latest_pipeline.json @@ -0,0 +1,39 @@ +{ + "data": { + "id": 457882200, + "iid": 9822, + "project_id": 123456, + "status": "success", + "source": "merge_request_event", + "ref": "main", + "sha": "afce89e8d28741d4f65ec71ad0a4174a801122cd", + "before_sha": "f4f6c5a0d2e5ad34be4c17c3f166f4d2ff8b0a55", + "tag": false, + "yaml_errors": null, + "web_url": "https://gitlab.com/group/example-project/-/pipelines/457882200", + "created_at": "2026-02-13T19:21:00.000Z", + "updated_at": "2026-02-13T19:25:43.000Z", + "started_at": "2026-02-13T19:21:15.000Z", + "finished_at": "2026-02-13T19:25:43.000Z", + "committed_at": "2026-02-13T19:20:45.000Z", + "duration": 268.0, + "queued_duration": 12.6, + "coverage": "87.1", + "user": { + "id": 18, + "name": "Alex Garcia", + "username": "agarcia", + "avatar_url": "https://www.gravatar.com/avatar/ef56gh78" + }, + "detailed_status": { + "icon": "status_success", + "text": "passed", + "label": "passed", + "group": "success", + "tooltip": "passed", + "has_details": true + } + }, + "timestamp": "2026-02-13T19:25:43.000Z", + "type": "gitlab.pipeline" +} diff --git a/pkg/integrations/gitlab/example_output_get_pipeline.json b/pkg/integrations/gitlab/example_output_get_pipeline.json new file mode 100644 index 0000000000..1fe7ee97be --- /dev/null +++ b/pkg/integrations/gitlab/example_output_get_pipeline.json @@ -0,0 +1,39 @@ +{ + "data": { + "id": 457882113, + "iid": 9821, + "project_id": 123456, + "status": "running", + "source": "push", + "ref": "main", + "sha": "f4f6c5a0d2e5ad34be4c17c3f166f4d2ff8b0a55", + "before_sha": 
"0000000000000000000000000000000000000000", + "tag": false, + "yaml_errors": null, + "web_url": "https://gitlab.com/group/example-project/-/pipelines/457882113", + "created_at": "2026-02-13T18:00:00.000Z", + "updated_at": "2026-02-13T18:00:10.000Z", + "started_at": "2026-02-13T18:00:12.000Z", + "finished_at": null, + "committed_at": "2026-02-13T17:59:22.000Z", + "duration": 0, + "queued_duration": 8.2, + "coverage": null, + "user": { + "id": 22, + "name": "Jamie Rivera", + "username": "jrivera", + "avatar_url": "https://www.gravatar.com/avatar/abc123" + }, + "detailed_status": { + "icon": "status_running", + "text": "running", + "label": "running", + "group": "running", + "tooltip": "running", + "has_details": true + } + }, + "timestamp": "2026-02-13T18:00:10.000Z", + "type": "gitlab.pipeline" +} diff --git a/pkg/integrations/gitlab/example_output_get_test_report_summary.json b/pkg/integrations/gitlab/example_output_get_test_report_summary.json new file mode 100644 index 0000000000..1aa3a1f02b --- /dev/null +++ b/pkg/integrations/gitlab/example_output_get_test_report_summary.json @@ -0,0 +1,39 @@ +{ + "data": { + "total": { + "time": 152.46, + "count": 412, + "success": 409, + "failed": 2, + "skipped": 1, + "error": 0, + "suite_error": null + }, + "test_suites": [ + { + "name": "backend-rspec", + "total_time": 81.27, + "total_count": 248, + "success_count": 247, + "failed_count": 1, + "skipped_count": 0, + "error_count": 0, + "build_ids": [8934210], + "suite_error": null + }, + { + "name": "frontend-jest", + "total_time": 71.19, + "total_count": 164, + "success_count": 162, + "failed_count": 1, + "skipped_count": 1, + "error_count": 0, + "build_ids": [8934211], + "suite_error": null + } + ] + }, + "timestamp": "2026-02-13T19:26:01.000Z", + "type": "gitlab.testReportSummary" +} diff --git a/pkg/integrations/gitlab/get_latest_pipeline.go b/pkg/integrations/gitlab/get_latest_pipeline.go new file mode 100644 index 0000000000..fe5c3255e4 --- /dev/null +++ 
b/pkg/integrations/gitlab/get_latest_pipeline.go @@ -0,0 +1,137 @@ +package gitlab + +import ( + _ "embed" + "encoding/json" + "fmt" + "net/http" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +//go:embed example_output_get_latest_pipeline.json +var exampleOutputGetLatestPipeline []byte + +type GetLatestPipeline struct{} + +type GetLatestPipelineConfiguration struct { + Project string `json:"project" mapstructure:"project"` + Ref string `json:"ref" mapstructure:"ref"` +} + +func (c *GetLatestPipeline) Name() string { + return "gitlab.getLatestPipeline" +} + +func (c *GetLatestPipeline) Label() string { + return "Get Latest Pipeline" +} + +func (c *GetLatestPipeline) Description() string { + return "Get the latest GitLab pipeline for a project" +} + +func (c *GetLatestPipeline) Documentation() string { + return `The Get Latest Pipeline component retrieves the newest pipeline for a GitLab project. 
+ +## Configuration + +- **Project** (required): The GitLab project to query +- **Ref** (optional): Branch or tag to scope the latest pipeline search` +} + +func (c *GetLatestPipeline) Icon() string { + return "gitlab" +} + +func (c *GetLatestPipeline) Color() string { + return "orange" +} + +func (c *GetLatestPipeline) ExampleOutput() map[string]any { + var example map[string]any + if err := json.Unmarshal(exampleOutputGetLatestPipeline, &example); err != nil { + return map[string]any{} + } + return example +} + +func (c *GetLatestPipeline) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *GetLatestPipeline) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "project", + Label: "Project", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeProject, + }, + }, + }, + { + Name: "ref", + Label: "Ref", + Type: configuration.FieldTypeGitRef, + }, + } +} + +func (c *GetLatestPipeline) Setup(ctx core.SetupContext) error { + var config GetLatestPipelineConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + return ensureProjectInMetadata(ctx.Metadata, ctx.Integration, config.Project) +} + +func (c *GetLatestPipeline) Execute(ctx core.ExecutionContext) error { + var config GetLatestPipelineConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + pipeline, err := client.GetLatestPipeline(config.Project, normalizePipelineRef(config.Ref)) + if err != nil { + return fmt.Errorf("failed to get latest pipeline: %w", err) + } + + return 
ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, "gitlab.pipeline", []any{pipeline}) +} + +func (c *GetLatestPipeline) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *GetLatestPipeline) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *GetLatestPipeline) Actions() []core.Action { + return []core.Action{} +} + +func (c *GetLatestPipeline) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *GetLatestPipeline) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *GetLatestPipeline) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/gitlab/get_latest_pipeline_test.go b/pkg/integrations/gitlab/get_latest_pipeline_test.go new file mode 100644 index 0000000000..b6f0251fcc --- /dev/null +++ b/pkg/integrations/gitlab/get_latest_pipeline_test.go @@ -0,0 +1,58 @@ +package gitlab + +import ( + "encoding/json" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__GetLatestPipeline__Execute(t *testing.T) { + component := &GetLatestPipeline{} + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "project": "123", + "ref": "refs/heads/main", + }, + Integration: &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypePersonalAccessToken, + "groupId": "123", + "accessToken": "pat", + "baseUrl": "https://gitlab.com", + }, + }, + HTTP: &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{ + "id": 1002, + "iid": 74, + "project_id": 123, + "status": "success", + "ref": "main" + }`), + }, + }, + ExecutionState: executionState, + }) + + 
require.NoError(t, err) + assert.Equal(t, core.DefaultOutputChannel.Name, executionState.Channel) + assert.Equal(t, "gitlab.pipeline", executionState.Type) + require.Len(t, executionState.Payloads, 1) + + payload := executionState.Payloads[0].(map[string]any) + dataBytes, err := json.Marshal(payload["data"]) + require.NoError(t, err) + + var pipeline Pipeline + require.NoError(t, json.Unmarshal(dataBytes, &pipeline)) + assert.Equal(t, 1002, pipeline.ID) + assert.Equal(t, "success", pipeline.Status) +} diff --git a/pkg/integrations/gitlab/get_pipeline.go b/pkg/integrations/gitlab/get_pipeline.go new file mode 100644 index 0000000000..787be2efe1 --- /dev/null +++ b/pkg/integrations/gitlab/get_pipeline.go @@ -0,0 +1,154 @@ +package gitlab + +import ( + _ "embed" + "encoding/json" + "fmt" + "net/http" + "strconv" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +//go:embed example_output_get_pipeline.json +var exampleOutputGetPipeline []byte + +type GetPipeline struct{} + +type GetPipelineConfiguration struct { + Project string `json:"project" mapstructure:"project"` + Pipeline string `json:"pipeline" mapstructure:"pipeline"` +} + +func (c *GetPipeline) Name() string { + return "gitlab.getPipeline" +} + +func (c *GetPipeline) Label() string { + return "Get Pipeline" +} + +func (c *GetPipeline) Description() string { + return "Get a GitLab pipeline" +} + +func (c *GetPipeline) Documentation() string { + return `The Get Pipeline component retrieves details for a specific GitLab pipeline. 
+ +## Configuration + +- **Project** (required): The GitLab project containing the pipeline +- **Pipeline** (required): Select a pipeline from the selected project + +## Output + +Returns pipeline data including status, ref, SHA, and pipeline URL.` +} + +func (c *GetPipeline) Icon() string { + return "gitlab" +} + +func (c *GetPipeline) Color() string { + return "orange" +} + +func (c *GetPipeline) ExampleOutput() map[string]any { + var example map[string]any + if err := json.Unmarshal(exampleOutputGetPipeline, &example); err != nil { + return map[string]any{} + } + return example +} + +func (c *GetPipeline) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *GetPipeline) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "project", + Label: "Project", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeProject, + }, + }, + }, + { + Name: "pipeline", + Label: "Pipeline", + Type: configuration.FieldTypeString, + Required: true, + Placeholder: "e.g. 
1234567890", + Description: "The ID of the pipeline to get", + }, + } +} + +func (c *GetPipeline) Setup(ctx core.SetupContext) error { + var config GetPipelineConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if config.Pipeline == "" { + return fmt.Errorf("pipeline is required") + } + + return ensureProjectInMetadata(ctx.Metadata, ctx.Integration, config.Project) +} + +func (c *GetPipeline) Execute(ctx core.ExecutionContext) error { + var config GetPipelineConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + p, err := strconv.ParseFloat(config.Pipeline, 64) + if err != nil { + return fmt.Errorf("pipeline ID must be a number: %v", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + pipeline, err := client.GetPipeline(config.Project, int(p)) + if err != nil { + return fmt.Errorf("failed to get pipeline: %w", err) + } + + return ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, "gitlab.pipeline", []any{pipeline}) +} + +func (c *GetPipeline) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *GetPipeline) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *GetPipeline) Actions() []core.Action { + return []core.Action{} +} + +func (c *GetPipeline) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *GetPipeline) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *GetPipeline) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/gitlab/get_pipeline_test.go b/pkg/integrations/gitlab/get_pipeline_test.go new file mode 100644 index 0000000000..34640919dc --- /dev/null +++ b/pkg/integrations/gitlab/get_pipeline_test.go @@ 
-0,0 +1,58 @@ +package gitlab + +import ( + "encoding/json" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__GetPipeline__Execute(t *testing.T) { + component := &GetPipeline{} + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "project": "123", + "pipeline": "1001", + }, + Integration: &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypePersonalAccessToken, + "groupId": "123", + "accessToken": "pat", + "baseUrl": "https://gitlab.com", + }, + }, + HTTP: &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{ + "id": 1001, + "iid": 73, + "project_id": 123, + "status": "running", + "ref": "main" + }`), + }, + }, + ExecutionState: executionState, + }) + + require.NoError(t, err) + assert.Equal(t, core.DefaultOutputChannel.Name, executionState.Channel) + assert.Equal(t, "gitlab.pipeline", executionState.Type) + require.Len(t, executionState.Payloads, 1) + + payload := executionState.Payloads[0].(map[string]any) + dataBytes, err := json.Marshal(payload["data"]) + require.NoError(t, err) + + var pipeline Pipeline + require.NoError(t, json.Unmarshal(dataBytes, &pipeline)) + assert.Equal(t, 1001, pipeline.ID) + assert.Equal(t, "running", pipeline.Status) +} diff --git a/pkg/integrations/gitlab/get_test_report_summary.go b/pkg/integrations/gitlab/get_test_report_summary.go new file mode 100644 index 0000000000..b1b00b36c7 --- /dev/null +++ b/pkg/integrations/gitlab/get_test_report_summary.go @@ -0,0 +1,150 @@ +package gitlab + +import ( + _ "embed" + "encoding/json" + "fmt" + "net/http" + "strconv" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + 
"github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +//go:embed example_output_get_test_report_summary.json +var exampleOutputGetTestReportSummary []byte + +type GetTestReportSummary struct{} + +type GetTestReportSummaryConfiguration struct { + Project string `json:"project" mapstructure:"project"` + Pipeline string `json:"pipeline" mapstructure:"pipeline"` +} + +func (c *GetTestReportSummary) Name() string { + return "gitlab.getTestReportSummary" +} + +func (c *GetTestReportSummary) Label() string { + return "Get Test Report Summary" +} + +func (c *GetTestReportSummary) Description() string { + return "Get GitLab pipeline test report summary" +} + +func (c *GetTestReportSummary) Documentation() string { + return `The Get Test Report Summary component fetches the test report summary for a GitLab pipeline. + +## Configuration + +- **Project** (required): The GitLab project containing the pipeline +- **Pipeline** (required): Select a pipeline from the selected project` +} + +func (c *GetTestReportSummary) Icon() string { + return "gitlab" +} + +func (c *GetTestReportSummary) Color() string { + return "orange" +} + +func (c *GetTestReportSummary) ExampleOutput() map[string]any { + var example map[string]any + if err := json.Unmarshal(exampleOutputGetTestReportSummary, &example); err != nil { + return map[string]any{} + } + return example +} + +func (c *GetTestReportSummary) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *GetTestReportSummary) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "project", + Label: "Project", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeProject, + }, + }, + }, + { + Name: "pipeline", + Label: "Pipeline", + Type: 
configuration.FieldTypeString, + Required: true, + Placeholder: "e.g. 1234567890", + Description: "The ID of the pipeline", + }, + } +} + +func (c *GetTestReportSummary) Setup(ctx core.SetupContext) error { + var config GetTestReportSummaryConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if config.Pipeline == "" { + return fmt.Errorf("pipeline is required") + } + + return ensureProjectInMetadata(ctx.Metadata, ctx.Integration, config.Project) +} + +func (c *GetTestReportSummary) Execute(ctx core.ExecutionContext) error { + var config GetTestReportSummaryConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + p, err := strconv.ParseFloat(config.Pipeline, 64) + if err != nil { + return fmt.Errorf("pipeline ID must be a number: %v", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + summary, err := client.GetPipelineTestReportSummary(config.Project, int(p)) + if err != nil { + return fmt.Errorf("failed to get test report summary: %w", err) + } + + return ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, "gitlab.testReportSummary", []any{summary}) +} + +func (c *GetTestReportSummary) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *GetTestReportSummary) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return http.StatusOK, nil +} + +func (c *GetTestReportSummary) Actions() []core.Action { + return []core.Action{} +} + +func (c *GetTestReportSummary) HandleAction(ctx core.ActionContext) error { + return nil +} + +func (c *GetTestReportSummary) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *GetTestReportSummary) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git 
a/pkg/integrations/gitlab/get_test_report_summary_test.go b/pkg/integrations/gitlab/get_test_report_summary_test.go new file mode 100644 index 0000000000..843741b44a --- /dev/null +++ b/pkg/integrations/gitlab/get_test_report_summary_test.go @@ -0,0 +1,54 @@ +package gitlab + +import ( + "encoding/json" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__GetTestReportSummary__Execute(t *testing.T) { + component := &GetTestReportSummary{} + executionState := &contexts.ExecutionStateContext{KVs: map[string]string{}} + + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "project": "123", + "pipeline": "1002", + }, + Integration: &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypePersonalAccessToken, + "groupId": "123", + "accessToken": "pat", + "baseUrl": "https://gitlab.com", + }, + }, + HTTP: &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{ + "total": {"count": 40, "success": 39, "failed": 1}, + "test_suites": [{"name": "rspec", "total_count": 40}] + }`), + }, + }, + ExecutionState: executionState, + }) + + require.NoError(t, err) + assert.Equal(t, core.DefaultOutputChannel.Name, executionState.Channel) + assert.Equal(t, "gitlab.testReportSummary", executionState.Type) + require.Len(t, executionState.Payloads, 1) + + payload := executionState.Payloads[0].(map[string]any) + dataBytes, err := json.Marshal(payload["data"]) + require.NoError(t, err) + + var summary PipelineTestReportSummary + require.NoError(t, json.Unmarshal(dataBytes, &summary)) + assert.Equal(t, 40.0, summary.Total["count"]) +} diff --git a/pkg/integrations/gitlab/gitlab.go b/pkg/integrations/gitlab/gitlab.go index f54d2e94c1..59d0ab1d9a 100644 --- a/pkg/integrations/gitlab/gitlab.go +++ 
b/pkg/integrations/gitlab/gitlab.go @@ -169,6 +169,9 @@ func (g *GitLab) Configuration() []configuration.Field { func (g *GitLab) Components() []core.Component { return []core.Component{ &CreateIssue{}, + &GetPipeline{}, + &GetLatestPipeline{}, + &GetTestReportSummary{}, } } diff --git a/pkg/integrations/gitlab/list_resources.go b/pkg/integrations/gitlab/list_resources.go index e124cf7b70..d046f628dc 100644 --- a/pkg/integrations/gitlab/list_resources.go +++ b/pkg/integrations/gitlab/list_resources.go @@ -10,6 +10,7 @@ import ( const ( ResourceTypeMember = "member" ResourceTypeMilestone = "milestone" + ResourceTypePipeline = "pipeline" ResourceTypeProject = "project" ) diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/index.ts b/web_src/src/pages/workflowv2/mappers/gitlab/index.ts index 59c3c9c17d..a70e337a51 100644 --- a/web_src/src/pages/workflowv2/mappers/gitlab/index.ts +++ b/web_src/src/pages/workflowv2/mappers/gitlab/index.ts @@ -7,13 +7,20 @@ import { onMilestoneTriggerRenderer } from "./on_milestone"; import { onReleaseTriggerRenderer } from "./on_release"; import { onTagTriggerRenderer } from "./on_tag"; import { onVulnerabilityTriggerRenderer } from "./on_vulnerability"; +import { pipelineLookupMapper, testReportSummaryMapper } from "./pipeline_actions"; export const eventStateRegistry: Record = { createIssue: buildActionStateRegistry("created"), + getPipeline: buildActionStateRegistry("retrieved"), + getLatestPipeline: buildActionStateRegistry("retrieved"), + getTestReportSummary: buildActionStateRegistry("retrieved"), }; export const componentMappers: Record = { createIssue: createIssueMapper, + getPipeline: pipelineLookupMapper, + getLatestPipeline: pipelineLookupMapper, + getTestReportSummary: testReportSummaryMapper, }; export const triggerRenderers: Record = { diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/pipeline_actions.ts b/web_src/src/pages/workflowv2/mappers/gitlab/pipeline_actions.ts new file mode 100644 index 
0000000000..b19f6e581d --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/pipeline_actions.ts @@ -0,0 +1,111 @@ +import { ComponentBaseProps } from "@/ui/componentBase"; +import { + ComponentBaseContext, + ComponentBaseMapper, + ExecutionDetailsContext, + OutputPayload, + SubtitleContext, +} from "../types"; +import { baseProps } from "./base"; +import { buildGitlabExecutionSubtitle } from "./utils"; + +interface PipelineOutput { + id?: number; + iid?: number; + status?: string; + ref?: string; + sha?: string; + web_url?: string; + url?: string; +} + +interface TestReportSummaryOutput { + total?: { + count?: number; + success?: number; + failed?: number; + skipped?: number; + error?: number; + time?: number; + }; + test_suites?: Array<{ + name?: string; + total_count?: number; + success_count?: number; + failed_count?: number; + skipped_count?: number; + error_count?: number; + }>; +} + +function getOutputData(context: { execution: { outputs?: unknown } }): unknown { + const outputs = context.execution.outputs as { default?: OutputPayload[] } | undefined; + return outputs?.default?.[0]?.data; +} + +export const pipelineLookupMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + return baseProps(context.nodes, context.node, context.componentDefinition, context.lastExecutions); + }, + + subtitle(context: SubtitleContext): string { + const pipeline = getOutputData(context) as PipelineOutput | undefined; + if (pipeline?.status) { + return buildGitlabExecutionSubtitle(context.execution, `Pipeline ${pipeline.status}`); + } + return buildGitlabExecutionSubtitle(context.execution, "Pipeline Retrieved"); + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const pipeline = getOutputData(context) as PipelineOutput | undefined; + const details: Record = {}; + + if (!pipeline) { + return details; + } + + if (pipeline.id) details["Pipeline ID"] = pipeline.id.toString(); + if (pipeline.iid) 
details["Pipeline IID"] = pipeline.iid.toString(); + if (pipeline.status) details["Status"] = pipeline.status; + if (pipeline.ref) details["Ref"] = pipeline.ref; + if (pipeline.sha) details["SHA"] = pipeline.sha; + if (pipeline.web_url || pipeline.url) details["Pipeline URL"] = pipeline.web_url || pipeline.url || ""; + + return details; + }, +}; + +export const testReportSummaryMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + return baseProps(context.nodes, context.node, context.componentDefinition, context.lastExecutions); + }, + + subtitle(context: SubtitleContext): string { + const summary = getOutputData(context) as TestReportSummaryOutput | undefined; + const failed = summary?.total?.failed; + if (failed !== undefined) { + return buildGitlabExecutionSubtitle(context.execution, `${failed} failed tests`); + } + return buildGitlabExecutionSubtitle(context.execution, "Test Report Retrieved"); + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const summary = getOutputData(context) as TestReportSummaryOutput | undefined; + const details: Record = {}; + const total = summary?.total; + + if (!total) { + return details; + } + + if (total.count !== undefined) details["Total Tests"] = total.count.toString(); + if (total.success !== undefined) details["Passed Tests"] = total.success.toString(); + if (total.failed !== undefined) details["Failed Tests"] = total.failed.toString(); + if (total.skipped !== undefined) details["Skipped Tests"] = total.skipped.toString(); + if (total.error !== undefined) details["Errored Tests"] = total.error.toString(); + if (total.time !== undefined) details["Total Time (s)"] = total.time.toString(); + if (summary?.test_suites) details["Test Suites"] = summary.test_suites.length.toString(); + + return details; + }, +}; From 1afc48448fd77123eedf0f950fc3e441cc77ca11 Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Fri, 13 Feb 2026 19:51:03 -0300 Subject: [PATCH 110/160] 
feat: gitlab.runPipeline and gitlab.onPipeline (#3115) New components for GItLab: - New gitlab.onPipeline trigger to listen to GitLab pipeline events - New gitlab.runPipeline trigger to run GitLab pipelines --------- Signed-off-by: Lucas Pinheiro Co-authored-by: Cursor Agent Signed-off-by: Muhammad Fuzail Zubari --- docs/components/GitLab.mdx | 120 ++++ pkg/integrations/gitlab/client.go | 26 +- pkg/integrations/gitlab/client_test.go | 10 +- pkg/integrations/gitlab/common.go | 10 + pkg/integrations/gitlab/example.go | 10 + .../gitlab/example_data_on_pipeline.json | 35 ++ .../gitlab/example_output_run_pipeline.json | 42 ++ pkg/integrations/gitlab/gitlab.go | 2 + pkg/integrations/gitlab/list_resources.go | 1 - .../gitlab/list_resources_test.go | 1 + pkg/integrations/gitlab/on_pipeline.go | 185 ++++++ pkg/integrations/gitlab/on_pipeline_test.go | 69 +++ pkg/integrations/gitlab/run_pipeline.go | 542 ++++++++++++++++++ pkg/integrations/gitlab/run_pipeline_test.go | 196 +++++++ .../pages/workflowv2/mappers/gitlab/index.ts | 5 + .../workflowv2/mappers/gitlab/on_pipeline.ts | 117 ++++ .../workflowv2/mappers/gitlab/run_pipeline.ts | 188 ++++++ 17 files changed, 1548 insertions(+), 11 deletions(-) create mode 100644 pkg/integrations/gitlab/example_data_on_pipeline.json create mode 100644 pkg/integrations/gitlab/example_output_run_pipeline.json create mode 100644 pkg/integrations/gitlab/on_pipeline.go create mode 100644 pkg/integrations/gitlab/on_pipeline_test.go create mode 100644 pkg/integrations/gitlab/run_pipeline.go create mode 100644 pkg/integrations/gitlab/run_pipeline_test.go create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/on_pipeline.ts create mode 100644 web_src/src/pages/workflowv2/mappers/gitlab/run_pipeline.ts diff --git a/docs/components/GitLab.mdx b/docs/components/GitLab.mdx index 114529bec7..edc2ab293b 100644 --- a/docs/components/GitLab.mdx +++ b/docs/components/GitLab.mdx @@ -10,6 +10,7 @@ Manage and react to changes in your GitLab 
repositories + @@ -24,6 +25,7 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + ## Instructions @@ -291,6 +293,65 @@ The On Milestone trigger starts a workflow execution when milestone events occur } ``` + + +## On Pipeline + +The On Pipeline trigger starts a workflow execution when pipeline events occur in a GitLab project. + +### Configuration + +- **Project** (required): GitLab project to monitor +- **Statuses** (required): Select which pipeline statuses to listen for. Default: success, failed, canceled. + +### Outputs + +- **Default channel**: Emits pipeline webhook payload data including status, ref, SHA, and project information + +### Webhook Setup + +This trigger automatically sets up a GitLab webhook when configured. The webhook is managed by SuperPlane and will be cleaned up when the trigger is removed. + +### Example Data + +```json +{ + "data": { + "merge_request": { + "iid": 12, + "title": "Improve CI pipeline" + }, + "object_attributes": { + "created_at": "2026-02-10 12:00:00 UTC", + "duration": 190, + "finished_at": "2026-02-10 12:03:10 UTC", + "id": 12345, + "iid": 321, + "ref": "main", + "sha": "f4f6c5a0d2e5ad34be4c17c3f166f4d2ff8b0a55", + "source": "push", + "status": "success", + "updated_at": "2026-02-10 12:03:10 UTC", + "url": "https://gitlab.com/group/example-project/-/pipelines/12345" + }, + "object_kind": "pipeline", + "project": { + "id": 987, + "name": "example-project", + "path_with_namespace": "group/example-project", + "web_url": "https://gitlab.com/group/example-project" + }, + "user": { + "id": 22, + "name": "Jamie Rivera", + "username": "jrivera" + } + }, + "timestamp": "2026-02-13T18:00:00.000000000Z", + "type": "gitlab.pipeline" +} +``` + ## On Release @@ -809,3 +870,62 @@ The Get Test Report Summary component fetches the test report summary for a GitL } ``` + + +## Run Pipeline + +The Run Pipeline component triggers a GitLab pipeline and waits for it to complete. 
+ +### Use Cases + +- **CI/CD orchestration**: Trigger GitLab pipelines from SuperPlane workflows +- **Deployment automation**: Run deployment pipelines with inputs +- **Pipeline chaining**: Coordinate follow-up actions after pipeline completion + +### Example Output + +```json +{ + "data": { + "pipeline": { + "before_sha": "0000000000000000000000000000000000000000", + "committed_at": "2026-02-13T17:59:22.000Z", + "coverage": "86.5", + "created_at": "2026-02-13T18:00:00.000Z", + "detailed_status": { + "group": "success", + "has_details": true, + "icon": "status_success", + "label": "passed", + "text": "passed", + "tooltip": "passed" + }, + "duration": 240, + "finished_at": "2026-02-13T18:04:12.000Z", + "id": 457882113, + "iid": 9821, + "project_id": 123456, + "queued_duration": 8.2, + "ref": "main", + "sha": "f4f6c5a0d2e5ad34be4c17c3f166f4d2ff8b0a55", + "source": "web", + "started_at": "2026-02-13T18:00:12.000Z", + "status": "success", + "tag": false, + "updated_at": "2026-02-13T18:04:12.000Z", + "url": "https://gitlab.com/group/example-project/-/pipelines/457882113", + "user": { + "avatar_url": "https://www.gravatar.com/avatar/abc123", + "id": 22, + "name": "Jamie Rivera", + "username": "jrivera" + }, + "web_url": "https://gitlab.com/group/example-project/-/pipelines/457882113", + "yaml_errors": null + } + }, + "timestamp": "2026-02-13T18:04:12.000Z", + "type": "gitlab.pipeline.finished" +} +``` + diff --git a/pkg/integrations/gitlab/client.go b/pkg/integrations/gitlab/client.go index 0346c846fa..ec37b7515e 100644 --- a/pkg/integrations/gitlab/client.go +++ b/pkg/integrations/gitlab/client.go @@ -257,9 +257,8 @@ type PipelineVariable struct { } type CreatePipelineRequest struct { - Ref string `json:"ref"` - Variables []PipelineVariable `json:"variables,omitempty"` - Inputs []PipelineInput `json:"inputs,omitempty"` + Ref string `json:"ref"` + Inputs map[string]string `json:"inputs,omitempty"` } type PipelineInput struct { @@ -362,6 +361,27 @@ func (c *Client) 
GetPipeline(projectID string, pipelineID int) (*Pipeline, error return &pipeline, nil } +func (c *Client) CancelPipeline(ctx context.Context, projectID string, pipelineID int) error { + apiURL := fmt.Sprintf("%s/api/%s/projects/%s/pipelines/%d/cancel", c.baseURL, apiVersion, url.PathEscape(projectID), pipelineID) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, apiURL, nil) + if err != nil { + return err + } + + resp, err := c.do(req) + if err != nil { + return err + } + defer resp.Body.Close() + + switch resp.StatusCode { + case http.StatusOK, http.StatusCreated, http.StatusAccepted, http.StatusNoContent: + return nil + default: + return fmt.Errorf("failed to cancel pipeline: status %d, response: %s", resp.StatusCode, readResponseBody(resp)) + } +} + func (c *Client) GetLatestPipeline(projectID, ref string) (*Pipeline, error) { apiURL := fmt.Sprintf("%s/api/%s/projects/%s/pipelines/latest", c.baseURL, apiVersion, url.PathEscape(projectID)) if ref != "" { diff --git a/pkg/integrations/gitlab/client_test.go b/pkg/integrations/gitlab/client_test.go index d759b689c4..663c708bf1 100644 --- a/pkg/integrations/gitlab/client_test.go +++ b/pkg/integrations/gitlab/client_test.go @@ -405,11 +405,8 @@ func Test__Client__CreatePipeline(t *testing.T) { pipeline, err := client.CreatePipeline(context.Background(), "456", &CreatePipelineRequest{ Ref: "main", - Inputs: []PipelineInput{ - {Name: "target_env", Value: "dev"}, - }, - Variables: []PipelineVariable{ - {Key: "ENV", Value: "dev"}, + Inputs: map[string]string{ + "target_env": "dev", }, }) require.NoError(t, err) @@ -427,8 +424,7 @@ func Test__Client__CreatePipeline(t *testing.T) { require.NoError(t, readErr) bodyString := string(body) assert.True(t, strings.Contains(bodyString, `"ref":"main"`)) - assert.True(t, strings.Contains(bodyString, `"inputs":[{"name":"target_env","value":"dev"}]`)) - assert.True(t, strings.Contains(bodyString, `"variables":[{"key":"ENV","value":"dev"}]`)) + assert.True(t, 
strings.Contains(bodyString, `"inputs":{"target_env":"dev"}`)) }) } diff --git a/pkg/integrations/gitlab/common.go b/pkg/integrations/gitlab/common.go index 3c99aa5e39..16344539bd 100644 --- a/pkg/integrations/gitlab/common.go +++ b/pkg/integrations/gitlab/common.go @@ -11,6 +11,16 @@ import ( "github.com/superplanehq/superplane/pkg/core" ) +const ( + PipelineStatusSuccess = "success" + PipelineStatusFailed = "failed" + PipelineStatusCanceled = "canceled" + PipelineStatusCancelled = "cancelled" + PipelineStatusSkipped = "skipped" + PipelineStatusManual = "manual" + PipelineStatusBlocked = "blocked" +) + type WebhookConfiguration struct { EventType string `json:"eventType" mapstructure:"eventType"` ProjectID string `json:"projectId" mapstructure:"projectId"` diff --git a/pkg/integrations/gitlab/example.go b/pkg/integrations/gitlab/example.go index 397360118d..e9514d6534 100644 --- a/pkg/integrations/gitlab/example.go +++ b/pkg/integrations/gitlab/example.go @@ -16,6 +16,9 @@ var exampleDataOnMergeRequestBytes []byte //go:embed example_data_on_milestone.json var exampleDataOnMilestoneBytes []byte +//go:embed example_data_on_pipeline.json +var exampleDataOnPipelineBytes []byte + //go:embed example_data_on_release.json var exampleDataOnReleaseBytes []byte @@ -34,6 +37,9 @@ var exampleDataOnMergeRequest map[string]any var exampleDataOnMilestoneOnce sync.Once var exampleDataOnMilestone map[string]any +var exampleDataOnPipelineOnce sync.Once +var exampleDataOnPipeline map[string]any + var exampleDataOnReleaseOnce sync.Once var exampleDataOnRelease map[string]any @@ -55,6 +61,10 @@ func (m *OnMilestone) ExampleData() map[string]any { return utils.UnmarshalEmbeddedJSON(&exampleDataOnMilestoneOnce, exampleDataOnMilestoneBytes, &exampleDataOnMilestone) } +func (p *OnPipeline) ExampleData() map[string]any { + return utils.UnmarshalEmbeddedJSON(&exampleDataOnPipelineOnce, exampleDataOnPipelineBytes, &exampleDataOnPipeline) +} + func (r *OnRelease) ExampleData() map[string]any { 
return utils.UnmarshalEmbeddedJSON(&exampleDataOnReleaseOnce, exampleDataOnReleaseBytes, &exampleDataOnRelease) } diff --git a/pkg/integrations/gitlab/example_data_on_pipeline.json b/pkg/integrations/gitlab/example_data_on_pipeline.json new file mode 100644 index 0000000000..77e4fd8327 --- /dev/null +++ b/pkg/integrations/gitlab/example_data_on_pipeline.json @@ -0,0 +1,35 @@ +{ + "data": { + "object_kind": "pipeline", + "object_attributes": { + "id": 12345, + "iid": 321, + "ref": "main", + "sha": "f4f6c5a0d2e5ad34be4c17c3f166f4d2ff8b0a55", + "status": "success", + "source": "push", + "created_at": "2026-02-10 12:00:00 UTC", + "updated_at": "2026-02-10 12:03:10 UTC", + "finished_at": "2026-02-10 12:03:10 UTC", + "duration": 190, + "url": "https://gitlab.com/group/example-project/-/pipelines/12345" + }, + "project": { + "id": 987, + "name": "example-project", + "path_with_namespace": "group/example-project", + "web_url": "https://gitlab.com/group/example-project" + }, + "user": { + "id": 22, + "name": "Jamie Rivera", + "username": "jrivera" + }, + "merge_request": { + "iid": 12, + "title": "Improve CI pipeline" + } + }, + "timestamp": "2026-02-13T18:00:00.000000000Z", + "type": "gitlab.pipeline" +} diff --git a/pkg/integrations/gitlab/example_output_run_pipeline.json b/pkg/integrations/gitlab/example_output_run_pipeline.json new file mode 100644 index 0000000000..6825c2dbd6 --- /dev/null +++ b/pkg/integrations/gitlab/example_output_run_pipeline.json @@ -0,0 +1,42 @@ +{ + "data": { + "pipeline": { + "id": 457882113, + "iid": 9821, + "project_id": 123456, + "status": "success", + "source": "web", + "ref": "main", + "sha": "f4f6c5a0d2e5ad34be4c17c3f166f4d2ff8b0a55", + "before_sha": "0000000000000000000000000000000000000000", + "tag": false, + "yaml_errors": null, + "web_url": "https://gitlab.com/group/example-project/-/pipelines/457882113", + "url": "https://gitlab.com/group/example-project/-/pipelines/457882113", + "created_at": "2026-02-13T18:00:00.000Z", + 
"updated_at": "2026-02-13T18:04:12.000Z", + "started_at": "2026-02-13T18:00:12.000Z", + "finished_at": "2026-02-13T18:04:12.000Z", + "committed_at": "2026-02-13T17:59:22.000Z", + "duration": 240.0, + "queued_duration": 8.2, + "coverage": "86.5", + "user": { + "id": 22, + "name": "Jamie Rivera", + "username": "jrivera", + "avatar_url": "https://www.gravatar.com/avatar/abc123" + }, + "detailed_status": { + "icon": "status_success", + "text": "passed", + "label": "passed", + "group": "success", + "tooltip": "passed", + "has_details": true + } + } + }, + "timestamp": "2026-02-13T18:04:12.000Z", + "type": "gitlab.pipeline.finished" +} diff --git a/pkg/integrations/gitlab/gitlab.go b/pkg/integrations/gitlab/gitlab.go index 59d0ab1d9a..ec29d2be04 100644 --- a/pkg/integrations/gitlab/gitlab.go +++ b/pkg/integrations/gitlab/gitlab.go @@ -169,6 +169,7 @@ func (g *GitLab) Configuration() []configuration.Field { func (g *GitLab) Components() []core.Component { return []core.Component{ &CreateIssue{}, + &RunPipeline{}, &GetPipeline{}, &GetLatestPipeline{}, &GetTestReportSummary{}, @@ -180,6 +181,7 @@ func (g *GitLab) Triggers() []core.Trigger { &OnIssue{}, &OnMergeRequest{}, &OnMilestone{}, + &OnPipeline{}, &OnRelease{}, &OnTag{}, &OnVulnerability{}, diff --git a/pkg/integrations/gitlab/list_resources.go b/pkg/integrations/gitlab/list_resources.go index d046f628dc..e124cf7b70 100644 --- a/pkg/integrations/gitlab/list_resources.go +++ b/pkg/integrations/gitlab/list_resources.go @@ -10,7 +10,6 @@ import ( const ( ResourceTypeMember = "member" ResourceTypeMilestone = "milestone" - ResourceTypePipeline = "pipeline" ResourceTypeProject = "project" ) diff --git a/pkg/integrations/gitlab/list_resources_test.go b/pkg/integrations/gitlab/list_resources_test.go index 5f138f0ba6..b423b504ca 100644 --- a/pkg/integrations/gitlab/list_resources_test.go +++ b/pkg/integrations/gitlab/list_resources_test.go @@ -133,4 +133,5 @@ func Test__GitLab__ListResources(t *testing.T) { require.NoError(t, 
err) assert.Empty(t, resources) }) + } diff --git a/pkg/integrations/gitlab/on_pipeline.go b/pkg/integrations/gitlab/on_pipeline.go new file mode 100644 index 0000000000..289145128c --- /dev/null +++ b/pkg/integrations/gitlab/on_pipeline.go @@ -0,0 +1,185 @@ +package gitlab + +import ( + "encoding/json" + "fmt" + "net/http" + "slices" + + "github.com/mitchellh/mapstructure" + log "github.com/sirupsen/logrus" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +type OnPipeline struct{} + +type OnPipelineConfiguration struct { + Project string `json:"project" mapstructure:"project"` + Statuses []string `json:"statuses" mapstructure:"statuses"` +} + +func (p *OnPipeline) Name() string { + return "gitlab.onPipeline" +} + +func (p *OnPipeline) Label() string { + return "On Pipeline" +} + +func (p *OnPipeline) Description() string { + return "Listen to pipeline events from GitLab" +} + +func (p *OnPipeline) Documentation() string { + return `The On Pipeline trigger starts a workflow execution when pipeline events occur in a GitLab project. + +## Configuration + +- **Project** (required): GitLab project to monitor +- **Statuses** (required): Select which pipeline statuses to listen for. Default: success, failed, canceled. + +## Outputs + +- **Default channel**: Emits pipeline webhook payload data including status, ref, SHA, and project information + +## Webhook Setup + +This trigger automatically sets up a GitLab webhook when configured. 
The webhook is managed by SuperPlane and will be cleaned up when the trigger is removed.` +} + +func (p *OnPipeline) Icon() string { + return "gitlab" +} + +func (p *OnPipeline) Color() string { + return "orange" +} + +func (p *OnPipeline) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "project", + Label: "Project", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeProject, + }, + }, + }, + { + Name: "statuses", + Label: "Statuses", + Type: configuration.FieldTypeMultiSelect, + Required: true, + Default: []string{PipelineStatusSuccess}, + TypeOptions: &configuration.TypeOptions{ + MultiSelect: &configuration.MultiSelectTypeOptions{ + Options: []configuration.FieldOption{ + {Label: "Success", Value: PipelineStatusSuccess}, + {Label: "Failed", Value: PipelineStatusFailed}, + {Label: "Canceled", Value: PipelineStatusCanceled}, + {Label: "Skipped", Value: PipelineStatusSkipped}, + {Label: "Manual", Value: PipelineStatusManual}, + }, + }, + }, + }, + } +} + +func (p *OnPipeline) Setup(ctx core.TriggerContext) error { + var config OnPipelineConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if err := ensureProjectInMetadata(ctx.Metadata, ctx.Integration, config.Project); err != nil { + return err + } + + return ctx.Integration.RequestWebhook(WebhookConfiguration{ + EventType: "pipeline", + ProjectID: config.Project, + }) +} + +func (p *OnPipeline) Actions() []core.Action { + return []core.Action{} +} + +func (p *OnPipeline) HandleAction(ctx core.TriggerActionContext) (map[string]any, error) { + return nil, nil +} + +func (p *OnPipeline) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + var config OnPipelineConfiguration + if err := mapstructure.Decode(ctx.Configuration, &config); err != 
nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode configuration: %w", err) + } + + eventType := ctx.Headers.Get("X-Gitlab-Event") + if eventType == "" { + return http.StatusBadRequest, fmt.Errorf("missing X-Gitlab-Event header") + } + + if eventType != "Pipeline Hook" { + return http.StatusOK, nil + } + + code, err := verifyWebhookToken(ctx) + if err != nil { + return code, err + } + + data := map[string]any{} + if err := json.Unmarshal(ctx.Body, &data); err != nil { + return http.StatusBadRequest, fmt.Errorf("error parsing request body: %v", err) + } + + status, ok := p.extractStatus(data) + if !ok { + return http.StatusBadRequest, fmt.Errorf("status missing from pipeline payload") + } + + if len(config.Statuses) > 0 && !p.whitelistedStatus(ctx.Logger, status, config.Statuses) { + return http.StatusOK, nil + } + + if err := ctx.Events.Emit("gitlab.pipeline", data); err != nil { + return http.StatusInternalServerError, fmt.Errorf("error emitting event: %v", err) + } + + return http.StatusOK, nil +} + +func (p *OnPipeline) Cleanup(ctx core.TriggerContext) error { + return nil +} + +func (p *OnPipeline) extractStatus(data map[string]any) (string, bool) { + attrs, ok := data["object_attributes"].(map[string]any) + if !ok { + return "", false + } + + status, ok := attrs["status"].(string) + if !ok { + return "", false + } + + return status, true +} + +func (p *OnPipeline) whitelistedStatus(logger *log.Entry, status string, allowedStatuses []string) bool { + if !slices.Contains(allowedStatuses, status) { + if logger != nil { + logger.Infof("Pipeline status %s is not in the allowed list: %v", status, allowedStatuses) + } + return false + } + + return true +} diff --git a/pkg/integrations/gitlab/on_pipeline_test.go b/pkg/integrations/gitlab/on_pipeline_test.go new file mode 100644 index 0000000000..854f91e9d8 --- /dev/null +++ b/pkg/integrations/gitlab/on_pipeline_test.go @@ -0,0 +1,69 @@ +package gitlab + +import ( + "net/http" + "testing" + + 
log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__OnPipeline__HandleWebhook__StatusMatch(t *testing.T) { + trigger := &OnPipeline{} + body := []byte(`{"object_attributes":{"id":123,"status":"success"}}`) + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Pipeline Hook", "token"), + Body: body, + Configuration: map[string]any{"project": "123", "statuses": []string{"success"}}, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Equal(t, 1, events.Count()) + assert.Equal(t, "gitlab.pipeline", events.Payloads[0].Type) +} + +func Test__OnPipeline__HandleWebhook__StatusMismatch(t *testing.T) { + trigger := &OnPipeline{} + body := []byte(`{"object_attributes":{"id":123,"status":"running"}}`) + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Pipeline Hook", "token"), + Body: body, + Configuration: map[string]any{"project": "123", "statuses": []string{"success"}}, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusOK, code) + assert.NoError(t, err) + assert.Zero(t, events.Count()) +} + +func Test__OnPipeline__HandleWebhook__MissingStatus(t *testing.T) { + trigger := &OnPipeline{} + body := []byte(`{"object_attributes":{"id":123}}`) + events := &contexts.EventContext{} + + code, err := trigger.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Pipeline Hook", "token"), + Body: body, + Configuration: map[string]any{"project": "123", "statuses": []string{"success"}}, + Webhook: &contexts.WebhookContext{Secret: "token"}, + Events: 
events, + Logger: log.NewEntry(log.New()), + }) + + assert.Equal(t, http.StatusBadRequest, code) + assert.Error(t, err) + assert.Zero(t, events.Count()) +} diff --git a/pkg/integrations/gitlab/run_pipeline.go b/pkg/integrations/gitlab/run_pipeline.go new file mode 100644 index 0000000000..3e40abc1c6 --- /dev/null +++ b/pkg/integrations/gitlab/run_pipeline.go @@ -0,0 +1,542 @@ +package gitlab + +import ( + "context" + _ "embed" + "encoding/json" + "fmt" + "net/http" + "strconv" + "strings" + "time" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +//go:embed example_output_run_pipeline.json +var exampleOutputRunPipeline []byte + +const ( + PipelinePayloadType = "gitlab.pipeline.finished" + PipelinePassedOutputChannel = "passed" + PipelineFailedOutputChannel = "failed" + + RunPipelinePollInterval = 5 * time.Minute + RunPipelinePollAction = "poll" + RunPipelineKVPipelineID = "pipeline_id" +) + +type RunPipeline struct{} + +type RunPipelineSpec struct { + Project string `json:"project" mapstructure:"project"` + Ref string `json:"ref" mapstructure:"ref"` + Inputs []RunPipelineInputSpec `json:"inputs" mapstructure:"inputs"` +} + +type RunPipelineInputSpec struct { + Name string `json:"name" mapstructure:"name"` + Value string `json:"value" mapstructure:"value"` +} + +type RunPipelineExecutionMetadata struct { + Pipeline *PipelineMetadata `json:"pipeline" mapstructure:"pipeline"` +} + +type PipelineMetadata struct { + ID int `json:"id"` + IID int `json:"iid"` + Status string `json:"status"` + URL string `json:"url,omitempty"` +} + +func (r *RunPipeline) Name() string { + return "gitlab.runPipeline" +} + +func (r *RunPipeline) Label() string { + return "Run Pipeline" +} + +func (r *RunPipeline) Description() string { + return "Run a GitLab pipeline and wait for completion" +} + +func (r *RunPipeline) Documentation() string { + return `The Run 
Pipeline component triggers a GitLab pipeline and waits for it to complete. + +## Use Cases + +- **CI/CD orchestration**: Trigger GitLab pipelines from SuperPlane workflows +- **Deployment automation**: Run deployment pipelines with inputs +- **Pipeline chaining**: Coordinate follow-up actions after pipeline completion` +} + +func (r *RunPipeline) Icon() string { + return "workflow" +} + +func (r *RunPipeline) Color() string { + return "orange" +} + +func (r *RunPipeline) ExampleOutput() map[string]any { + var example map[string]any + if err := json.Unmarshal(exampleOutputRunPipeline, &example); err != nil { + return map[string]any{} + } + return example +} + +func (r *RunPipeline) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{ + { + Name: PipelinePassedOutputChannel, + Label: "Passed", + }, + { + Name: PipelineFailedOutputChannel, + Label: "Failed", + }, + } +} + +func (r *RunPipeline) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "project", + Label: "Project", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: ResourceTypeProject, + }, + }, + }, + { + Name: "ref", + Label: "Ref", + Type: configuration.FieldTypeGitRef, + Required: true, + Default: "main", + }, + { + Name: "inputs", + Label: "Inputs", + Type: configuration.FieldTypeList, + TypeOptions: &configuration.TypeOptions{ + List: &configuration.ListTypeOptions{ + ItemLabel: "Input", + ItemDefinition: &configuration.ListItemDefinition{ + Type: configuration.FieldTypeObject, + Schema: []configuration.Field{ + { + Name: "name", + Label: "Name", + Type: configuration.FieldTypeString, + Required: true, + DisallowExpression: true, + }, + { + Name: "value", + Label: "Value", + Type: configuration.FieldTypeString, + Required: true, + }, + }, + }, + }, + }, + }, + } +} + +func (r *RunPipeline) ProcessQueueItem(ctx 
core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (r *RunPipeline) Setup(ctx core.SetupContext) error { + spec := RunPipelineSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + if strings.TrimSpace(spec.Project) == "" { + return fmt.Errorf("project is required") + } + + if strings.TrimSpace(spec.Ref) == "" { + return fmt.Errorf("ref is required") + } + + if err := ensureProjectInMetadata(ctx.Metadata, ctx.Integration, spec.Project); err != nil { + return err + } + + return ctx.Integration.RequestWebhook(WebhookConfiguration{ + EventType: "pipeline", + ProjectID: spec.Project, + }) +} + +func (r *RunPipeline) Execute(ctx core.ExecutionContext) error { + spec := RunPipelineSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + nodeMetadata := NodeMetadata{} + if err := mapstructure.Decode(ctx.NodeMetadata.Get(), &nodeMetadata); err != nil { + return fmt.Errorf("failed to decode node metadata: %w", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + pipeline, err := client.CreatePipeline(context.Background(), spec.Project, &CreatePipelineRequest{ + Ref: normalizePipelineRef(spec.Ref), + Inputs: r.buildInputs(spec.Inputs), + }) + + if err != nil { + return fmt.Errorf("failed to create pipeline: %w", err) + } + + metadata := RunPipelineExecutionMetadata{Pipeline: &PipelineMetadata{ + ID: pipeline.ID, + IID: pipeline.IID, + Status: pipeline.Status, + URL: pipeline.WebURL, + }} + + if err := ctx.Metadata.Set(metadata); err != nil { + return err + } + + if err := ctx.ExecutionState.SetKV(RunPipelineKVPipelineID, strconv.Itoa(pipeline.ID)); err != nil { + return err + } + + ctx.Logger.Infof("Started GitLab pipeline %d on project %s (ref=%s)", pipeline.ID, spec.Project, spec.Ref) + return 
ctx.Requests.ScheduleActionCall(RunPipelinePollAction, map[string]any{}, RunPipelinePollInterval) +} + +func (r *RunPipeline) Cancel(ctx core.ExecutionContext) error { + metadata := RunPipelineExecutionMetadata{} + if err := mapstructure.Decode(ctx.Metadata.Get(), &metadata); err != nil { + return fmt.Errorf("failed to decode metadata: %w", err) + } + + if metadata.Pipeline == nil || metadata.Pipeline.ID == 0 { + ctx.Logger.Info("No pipeline to cancel") + return nil + } + + if isPipelineDone(metadata.Pipeline.Status) { + ctx.Logger.Infof("Pipeline %d already done - %s", metadata.Pipeline.ID, metadata.Pipeline.Status) + return nil + } + + spec := RunPipelineSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + if err := client.CancelPipeline(context.Background(), spec.Project, metadata.Pipeline.ID); err != nil { + return fmt.Errorf("failed to cancel pipeline: %w", err) + } + + err = ctx.Metadata.Set(RunPipelineExecutionMetadata{Pipeline: &PipelineMetadata{ + ID: metadata.Pipeline.ID, + IID: metadata.Pipeline.IID, + URL: metadata.Pipeline.URL, + Status: PipelineStatusCanceled, + }}) + + if err != nil { + return err + } + + ctx.Logger.Infof("Cancel request sent for pipeline %d", metadata.Pipeline.ID) + return nil +} + +func (r *RunPipeline) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + spec := RunPipelineSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode configuration: %w", err) + } + + eventType := ctx.Headers.Get("X-Gitlab-Event") + if eventType == "" { + return http.StatusBadRequest, fmt.Errorf("missing X-Gitlab-Event header") + } + + if eventType != "Pipeline Hook" { + return http.StatusOK, nil + } + + code, err := verifyWebhookToken(ctx) + if err != nil { + return 
code, err + } + + var payload map[string]any + if err := json.Unmarshal(ctx.Body, &payload); err != nil { + return http.StatusBadRequest, fmt.Errorf("error parsing request body: %v", err) + } + + newMetadata, err := metadataFromWebhook(payload) + if err != nil { + return http.StatusBadRequest, err + } + + executionCtx, err := ctx.FindExecutionByKV(RunPipelineKVPipelineID, strconv.Itoa(newMetadata.Pipeline.ID)) + + // + // Ignore hooks for pipelines not started by SuperPlane + // + if err != nil { + return http.StatusOK, nil + } + + metadata := RunPipelineExecutionMetadata{} + if err := mapstructure.Decode(executionCtx.Metadata.Get(), &metadata); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to decode metadata: %w", err) + } + + // + // If pipeline is already done, do nothing. + // + if metadata.Pipeline != nil && isPipelineDone(metadata.Pipeline.Status) { + ctx.Logger.Infof("Pipeline %d is already done - %s", newMetadata.Pipeline.ID, metadata.Pipeline.Status) + return http.StatusOK, nil + } + + // + // Set new metadata + // + if err := executionCtx.Metadata.Set(newMetadata); err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to set metadata: %w", err) + } + + // + // If pipeline is not done, do not complete execution and emit yet. + // + if !isPipelineDone(newMetadata.Pipeline.Status) { + ctx.Logger.Infof("Pipeline %d is not done - %s", newMetadata.Pipeline.ID, newMetadata.Pipeline.Status) + return http.StatusOK, nil + } + + // + // Fetch pipeline from API so we have the latest status, + // and so the data emitted by webhook update and by polling is the same. 
+ // + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to create client: %w", err) + } + + pipeline, err := client.GetPipeline(spec.Project, newMetadata.Pipeline.ID) + if err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to get pipeline: %w", err) + } + + // + // Emit on proper channel + // + channel := PipelineFailedOutputChannel + if pipeline.Status == PipelineStatusSuccess { + channel = PipelinePassedOutputChannel + } + + err = executionCtx.ExecutionState.Emit(channel, PipelinePayloadType, []any{ + map[string]any{ + "pipeline": pipeline, + }, + }) + + if err != nil { + return http.StatusInternalServerError, fmt.Errorf("failed to emit pipeline result: %w", err) + } + + ctx.Logger.Infof("Pipeline %d completed - %s", pipeline.ID, pipeline.Status) + return http.StatusOK, nil +} + +func (r *RunPipeline) Actions() []core.Action { + return []core.Action{ + { + Name: RunPipelinePollAction, + UserAccessible: false, + }, + } +} + +func (r *RunPipeline) HandleAction(ctx core.ActionContext) error { + switch ctx.Name { + case RunPipelinePollAction: + return r.poll(ctx) + } + + return fmt.Errorf("unknown action: %s", ctx.Name) +} + +func (r *RunPipeline) poll(ctx core.ActionContext) error { + if ctx.ExecutionState.IsFinished() { + return nil + } + + spec := RunPipelineSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + + metadata := RunPipelineExecutionMetadata{} + if err := mapstructure.Decode(ctx.Metadata.Get(), &metadata); err != nil { + return fmt.Errorf("failed to decode metadata: %w", err) + } + + if metadata.Pipeline == nil || metadata.Pipeline.ID == 0 { + return fmt.Errorf("pipeline metadata is missing") + } + + // + // If pipeline is already done, do nothing. 
+ // + if isPipelineDone(metadata.Pipeline.Status) { + return nil + } + + // + // Otherwise, poll, update metadata and emit result if pipeline is done. + // + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + pipeline, err := client.GetPipeline(spec.Project, metadata.Pipeline.ID) + if err != nil { + return err + } + + newMetadata := RunPipelineExecutionMetadata{Pipeline: &PipelineMetadata{ + ID: pipeline.ID, + IID: pipeline.IID, + Status: pipeline.Status, + URL: pipeline.URL, + }} + + if err := ctx.Metadata.Set(newMetadata); err != nil { + return err + } + + if !isPipelineDone(pipeline.Status) { + return ctx.Requests.ScheduleActionCall(RunPipelinePollAction, map[string]any{}, RunPipelinePollInterval) + } + + channel := PipelineFailedOutputChannel + if metadata.Pipeline != nil && metadata.Pipeline.Status == PipelineStatusSuccess { + channel = PipelinePassedOutputChannel + } + + return ctx.ExecutionState.Emit(channel, PipelinePayloadType, []any{ + map[string]any{ + "pipeline": pipeline, + }, + }) +} + +func (r *RunPipeline) Cleanup(ctx core.SetupContext) error { + return nil +} + +func (r *RunPipeline) buildInputs(inputs []RunPipelineInputSpec) map[string]string { + result := make(map[string]string, len(inputs)) + for _, input := range inputs { + if strings.TrimSpace(input.Name) == "" { + continue + } + + result[input.Name] = input.Value + } + + return result +} + +func metadataFromWebhook(payload map[string]any) (*RunPipelineExecutionMetadata, error) { + attrs, ok := payload["object_attributes"].(map[string]any) + if !ok { + return nil, fmt.Errorf("pipeline attributes missing from webhook payload") + } + + pipelineID, ok := intFromAny(attrs["id"]) + if !ok { + return nil, fmt.Errorf("pipeline id missing from webhook payload") + } + + status, ok := attrs["status"].(string) + if !ok || status == "" { + return nil, fmt.Errorf("pipeline status missing from webhook payload") + } + + pipelineIID, _ := intFromAny(attrs["iid"]) + 
url, _ := attrs["url"].(string) + + return &RunPipelineExecutionMetadata{ + Pipeline: &PipelineMetadata{ + ID: pipelineID, + IID: pipelineIID, + Status: status, + URL: url, + }, + }, nil +} + +func isPipelineDone(status string) bool { + switch status { + case PipelineStatusSuccess, + PipelineStatusFailed, + PipelineStatusCanceled, + PipelineStatusCancelled, + PipelineStatusSkipped, + PipelineStatusManual, + PipelineStatusBlocked: + return true + default: + return false + } +} + +func intFromAny(value any) (int, bool) { + switch typed := value.(type) { + case int: + return typed, true + case int32: + return int(typed), true + case int64: + return int(typed), true + case float64: + return int(typed), true + case string: + parsed, err := strconv.Atoi(typed) + if err != nil { + return 0, false + } + return parsed, true + default: + return 0, false + } +} diff --git a/pkg/integrations/gitlab/run_pipeline_test.go b/pkg/integrations/gitlab/run_pipeline_test.go new file mode 100644 index 0000000000..4274d1215c --- /dev/null +++ b/pkg/integrations/gitlab/run_pipeline_test.go @@ -0,0 +1,196 @@ +package gitlab + +import ( + "net/http" + "testing" + + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/test/support/contexts" +) + +func Test__RunPipeline__Execute(t *testing.T) { + component := &RunPipeline{} + metadataCtx := &contexts.MetadataContext{} + requestsCtx := &contexts.RequestContext{} + executionState := &contexts.ExecutionStateContext{ + KVs: map[string]string{}, + } + + err := component.Execute(core.ExecutionContext{ + Configuration: map[string]any{ + "project": "123", + "ref": "refs/heads/main", + "inputs": []map[string]any{ + {"name": "target_env", "value": "dev"}, + }, + "variables": []map[string]any{ + {"name": "DEPLOY_ENV", "value": "dev", "variableType": "env_var"}, + }, + }, + Integration: 
&contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypePersonalAccessToken, + "groupId": "123", + "accessToken": "pat", + "baseUrl": "https://gitlab.com", + }, + }, + HTTP: &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusCreated, `{ + "id": 1001, + "iid": 73, + "project_id": 123, + "status": "pending", + "ref": "main", + "sha": "abc123", + "web_url": "https://gitlab.com/group/project/-/pipelines/1001" + }`), + }, + }, + Metadata: metadataCtx, + NodeMetadata: &contexts.MetadataContext{}, + ExecutionState: executionState, + Requests: requestsCtx, + Logger: log.NewEntry(log.New()), + }) + + require.NoError(t, err) + + metadata, ok := metadataCtx.Metadata.(RunPipelineExecutionMetadata) + require.True(t, ok) + require.NotNil(t, metadata.Pipeline) + assert.Equal(t, 1001, metadata.Pipeline.ID) + assert.Equal(t, "pending", metadata.Pipeline.Status) + assert.Equal(t, "1001", executionState.KVs[RunPipelineKVPipelineID]) + assert.Equal(t, RunPipelinePollAction, requestsCtx.Action) + assert.Equal(t, RunPipelinePollInterval, requestsCtx.Duration) +} + +func Test__RunPipeline__HandleWebhook__FinishedPipeline(t *testing.T) { + component := &RunPipeline{} + metadataCtx := &contexts.MetadataContext{ + Metadata: RunPipelineExecutionMetadata{ + Pipeline: &PipelineMetadata{ + ID: 1001, + Status: "running", + }, + }, + } + executionState := &contexts.ExecutionStateContext{ + KVs: map[string]string{}, + } + + code, err := component.HandleWebhook(core.WebhookRequestContext{ + Headers: gitlabHeaders("Pipeline Hook", "token"), + Body: []byte(`{ + "object_kind": "pipeline", + "project": {"id": 123}, + "object_attributes": { + "id": 1001, + "iid": 73, + "status": "success", + "ref": "main", + "url": "https://gitlab.com/group/project/-/pipelines/1001" + } + }`), + Webhook: &contexts.WebhookContext{ + Secret: "token", + }, + Logger: log.NewEntry(log.New()), + Integration: &contexts.IntegrationContext{ + Configuration: 
map[string]any{ + "authType": AuthTypePersonalAccessToken, + "groupId": "123", + "accessToken": "pat", + "baseUrl": "https://gitlab.com", + }, + }, + HTTP: &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{ + "id": 1001, + "iid": 73, + "project_id": 123, + "status": "success", + "ref": "main", + "url": "https://gitlab.com/group/project/-/pipelines/1001" + }`), + }, + }, + FindExecutionByKV: func(key string, value string) (*core.ExecutionContext, error) { + if key == RunPipelineKVPipelineID && value == "1001" { + return &core.ExecutionContext{ + Metadata: metadataCtx, + ExecutionState: executionState, + }, nil + } + return nil, assert.AnError + }, + }) + + assert.Equal(t, http.StatusOK, code) + require.NoError(t, err) + assert.Equal(t, PipelinePassedOutputChannel, executionState.Channel) + assert.Equal(t, PipelinePayloadType, executionState.Type) + + metadata, ok := metadataCtx.Metadata.(*RunPipelineExecutionMetadata) + require.True(t, ok) + require.NotNil(t, metadata.Pipeline) + assert.Equal(t, "success", metadata.Pipeline.Status) +} + +func Test__RunPipeline__Poll__SchedulesNextWhenRunning(t *testing.T) { + component := &RunPipeline{} + metadataCtx := &contexts.MetadataContext{ + Metadata: RunPipelineExecutionMetadata{ + Pipeline: &PipelineMetadata{ + ID: 1001, + Status: "running", + }, + }, + } + requestsCtx := &contexts.RequestContext{} + executionState := &contexts.ExecutionStateContext{ + KVs: map[string]string{}, + } + + err := component.HandleAction(core.ActionContext{ + Name: RunPipelinePollAction, + Configuration: map[string]any{ + "project": "123", + "ref": "main", + }, + Metadata: metadataCtx, + Integration: &contexts.IntegrationContext{ + Configuration: map[string]any{ + "authType": AuthTypePersonalAccessToken, + "groupId": "123", + "accessToken": "pat", + "baseUrl": "https://gitlab.com", + }, + }, + HTTP: &contexts.HTTPContext{ + Responses: []*http.Response{ + GitlabMockResponse(http.StatusOK, `{ + "id": 1001, 
+ "iid": 73, + "project_id": 123, + "status": "running", + "ref": "main" + }`), + }, + }, + Requests: requestsCtx, + ExecutionState: executionState, + Logger: log.NewEntry(log.New()), + }) + + require.NoError(t, err) + assert.Equal(t, RunPipelinePollAction, requestsCtx.Action) + assert.Equal(t, RunPipelinePollInterval, requestsCtx.Duration) + assert.Empty(t, executionState.Channel) +} diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/index.ts b/web_src/src/pages/workflowv2/mappers/gitlab/index.ts index a70e337a51..bbf9261dde 100644 --- a/web_src/src/pages/workflowv2/mappers/gitlab/index.ts +++ b/web_src/src/pages/workflowv2/mappers/gitlab/index.ts @@ -4,13 +4,16 @@ import { createIssueMapper } from "./create_issue"; import { onIssueTriggerRenderer } from "./on_issue"; import { onMergeRequestTriggerRenderer } from "./on_merge_request"; import { onMilestoneTriggerRenderer } from "./on_milestone"; +import { onPipelineTriggerRenderer } from "./on_pipeline"; import { onReleaseTriggerRenderer } from "./on_release"; import { onTagTriggerRenderer } from "./on_tag"; import { onVulnerabilityTriggerRenderer } from "./on_vulnerability"; +import { RUN_PIPELINE_STATE_REGISTRY, runPipelineMapper } from "./run_pipeline"; import { pipelineLookupMapper, testReportSummaryMapper } from "./pipeline_actions"; export const eventStateRegistry: Record = { createIssue: buildActionStateRegistry("created"), + runPipeline: RUN_PIPELINE_STATE_REGISTRY, getPipeline: buildActionStateRegistry("retrieved"), getLatestPipeline: buildActionStateRegistry("retrieved"), getTestReportSummary: buildActionStateRegistry("retrieved"), @@ -18,6 +21,7 @@ export const eventStateRegistry: Record = { export const componentMappers: Record = { createIssue: createIssueMapper, + runPipeline: runPipelineMapper, getPipeline: pipelineLookupMapper, getLatestPipeline: pipelineLookupMapper, getTestReportSummary: testReportSummaryMapper, @@ -27,6 +31,7 @@ export const triggerRenderers: Record = { onIssue: 
onIssueTriggerRenderer, onMergeRequest: onMergeRequestTriggerRenderer, onMilestone: onMilestoneTriggerRenderer, + onPipeline: onPipelineTriggerRenderer, onRelease: onReleaseTriggerRenderer, onTag: onTagTriggerRenderer, onVulnerability: onVulnerabilityTriggerRenderer, diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/on_pipeline.ts b/web_src/src/pages/workflowv2/mappers/gitlab/on_pipeline.ts new file mode 100644 index 0000000000..95830f3e64 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/on_pipeline.ts @@ -0,0 +1,117 @@ +import { getBackgroundColorClass, getColorClass } from "@/utils/colors"; +import gitlabIcon from "@/assets/icons/integrations/gitlab.svg"; +import { TriggerProps } from "@/ui/trigger"; +import { TriggerEventContext, TriggerRenderer, TriggerRendererContext } from "../types"; +import { buildGitlabSubtitle } from "./utils"; +import { GitLabNodeMetadata } from "./types"; +import { stringOrDash } from "../utils"; + +interface OnPipelineConfiguration { + statuses: string[]; +} + +interface PipelineObjectAttributes { + id?: number; + iid?: number; + status?: string; + ref?: string; + sha?: string; + url?: string; +} + +interface OnPipelineEventData { + object_kind?: string; + object_attributes?: PipelineObjectAttributes; + project?: { + id: number; + name: string; + path_with_namespace: string; + web_url: string; + }; +} + +function getPipelineTitle(eventData: OnPipelineEventData): string { + const attrs = eventData?.object_attributes; + if (attrs?.iid) { + return `Pipeline #${attrs.iid}`; + } + if (attrs?.id) { + return `Pipeline #${attrs.id}`; + } + return "Pipeline"; +} + +export const onPipelineTriggerRenderer: TriggerRenderer = { + getTitleAndSubtitle: (context: TriggerEventContext): { title: string; subtitle: string } => { + const eventData = context.event?.data as OnPipelineEventData; + const attrs = eventData?.object_attributes; + + return { + title: getPipelineTitle(eventData), + subtitle: buildGitlabSubtitle(attrs?.status 
|| "", context.event?.createdAt), + }; + }, + + getRootEventValues: (context: TriggerEventContext): Record => { + const eventData = context.event?.data as OnPipelineEventData; + const attrs = eventData?.object_attributes; + const values: Record = { + ID: stringOrDash(attrs?.id?.toString()), + IID: stringOrDash(attrs?.iid?.toString()), + Status: stringOrDash(attrs?.status), + Ref: stringOrDash(attrs?.ref), + SHA: stringOrDash(attrs?.sha), + URL: stringOrDash(attrs?.url), + }; + + if (eventData?.project?.path_with_namespace) { + values.Project = eventData.project.path_with_namespace; + } + + return values; + }, + + getTriggerProps: (context: TriggerRendererContext): TriggerProps => { + const { node, definition, lastEvent } = context; + const metadata = node.metadata as unknown as GitLabNodeMetadata; + const configuration = node.configuration as unknown as OnPipelineConfiguration; + const metadataItems = []; + + if (metadata?.project?.name) { + metadataItems.push({ + icon: "book", + label: metadata.project.name, + }); + } + + if (configuration?.statuses?.length > 0) { + metadataItems.push({ + icon: "funnel", + label: configuration.statuses.join(", "), + }); + } + + const props: TriggerProps = { + title: node.name || definition.label || "Unnamed trigger", + iconSrc: gitlabIcon, + iconColor: getColorClass(definition.color), + collapsedBackground: getBackgroundColorClass(definition.color), + metadata: metadataItems, + }; + + if (lastEvent) { + const eventData = lastEvent.data as OnPipelineEventData; + const attrs = eventData?.object_attributes; + + props.lastEventData = { + title: getPipelineTitle(eventData), + subtitle: buildGitlabSubtitle(attrs?.status || "", lastEvent.createdAt), + receivedAt: new Date(lastEvent.createdAt!), + state: "triggered", + eventId: lastEvent.id!, + }; + } + + return props; + }, +}; diff --git a/web_src/src/pages/workflowv2/mappers/gitlab/run_pipeline.ts b/web_src/src/pages/workflowv2/mappers/gitlab/run_pipeline.ts new file mode 100644 index 
0000000000..f56d2cbe77 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/gitlab/run_pipeline.ts @@ -0,0 +1,188 @@ +import { CanvasesCanvasNodeExecution } from "@/api-client"; +import { + ComponentBaseProps, + ComponentBaseSpec, + DEFAULT_EVENT_STATE_MAP, + EventState, + EventStateMap, +} from "@/ui/componentBase"; +import { + ComponentBaseContext, + ComponentBaseMapper, + EventStateRegistry, + ExecutionDetailsContext, + OutputPayload, + StateFunction, + SubtitleContext, +} from "../types"; +import { baseProps } from "./base"; +import { buildGitlabExecutionSubtitle } from "./utils"; +import { MetadataItem } from "@/ui/metadataList"; + +interface PipelineMetadata { + id?: number; + iid?: number; + status?: string; + url?: string; +} + +interface ExecutionMetadata { + pipeline?: PipelineMetadata; +} + +interface RunPipelineConfiguration { + project: string; + ref: string; + inputs: Array<{ name: string; value: string }>; +} + +export const RUN_PIPELINE_STATE_MAP: EventStateMap = { + ...DEFAULT_EVENT_STATE_MAP, + running: { + icon: "loader-circle", + textColor: "text-gray-800", + backgroundColor: "bg-blue-100", + badgeColor: "bg-blue-500", + }, + passed: { + icon: "circle-check", + textColor: "text-gray-800", + backgroundColor: "bg-green-100", + badgeColor: "bg-emerald-500", + }, + failed: { + icon: "circle-x", + textColor: "text-gray-800", + backgroundColor: "bg-red-100", + badgeColor: "bg-red-400", + }, +}; + +export const runPipelineStateFunction: StateFunction = (execution: CanvasesCanvasNodeExecution): EventState => { + if (!execution) return "neutral"; + + if ( + execution.resultMessage && + (execution.resultReason === "RESULT_REASON_ERROR" || + (execution.result === "RESULT_FAILED" && execution.resultReason !== "RESULT_REASON_ERROR_RESOLVED")) + ) { + return "error"; + } + + if (execution.result === "RESULT_CANCELLED") { + return "cancelled"; + } + + if (execution.state === "STATE_PENDING" || execution.state === "STATE_STARTED") { + return "running"; + } 
+ + const outputs = execution.outputs as { passed?: OutputPayload[]; failed?: OutputPayload[] } | undefined; + if (outputs?.failed && outputs.failed.length > 0) { + return "failed"; + } + if (outputs?.passed && outputs.passed.length > 0) { + return "passed"; + } + + const metadata = execution.metadata as ExecutionMetadata; + switch (metadata?.pipeline?.status) { + case "success": + return "passed"; + case "failed": + case "canceled": + case "cancelled": + case "skipped": + case "manual": + case "blocked": + return "failed"; + default: + return "neutral"; + } +}; + +export const RUN_PIPELINE_STATE_REGISTRY: EventStateRegistry = { + stateMap: RUN_PIPELINE_STATE_MAP, + getState: runPipelineStateFunction, +}; + +export const runPipelineMapper: ComponentBaseMapper = { + props(context: ComponentBaseContext): ComponentBaseProps { + const base = baseProps(context.nodes, context.node, context.componentDefinition, context.lastExecutions); + const config = context.node.configuration as RunPipelineConfiguration; + const metadata = base.metadata as MetadataItem[]; + if (config.ref) { + metadata.push({ icon: "git-branch", label: config.ref }); + } + + return { + ...base, + specs: runPipelineSpecs(context.node.configuration), + eventStateMap: RUN_PIPELINE_STATE_MAP, + metadata: metadata, + }; + }, + + subtitle(context: SubtitleContext): string { + const metadata = context.execution.metadata as ExecutionMetadata | undefined; + const status = metadata?.pipeline?.status ? 
metadata.pipeline.status : "Pipeline Run"; + return buildGitlabExecutionSubtitle(context.execution, status); + }, + + getExecutionDetails(context: ExecutionDetailsContext): Record { + const metadata = context.execution.metadata as ExecutionMetadata | undefined; + const details: Record = {}; + const pipeline = metadata?.pipeline; + + if (pipeline?.id) { + details["ID"] = pipeline.id.toString(); + } + if (pipeline?.iid) { + details["IID"] = pipeline.iid.toString(); + } + if (pipeline?.status) { + details["Status"] = pipeline.status; + } + if (pipeline?.url) { + details["URL"] = pipeline.url; + } + if (context.execution.createdAt) { + details["Started At"] = new Date(context.execution.createdAt).toLocaleString(); + } + if (context.execution.updatedAt) { + details["Last Updated At"] = new Date(context.execution.updatedAt).toLocaleString(); + } + + return details; + }, +}; + +function runPipelineSpecs(configuration: unknown): ComponentBaseSpec[] { + const specs: ComponentBaseSpec[] = []; + const config = configuration as RunPipelineConfiguration; + const inputs = config?.inputs; + + if (inputs && inputs.length > 0) { + specs.push({ + title: "input", + tooltipTitle: "pipeline inputs", + iconSlug: "settings", + values: inputs.map((input) => ({ + badges: [ + { + label: input.name, + bgColor: "bg-indigo-100", + textColor: "text-indigo-800", + }, + { + label: input.value, + bgColor: "bg-gray-100", + textColor: "text-gray-800", + }, + ], + })), + }); + } + + return specs; +} From f7d335c79284598f0a46b7624e9109d85cf9a50a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Sat, 14 Feb 2026 00:49:49 +0100 Subject: [PATCH 111/160] feat: Connection retries for SSH component (#3112) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Core.mdx | 1 + pkg/components/ssh/client.go | 9 - pkg/components/ssh/ssh.go | 299 
+++++++++++++----- pkg/components/ssh/ssh_test.go | 24 -- pkg/core/component.go | 1 + pkg/workers/node_request_worker.go | 14 + web_src/src/pages/workflowv2/mappers/ssh.ts | 12 +- .../ObjectFieldRenderer.tsx | 12 +- 8 files changed, 255 insertions(+), 117 deletions(-) diff --git a/docs/components/Core.mdx b/docs/components/Core.mdx index 0c0fae5f5d..42f5dbc4e4 100644 --- a/docs/components/Core.mdx +++ b/docs/components/Core.mdx @@ -515,6 +515,7 @@ Choose **SSH key** or **Password**, then select the organization Secret and the - **Command**: The command to run (supports expressions). - **Working directory**: Optional; Changes to this directory before running the command. - **Timeout (seconds)**: How long the command may run (default 60). +- **Connection retry** (optional): Enable to retry connecting when the host is not reachable yet (e.g. server still booting). Set number of retries and interval between attempts. ### Output diff --git a/pkg/components/ssh/client.go b/pkg/components/ssh/client.go index 59902b7f00..a86b729b12 100644 --- a/pkg/components/ssh/client.go +++ b/pkg/components/ssh/client.go @@ -31,18 +31,13 @@ type Client struct { conn *ssh.Client } -// CommandResult holds the result of a single command execution. type CommandResult struct { Stdout string `json:"stdout"` Stderr string `json:"stderr"` ExitCode int `json:"exitCode"` } -// NewClient builds a client for key-based auth. func NewClientKey(host string, port int, username string, privateKey, passphrase []byte) *Client { - if port == 0 { - port = 22 - } return &Client{ Host: host, Port: port, @@ -53,11 +48,7 @@ func NewClientKey(host string, port int, username string, privateKey, passphrase } } -// NewClientPassword builds a client for password auth. 
func NewClientPassword(host string, port int, username string, password []byte) *Client { - if port == 0 { - port = 22 - } return &Client{ Host: host, Port: port, diff --git a/pkg/components/ssh/ssh.go b/pkg/components/ssh/ssh.go index 691bf342f1..2e200afbae 100644 --- a/pkg/components/ssh/ssh.go +++ b/pkg/components/ssh/ssh.go @@ -3,6 +3,7 @@ package ssh import ( "errors" "fmt" + "strings" "time" "github.com/google/uuid" @@ -23,7 +24,6 @@ func init() { type SSHCommand struct{} -// SecretKeyRef is stored in YAML as: { secret: "name", key: "keyName" }. type SecretKeyRef struct { Secret string `json:"secret" mapstructure:"secret"` Key string `json:"key" mapstructure:"key"` @@ -33,7 +33,6 @@ func (r SecretKeyRef) IsSet() bool { return r.Secret != "" && r.Key != "" } -// AuthSpec is the authentication config group (SSH key or password, credential references). type AuthSpec struct { Method string `json:"authMethod" mapstructure:"authMethod"` PrivateKey SecretKeyRef `json:"privateKey" mapstructure:"privateKey"` @@ -41,20 +40,33 @@ type AuthSpec struct { Password SecretKeyRef `json:"password" mapstructure:"password"` } +type ConnectionRetrySpec struct { + Enabled bool `json:"enabled" mapstructure:"enabled"` + Retries int `json:"retries" mapstructure:"retries"` + IntervalSeconds int `json:"intervalSeconds" mapstructure:"intervalSeconds"` +} + type Spec struct { - Host string `json:"host" mapstructure:"host"` - Port int `json:"port" mapstructure:"port"` - User string `json:"username" mapstructure:"username"` - Authentication AuthSpec `json:"authentication" mapstructure:"authentication"` - Command string `json:"command" mapstructure:"command"` - WorkingDirectory string `json:"workingDirectory,omitempty" mapstructure:"workingDirectory"` - Timeout int `json:"timeout" mapstructure:"timeout"` // command timeout in seconds (default 60) + Host string `json:"host" mapstructure:"host"` + Port int `json:"port" mapstructure:"port"` + User string `json:"username" 
mapstructure:"username"` + Authentication AuthSpec `json:"authentication" mapstructure:"authentication"` + Command string `json:"command" mapstructure:"command"` + WorkingDirectory string `json:"workingDirectory,omitempty" mapstructure:"workingDirectory"` + Timeout int `json:"timeout" mapstructure:"timeout"` + ConnectionRetry *ConnectionRetrySpec `json:"connectionRetry,omitempty" mapstructure:"connectionRetry"` } type ExecutionMetadata struct { Result *CommandResult `json:"result" mapstructure:"result"` } +type ConnectionRetryState struct { + Attempt int `json:"attempt" mapstructure:"attempt"` // retries done so far (1 = first retry) + MaxRetries int `json:"maxRetries" mapstructure:"maxRetries"` // max retries from config + IntervalSeconds int `json:"intervalSeconds" mapstructure:"intervalSeconds"` // seconds between attempts +} + func (c *SSHCommand) Name() string { return "ssh" } func (c *SSHCommand) Label() string { return "SSH Command" } func (c *SSHCommand) Description() string { @@ -76,6 +88,7 @@ Choose **SSH key** or **Password**, then select the organization Secret and the - **Command**: The command to run (supports expressions). - **Working directory**: Optional; Changes to this directory before running the command. - **Timeout (seconds)**: How long the command may run (default 60). +- **Connection retry** (optional): Enable to retry connecting when the host is not reachable yet (e.g. server still booting). Set number of retries and interval between attempts. ## Output @@ -212,6 +225,49 @@ func (c *SSHCommand) Configuration() []configuration.Field { Default: 60, Description: "Limit how long the command may run (seconds).", }, + { + Name: "connectionRetry", + Label: "Connection retry", + Type: configuration.FieldTypeObject, + Required: false, + Description: "Optionally retry connecting when the host is unreachable (e.g. 
server still booting).", + TypeOptions: &configuration.TypeOptions{ + Object: &configuration.ObjectTypeOptions{ + Schema: []configuration.Field{ + { + Name: "enabled", + Label: "Enable connection retry", + Type: configuration.FieldTypeBool, + Required: false, + Default: false, + Description: "Retry connecting if the host is not reachable yet.", + }, + { + Name: "retries", + Label: "Retries", + Type: configuration.FieldTypeNumber, + Required: false, + Default: 5, + Description: "Number of retry attempts.", + VisibilityConditions: []configuration.VisibilityCondition{ + {Field: "enabled", Values: []string{"true"}}, + }, + }, + { + Name: "intervalSeconds", + Label: "Retry interval (seconds)", + Type: configuration.FieldTypeNumber, + Required: false, + Default: 15, + Description: "Seconds to wait between connect attempts.", + VisibilityConditions: []configuration.VisibilityCondition{ + {Field: "enabled", Values: []string{"true"}}, + }, + }, + }, + }, + }, + }, } } @@ -253,100 +309,165 @@ func (c *SSHCommand) Setup(ctx core.SetupContext) error { default: return fmt.Errorf("invalid auth method: %s", spec.Authentication.Method) } + if spec.ConnectionRetry != nil && spec.ConnectionRetry.Enabled { + if spec.ConnectionRetry.Retries < 0 { + return errors.New("connection retry: retries must be 0 or greater") + } + if spec.ConnectionRetry.IntervalSeconds < 1 { + return errors.New("connection retry: interval must be at least 1 second") + } + } return nil } func (c *SSHCommand) Execute(ctx core.ExecutionContext) error { - var spec Spec - config, ok := ctx.Configuration.(map[string]any) - if !ok || config == nil { - return fmt.Errorf("decode configuration: invalid configuration type") - } - if err := mapstructure.Decode(config, &spec); err != nil { - return fmt.Errorf("decode configuration: %w", err) - } + return c.executeSSH(ctx.Configuration, ctx.Secrets, ctx.Metadata, ctx.Requests, ctx.ExecutionState) +} - if spec.Host == "" || spec.User == "" || spec.Command == "" { - return 
fmt.Errorf("host, username, and command are required") +func (c *SSHCommand) HandleAction(ctx core.ActionContext) error { + if ctx.Name == "connectionRetry" { + return c.executeSSH(ctx.Configuration, ctx.Secrets, ctx.Metadata, ctx.Requests, ctx.ExecutionState) } - if ctx.Secrets == nil { - return fmt.Errorf("secrets context not available") - } + return fmt.Errorf("unknown action: %s", ctx.Name) +} - port := spec.Port - if port == 0 { - port = 22 +func (c *SSHCommand) executeSSH(config any, secrets core.SecretsContext, metadata core.MetadataContext, req core.RequestContext, state core.ExecutionStateContext) error { + spec, err := c.decodeSpec(config) + if err != nil { + return err } - var client *Client - switch spec.Authentication.Method { - case AuthMethodSSHKey: - if !spec.Authentication.PrivateKey.IsSet() { - return fmt.Errorf("private key credential is required") - } - privateKey, err := ctx.Secrets.GetKey(spec.Authentication.PrivateKey.Secret, spec.Authentication.PrivateKey.Key) - if err != nil { - if errors.Is(err, core.ErrSecretKeyNotFound) { - return fmt.Errorf("private key could not be resolved from the selected credential") + client, err := c.createClient(secrets, spec) + if err != nil { + return err + } + defer client.Close() + + result, err := client.ExecuteCommand(spec.Command, time.Duration(spec.Timeout)*time.Second) + if c.isConnectError(err) { + if c.shouldRetry(spec.ConnectionRetry, metadata) { + err = c.incrementRetryCount(metadata) + if err != nil { + return err } - return fmt.Errorf("resolve private key: %w", err) + + return req.ScheduleActionCall("connectionRetry", map[string]any{}, time.Duration(spec.ConnectionRetry.IntervalSeconds)*time.Second) } - var passphrase []byte - if spec.Authentication.Passphrase.IsSet() { - passphrase, _ = ctx.Secrets.GetKey(spec.Authentication.Passphrase.Secret, spec.Authentication.Passphrase.Key) + // Retries exhausted — emit on the failed channel with the connection error. 
+ attempt := c.getRetryAttempt(metadata) + failResult := &CommandResult{ + Stdout: "", + Stderr: fmt.Sprintf("connection failed after %d retries: %s", attempt, err.Error()), + ExitCode: -1, } - client = NewClientKey(spec.Host, port, spec.User, privateKey, passphrase) + c.setResultMetadata(metadata, failResult) - case AuthMethodPassword: - if !spec.Authentication.Password.IsSet() { - return fmt.Errorf("password credential is required") - } - password, err := ctx.Secrets.GetKey(spec.Authentication.Password.Secret, spec.Authentication.Password.Key) - if err != nil { - if errors.Is(err, core.ErrSecretKeyNotFound) { - return fmt.Errorf("password could not be resolved from the selected credential") - } - return fmt.Errorf("resolve password: %w", err) - } + return state.Emit(channelFailed, "ssh.connection.failed", []any{failResult}) + } - client = NewClientPassword(spec.Host, port, spec.User, password) + if err != nil { + return err + } - default: - return fmt.Errorf("invalid auth method: %s", spec.Authentication.Method) + err = c.setResultMetadata(metadata, result) + if err != nil { + return err } - defer func() { _ = client.Close() }() + channel := channelFailed + if result.ExitCode == 0 { + channel = channelSuccess + } - command := spec.Command - if spec.WorkingDirectory != "" { - command = fmt.Sprintf("cd %s && %s", spec.WorkingDirectory, command) + return state.Emit(channel, "ssh.command.executed", []any{result}) +} + +func (c *SSHCommand) shouldRetry(retrySpec *ConnectionRetrySpec, metadata core.MetadataContext) bool { + if retrySpec == nil || !retrySpec.Enabled { + return false } - timeoutSec := spec.Timeout - if timeoutSec <= 0 { - timeoutSec = 60 + return c.getRetryAttempt(metadata) < retrySpec.Retries +} + +func (c *SSHCommand) incrementRetryCount(metadata core.MetadataContext) error { + current := c.getMetadataMap(metadata) + current["attempt"] = c.getRetryAttempt(metadata) + 1 + + return metadata.Set(current) +} + +func (c *SSHCommand) getRetryAttempt(metadata 
core.MetadataContext) int { + meta := c.getMetadataMap(metadata) + + attempt, ok := meta["attempt"] + if !ok { + return 0 } - timeout := time.Duration(timeoutSec) * time.Second - ctx.Logger.Infof("Executing SSH command on %s@%s:%d: %s", spec.User, spec.Host, port, command) + // JSON numbers deserialize as float64 + switch v := attempt.(type) { + case float64: + return int(v) + case int: + return v + default: + return 0 + } +} - result, err := client.ExecuteCommand(command, timeout) - if err != nil { - return fmt.Errorf("SSH execution failed: %w", err) +func (c *SSHCommand) getMetadataMap(metadata core.MetadataContext) map[string]any { + current, ok := metadata.Get().(map[string]any) + if !ok || current == nil { + return map[string]any{} } - if err := ctx.Metadata.Set(ExecutionMetadata{Result: result}); err != nil { - return fmt.Errorf("set metadata: %w", err) + return current +} + +func (c *SSHCommand) setResultMetadata(metadata core.MetadataContext, result *CommandResult) error { + current := c.getMetadataMap(metadata) + current["result"] = map[string]any{ + "exitCode": result.ExitCode, + "stdout": result.Stdout, + "stderr": result.Stderr, } - if result.ExitCode == 0 { - return ctx.ExecutionState.Emit(channelSuccess, "ssh.command.executed", []any{result}) + return metadata.Set(current) +} + +func (c *SSHCommand) decodeSpec(cfg any) (Spec, error) { + var spec Spec + + config, ok := cfg.(map[string]any) + if !ok || config == nil { + return spec, fmt.Errorf("decode configuration: invalid configuration type") + } + + if err := mapstructure.Decode(config, &spec); err != nil { + return spec, fmt.Errorf("decode configuration: %w", err) } - return ctx.ExecutionState.Emit(channelFailed, "ssh.command.failed", []any{result}) + + return spec, nil +} + +func (c *SSHCommand) isConnectError(err error) bool { + if err == nil { + return false + } + + s := strings.ToLower(err.Error()) + + return strings.Contains(s, "dial") || + strings.Contains(s, "timeout") || + 
strings.Contains(s, "connection refused") || + strings.Contains(s, "i/o timeout") || + strings.Contains(s, "connection reset") || + strings.Contains(s, "no route to host") } func (c *SSHCommand) Cancel(ctx core.ExecutionContext) error { @@ -358,11 +479,9 @@ func (c *SSHCommand) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, } func (c *SSHCommand) Actions() []core.Action { - return nil -} - -func (c *SSHCommand) HandleAction(ctx core.ActionContext) error { - return fmt.Errorf("no actions defined for ssh") + return []core.Action{ + {Name: "connectionRetry"}, + } } func (c *SSHCommand) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { @@ -372,3 +491,31 @@ func (c *SSHCommand) HandleWebhook(ctx core.WebhookRequestContext) (int, error) func (c *SSHCommand) Cleanup(ctx core.SetupContext) error { return nil } + +func (c *SSHCommand) createClient(secrets core.SecretsContext, spec Spec) (*Client, error) { + switch spec.Authentication.Method { + case AuthMethodSSHKey: + return c.createClientSSHKey(secrets, spec) + case AuthMethodPassword: + return c.createClientForPassword(secrets, spec) + default: + return nil, fmt.Errorf("unsupported authentication method: %s", spec.Authentication.Method) + } +} + +func (c *SSHCommand) createClientForPassword(secrets core.SecretsContext, spec Spec) (*Client, error) { + password, err := secrets.GetKey(spec.Authentication.Password.Secret, spec.Authentication.Password.Key) + if err != nil { + return nil, fmt.Errorf("cannot get password: %w", err) + } + return NewClientPassword(spec.Host, spec.Port, spec.User, password), nil +} + +func (c *SSHCommand) createClientSSHKey(secrets core.SecretsContext, spec Spec) (*Client, error) { + privateKey, err := secrets.GetKey(spec.Authentication.PrivateKey.Secret, spec.Authentication.PrivateKey.Key) + if err != nil { + return nil, fmt.Errorf("cannot get private key: %w", err) + } + + return NewClientKey(spec.Host, spec.Port, spec.User, privateKey, nil), nil +} diff --git 
a/pkg/components/ssh/ssh_test.go b/pkg/components/ssh/ssh_test.go index 7604540e73..8c8bc985a7 100644 --- a/pkg/components/ssh/ssh_test.go +++ b/pkg/components/ssh/ssh_test.go @@ -7,7 +7,6 @@ import ( "github.com/stretchr/testify/require" "github.com/superplanehq/superplane/pkg/core" - "github.com/superplanehq/superplane/test/support/contexts" ) func authConfig(method string, privateKey, password any) map[string]any { @@ -117,26 +116,3 @@ func TestSSHCommand_Setup_ValidatesRequiredFields(t *testing.T) { require.NoError(t, err) }) } - -func TestSSHCommand_Execute_RequiresSecretsContext(t *testing.T) { - c := &SSHCommand{} - stateCtx := &contexts.ExecutionStateContext{} - metadataCtx := &contexts.MetadataContext{} - - ctx := core.ExecutionContext{ - Configuration: map[string]any{ - "host": "example.com", - "username": "root", - "authentication": authConfig(AuthMethodSSHKey, map[string]any{"secret": "secret", "key": "key"}, nil), - "command": "ls", - "timeout": 60, - }, - ExecutionState: stateCtx, - Metadata: metadataCtx, - Secrets: nil, // not set - } - - err := c.Execute(ctx) - require.Error(t, err) - assert.Contains(t, err.Error(), "secrets") -} diff --git a/pkg/core/component.go b/pkg/core/component.go index 036e4704e2..c178825673 100644 --- a/pkg/core/component.go +++ b/pkg/core/component.go @@ -253,6 +253,7 @@ type ActionContext struct { Requests RequestContext Integration IntegrationContext Notifications NotificationContext + Secrets SecretsContext } /* diff --git a/pkg/workers/node_request_worker.go b/pkg/workers/node_request_worker.go index 1a0379cb20..4f5a3f4544 100644 --- a/pkg/workers/node_request_worker.go +++ b/pkg/workers/node_request_worker.go @@ -203,6 +203,11 @@ func (w *NodeRequestWorker) invokeParentNodeComponentAction(tx *gorm.DB, request return fmt.Errorf("action '%s' not found for component '%s'", actionName, component.Name()) } + workflow, err := models.FindCanvasWithoutOrgScopeInTransaction(tx, execution.WorkflowID) + if err != nil { + return 
fmt.Errorf("workflow not found: %w", err) + } + logger := logging.ForExecution(execution, nil) actionCtx := core.ActionContext{ Name: actionName, @@ -213,6 +218,8 @@ func (w *NodeRequestWorker) invokeParentNodeComponentAction(tx *gorm.DB, request ExecutionState: contexts.NewExecutionStateContext(tx, execution), Requests: contexts.NewExecutionRequestContext(tx, execution), Notifications: contexts.NewNotificationContext(tx, uuid.Nil, node.WorkflowID), + Auth: contexts.NewAuthContext(tx, workflow.OrganizationID, nil, nil), + Secrets: contexts.NewSecretsContext(tx, workflow.OrganizationID, w.encryptor), } if node.AppInstallationID != nil { @@ -277,6 +284,11 @@ func (w *NodeRequestWorker) invokeChildNodeComponentAction(tx *gorm.DB, request return fmt.Errorf("action '%s' not found for component '%s'", actionName, component.Name()) } + workflow, err := models.FindCanvasWithoutOrgScopeInTransaction(tx, execution.WorkflowID) + if err != nil { + return fmt.Errorf("workflow not found: %w", err) + } + actionCtx := core.ActionContext{ Name: actionName, Configuration: childNode.Configuration, @@ -287,6 +299,8 @@ func (w *NodeRequestWorker) invokeChildNodeComponentAction(tx *gorm.DB, request ExecutionState: contexts.NewExecutionStateContext(tx, execution), Requests: contexts.NewExecutionRequestContext(tx, execution), Notifications: contexts.NewNotificationContext(tx, uuid.Nil, execution.WorkflowID), + Auth: contexts.NewAuthContext(tx, workflow.OrganizationID, nil, nil), + Secrets: contexts.NewSecretsContext(tx, workflow.OrganizationID, w.encryptor), } err = component.HandleAction(actionCtx) diff --git a/web_src/src/pages/workflowv2/mappers/ssh.ts b/web_src/src/pages/workflowv2/mappers/ssh.ts index 04e3c04253..25286ad81b 100644 --- a/web_src/src/pages/workflowv2/mappers/ssh.ts +++ b/web_src/src/pages/workflowv2/mappers/ssh.ts @@ -96,9 +96,19 @@ export const sshMapper: ComponentBaseMapper = { if (context.execution.createdAt) { details["Started at"] = new 
Date(context.execution.createdAt).toLocaleString(); } - if (context.execution.updatedAt) { + if (context.execution.updatedAt && context.execution.state === "STATE_FINISHED") { details["Finished at"] = new Date(context.execution.updatedAt).toLocaleString(); } + + // Show connection retry progress + const retryAttempt = typeof metadata?.attempt === "number" ? metadata.attempt : 0; + const retryConfig = ( + context.node.configuration as SSHConfiguration & { connectionRetry?: { enabled?: boolean; retries?: number } } + )?.connectionRetry; + if (retryConfig?.enabled && retryAttempt > 0) { + details["Connection retry"] = `${retryAttempt} / ${retryConfig.retries ?? "?"}`; + } + if (result?.exitCode !== undefined) { details["Exit code"] = String(result.exitCode); } diff --git a/web_src/src/ui/configurationFieldRenderer/ObjectFieldRenderer.tsx b/web_src/src/ui/configurationFieldRenderer/ObjectFieldRenderer.tsx index c5ec1c9f78..b9ff10e96b 100644 --- a/web_src/src/ui/configurationFieldRenderer/ObjectFieldRenderer.tsx +++ b/web_src/src/ui/configurationFieldRenderer/ObjectFieldRenderer.tsx @@ -6,6 +6,7 @@ import { resolveIcon } from "@/lib/utils"; import { Dialog, DialogContent, DialogDescription, DialogTitle } from "@/components/ui/dialog"; import { SimpleTooltip } from "../componentSidebar/SimpleTooltip"; import { useMonacoExpressionAutocomplete } from "./useMonacoExpressionAutocomplete"; +import { parseDefaultValues } from "../../utils/components"; export const ObjectFieldRenderer: React.FC = ({ field, @@ -231,14 +232,11 @@ export const ObjectFieldRenderer: React.FC = ({ } // Merge schema defaults so visibility/required for nested fields see e.g. authMethod + // Use parseDefaultValues to properly convert string defaults to their correct types + // (e.g. 
boolean "false" -> false, number "5" -> 5) const schemaDefaults = React.useMemo(() => { - const acc: Record = {}; - schema?.forEach((f) => { - if (f.name != null && f.defaultValue !== undefined && f.defaultValue !== null) { - acc[f.name] = f.defaultValue; - } - }); - return acc; + if (!schema) return {}; + return parseDefaultValues(schema); }, [schema]); const objValue = React.useMemo( From 911cbbb09b4a76af30f95eada92d797123d25a29 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Sat, 14 Feb 2026 02:32:20 +0100 Subject: [PATCH 112/160] feat: Hetzner integration with createServer and deleteServer (#3116) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- docs/components/Hetzner Cloud.mdx | 73 +++ pkg/components/ssh/ssh.go | 142 ++++-- pkg/integrations/hetzner/client.go | 460 ++++++++++++++++++ pkg/integrations/hetzner/create_server.go | 316 ++++++++++++ pkg/integrations/hetzner/delete_server.go | 206 ++++++++ pkg/integrations/hetzner/hetzner.go | 193 ++++++++ pkg/server/server.go | 1 + pkg/workers/node_request_worker.go | 2 +- .../src/assets/icons/integrations/hetzner.svg | 1 + web_src/src/pages/workflowv2/index.tsx | 9 + .../pages/workflowv2/mappers/hetzner/base.ts | 37 ++ .../pages/workflowv2/mappers/hetzner/index.ts | 7 + web_src/src/pages/workflowv2/mappers/index.ts | 2 + web_src/src/pages/workflowv2/mappers/ssh.ts | 7 + .../src/ui/BuildingBlocksSidebar/index.tsx | 3 + .../ui/componentSidebar/integrationIcons.tsx | 3 + 16 files changed, 1417 insertions(+), 45 deletions(-) create mode 100644 docs/components/Hetzner Cloud.mdx create mode 100644 pkg/integrations/hetzner/client.go create mode 100644 pkg/integrations/hetzner/create_server.go create mode 100644 pkg/integrations/hetzner/delete_server.go create mode 100644 pkg/integrations/hetzner/hetzner.go create mode 100644 web_src/src/assets/icons/integrations/hetzner.svg create 
mode 100644 web_src/src/pages/workflowv2/mappers/hetzner/base.ts create mode 100644 web_src/src/pages/workflowv2/mappers/hetzner/index.ts diff --git a/docs/components/Hetzner Cloud.mdx b/docs/components/Hetzner Cloud.mdx new file mode 100644 index 0000000000..60c7b2c9bb --- /dev/null +++ b/docs/components/Hetzner Cloud.mdx @@ -0,0 +1,73 @@ +--- +title: "Hetzner Cloud" +--- + +Create and delete Hetzner Cloud servers + +import { CardGrid, LinkCard } from "@astrojs/starlight/components"; + +## Actions + + + + + + +## Instructions + +**API Token:** Create a token in [Hetzner Cloud Console](https://console.hetzner.cloud/) → Project → Security → API Tokens. Use **Read & Write** scope. + + + +## Create Server + +The Create Server component creates a new server in Hetzner Cloud and waits for the create action to complete. + +### How It Works + +1. Creates a server with the given name, server type, image, and optional location/SSH keys/user data +2. Polls the Hetzner API until the create action finishes +3. Emits the server details on the default output when ready. If creation fails, the execution errors. + +### Configuration + +- **Name**: Server name (supports expressions) +- **Server type**: e.g. cx11, cpx11, cax11 +- **Image**: Image name or ID, e.g. ubuntu-24.04 +- **Location** (optional): e.g. fsn1, nbg1, hel1 +- **SSH keys** (optional): List of SSH key names or IDs +- **User data** (optional): Cloud-init user data + +### Example Output + +```json +{ + "created": "2024-01-15T10:30:00+00:00", + "id": 42, + "name": "my-server", + "publicIp": "1.2.3.4", + "status": "running" +} +``` + + + +## Delete Server + +The Delete Server component deletes a server in Hetzner Cloud and waits for the delete action to complete. + +### How It Works + +1. Deletes the selected server via the Hetzner API +2. Polls the API until the delete action finishes +3. Emits on the default output when the server is deleted. If deletion fails, the execution errors. 
+ +### Example Output + +```json +{ + "actionId": 123, + "serverId": 42 +} +``` + diff --git a/pkg/components/ssh/ssh.go b/pkg/components/ssh/ssh.go index 2e200afbae..aaac462e09 100644 --- a/pkg/components/ssh/ssh.go +++ b/pkg/components/ssh/ssh.go @@ -58,7 +58,18 @@ type Spec struct { } type ExecutionMetadata struct { - Result *CommandResult `json:"result" mapstructure:"result"` + Result *CommandResult `json:"result" mapstructure:"result"` + Host string `json:"host" mapstructure:"host"` + Port int `json:"port" mapstructure:"port"` + User string `json:"user" mapstructure:"user"` + Command string `json:"command" mapstructure:"command"` + WorkingDirectory string `json:"workingDirectory" mapstructure:"workingDirectory"` + Timeout int `json:"timeout" mapstructure:"timeout"` + ConnectionRetry *ConnectionRetrySpec `json:"connectionRetry" mapstructure:"connectionRetry"` + Attempt int `json:"attempt" mapstructure:"attempt"` + MaxRetries int `json:"maxRetries" mapstructure:"maxRetries"` + IntervalSeconds int `json:"intervalSeconds" mapstructure:"intervalSeconds"` + Authentication AuthSpec `json:"authentication" mapstructure:"authentication"` } type ConnectionRetryState struct { @@ -322,58 +333,116 @@ func (c *SSHCommand) Setup(ctx core.SetupContext) error { } func (c *SSHCommand) Execute(ctx core.ExecutionContext) error { - return c.executeSSH(ctx.Configuration, ctx.Secrets, ctx.Metadata, ctx.Requests, ctx.ExecutionState) + spec := Spec{} + err := mapstructure.Decode(ctx.Configuration, &spec) + if err != nil { + return err + } + + metadata := ExecutionMetadata{ + Host: spec.Host, + Port: spec.Port, + User: spec.User, + Command: spec.Command, + WorkingDirectory: spec.WorkingDirectory, + Timeout: spec.Timeout, + ConnectionRetry: spec.ConnectionRetry, + Attempt: 0, + MaxRetries: spec.ConnectionRetry.Retries, + IntervalSeconds: spec.ConnectionRetry.IntervalSeconds, + Authentication: spec.Authentication, + } + + err = ctx.Metadata.Set(metadata) + if err != nil { + return err + } 
+ + execCtx := ExecuteSSHContext{ + secretsCtx: ctx.Secrets, + requestsCtx: ctx.Requests, + stateCtx: ctx.ExecutionState, + metadataCtx: ctx.Metadata, + execMetadata: metadata, + } + + return c.executeSSH(execCtx) } func (c *SSHCommand) HandleAction(ctx core.ActionContext) error { if ctx.Name == "connectionRetry" { - return c.executeSSH(ctx.Configuration, ctx.Secrets, ctx.Metadata, ctx.Requests, ctx.ExecutionState) + if ctx.ExecutionState.IsFinished() { + return nil + } + + metadata := ExecutionMetadata{} + err := mapstructure.Decode(ctx.Metadata.Get(), &metadata) + if err != nil { + return err + } + + execCtx := ExecuteSSHContext{ + secretsCtx: ctx.Secrets, + requestsCtx: ctx.Requests, + stateCtx: ctx.ExecutionState, + metadataCtx: ctx.Metadata, + execMetadata: metadata, + } + + return c.executeSSH(execCtx) } return fmt.Errorf("unknown action: %s", ctx.Name) } -func (c *SSHCommand) executeSSH(config any, secrets core.SecretsContext, metadata core.MetadataContext, req core.RequestContext, state core.ExecutionStateContext) error { - spec, err := c.decodeSpec(config) - if err != nil { - return err - } +type ExecuteSSHContext struct { + secretsCtx core.SecretsContext + requestsCtx core.RequestContext + stateCtx core.ExecutionStateContext + metadataCtx core.MetadataContext + + execMetadata ExecutionMetadata +} - client, err := c.createClient(secrets, spec) +func (c *SSHCommand) executeSSH(ctx ExecuteSSHContext) error { + client, err := c.createClient(ctx.secretsCtx, ctx.execMetadata) if err != nil { return err } defer client.Close() - result, err := client.ExecuteCommand(spec.Command, time.Duration(spec.Timeout)*time.Second) + result, err := client.ExecuteCommand(ctx.execMetadata.Command, time.Duration(ctx.execMetadata.Timeout)*time.Second) if c.isConnectError(err) { - if c.shouldRetry(spec.ConnectionRetry, metadata) { - err = c.incrementRetryCount(metadata) + if c.shouldRetry(ctx.execMetadata.ConnectionRetry, ctx.metadataCtx) { + err = 
c.incrementRetryCount(ctx.metadataCtx) if err != nil { return err } - return req.ScheduleActionCall("connectionRetry", map[string]any{}, time.Duration(spec.ConnectionRetry.IntervalSeconds)*time.Second) + return ctx.requestsCtx.ScheduleActionCall("connectionRetry", map[string]any{}, time.Duration(ctx.execMetadata.ConnectionRetry.IntervalSeconds)*time.Second) } // Retries exhausted — emit on the failed channel with the connection error. - attempt := c.getRetryAttempt(metadata) + attempt := c.getRetryAttempt(ctx.metadataCtx) failResult := &CommandResult{ Stdout: "", Stderr: fmt.Sprintf("connection failed after %d retries: %s", attempt, err.Error()), ExitCode: -1, } - c.setResultMetadata(metadata, failResult) + err = c.setResultMetadata(ctx.metadataCtx, failResult) + if err != nil { + return err + } - return state.Emit(channelFailed, "ssh.connection.failed", []any{failResult}) + return ctx.stateCtx.Emit(channelFailed, "ssh.connection.failed", []any{failResult}) } if err != nil { return err } - err = c.setResultMetadata(metadata, result) + err = c.setResultMetadata(ctx.metadataCtx, result) if err != nil { return err } @@ -383,7 +452,7 @@ func (c *SSHCommand) executeSSH(config any, secrets core.SecretsContext, metadat channel = channelSuccess } - return state.Emit(channel, "ssh.command.executed", []any{result}) + return ctx.stateCtx.Emit(channel, "ssh.command.executed", []any{result}) } func (c *SSHCommand) shouldRetry(retrySpec *ConnectionRetrySpec, metadata core.MetadataContext) bool { @@ -440,21 +509,6 @@ func (c *SSHCommand) setResultMetadata(metadata core.MetadataContext, result *Co return metadata.Set(current) } -func (c *SSHCommand) decodeSpec(cfg any) (Spec, error) { - var spec Spec - - config, ok := cfg.(map[string]any) - if !ok || config == nil { - return spec, fmt.Errorf("decode configuration: invalid configuration type") - } - - if err := mapstructure.Decode(config, &spec); err != nil { - return spec, fmt.Errorf("decode configuration: %w", err) - } - - return 
spec, nil -} - func (c *SSHCommand) isConnectError(err error) bool { if err == nil { return false @@ -492,30 +546,30 @@ func (c *SSHCommand) Cleanup(ctx core.SetupContext) error { return nil } -func (c *SSHCommand) createClient(secrets core.SecretsContext, spec Spec) (*Client, error) { - switch spec.Authentication.Method { +func (c *SSHCommand) createClient(secrets core.SecretsContext, metadata ExecutionMetadata) (*Client, error) { + switch metadata.Authentication.Method { case AuthMethodSSHKey: - return c.createClientSSHKey(secrets, spec) + return c.createClientSSHKey(secrets, metadata) case AuthMethodPassword: - return c.createClientForPassword(secrets, spec) + return c.createClientForPassword(secrets, metadata) default: - return nil, fmt.Errorf("unsupported authentication method: %s", spec.Authentication.Method) + return nil, fmt.Errorf("unsupported authentication method: %s", metadata.Authentication.Method) } } -func (c *SSHCommand) createClientForPassword(secrets core.SecretsContext, spec Spec) (*Client, error) { - password, err := secrets.GetKey(spec.Authentication.Password.Secret, spec.Authentication.Password.Key) +func (c *SSHCommand) createClientForPassword(secrets core.SecretsContext, metadata ExecutionMetadata) (*Client, error) { + password, err := secrets.GetKey(metadata.Authentication.Password.Secret, metadata.Authentication.Password.Key) if err != nil { return nil, fmt.Errorf("cannot get password: %w", err) } - return NewClientPassword(spec.Host, spec.Port, spec.User, password), nil + return NewClientPassword(metadata.Host, metadata.Port, metadata.User, password), nil } -func (c *SSHCommand) createClientSSHKey(secrets core.SecretsContext, spec Spec) (*Client, error) { - privateKey, err := secrets.GetKey(spec.Authentication.PrivateKey.Secret, spec.Authentication.PrivateKey.Key) +func (c *SSHCommand) createClientSSHKey(secrets core.SecretsContext, metadata ExecutionMetadata) (*Client, error) { + privateKey, err := 
secrets.GetKey(metadata.Authentication.PrivateKey.Secret, metadata.Authentication.PrivateKey.Key) if err != nil { return nil, fmt.Errorf("cannot get private key: %w", err) } - return NewClientKey(spec.Host, spec.Port, spec.User, privateKey, nil), nil + return NewClientKey(metadata.Host, metadata.Port, metadata.User, privateKey, nil), nil } diff --git a/pkg/integrations/hetzner/client.go b/pkg/integrations/hetzner/client.go new file mode 100644 index 0000000000..7f5acaeb5e --- /dev/null +++ b/pkg/integrations/hetzner/client.go @@ -0,0 +1,460 @@ +package hetzner + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/core" +) + +const defaultHetznerBaseURL = "https://api.hetzner.cloud/v1" + +type Client struct { + Token string + BaseURL string + http core.HTTPContext +} + +type APIError struct { + StatusCode int + Body string + Message string +} + +func (e *APIError) Error() string { + if e.Message != "" { + return fmt.Sprintf("Hetzner API error %d: %s", e.StatusCode, e.Message) + } + return fmt.Sprintf("Hetzner API error %d: %s", e.StatusCode, e.Body) +} + +type createServerRequest struct { + Name string `json:"name"` + ServerType string `json:"server_type"` + Image string `json:"image"` + Location string `json:"location,omitempty"` + SSHKeys []string `json:"ssh_keys,omitempty"` + UserData string `json:"user_data,omitempty"` + StartAfterCreate *bool `json:"start_after_create,omitempty"` +} + +type createServerResponse struct { + Server *ServerResponse `json:"server"` + Action *ActionResponse `json:"action"` +} + +type ServerResponse struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + Created string `json:"created"` + PublicNet struct { + IPv4 struct { + IP string `json:"ip"` + } `json:"ipv4"` + } `json:"public_net"` +} + +type ActionResponse struct { + ID string `json:"id"` + Status string `json:"status"` + Command 
string `json:"command"` + Progress int `json:"progress"` + Started string `json:"started"` + Finished string `json:"finished"` + Error *struct { + Code string `json:"code"` + Message string `json:"message"` + } `json:"error"` +} + +type getActionResponse struct { + Action ActionResponse `json:"action"` +} + +const ( + ActionStatusRunning = "running" + ActionStatusSuccess = "success" + ActionStatusError = "error" +) + +func NewClient(httpCtx core.HTTPContext, integration core.IntegrationContext) (*Client, error) { + token, err := integration.GetConfig("apiToken") + if err != nil { + return nil, fmt.Errorf("apiToken is required: %w", err) + } + return &Client{ + Token: string(token), + BaseURL: defaultHetznerBaseURL, + http: httpCtx, + }, nil +} + +func (c *Client) do(method, path string, body any) (*http.Response, error) { + var bodyReader io.Reader + if body != nil { + raw, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(raw) + } + + req, err := http.NewRequest(method, c.BaseURL+path, bodyReader) + if err != nil { + return nil, err + } + req.Header.Set("Authorization", "Bearer "+c.Token) + req.Header.Set("Content-Type", "application/json") + + resp, err := c.http.Do(req) + if err != nil { + return nil, err + } + return resp, nil +} + +// decodeJSON decodes a JSON response body into the target struct. +// It uses json.Decoder.UseNumber() so that numeric IDs from the Hetzner API +// are preserved as strings (via mapstructure's WeaklyTypedInput). 
+func decodeJSON(r io.Reader, result any) error { + var raw any + dec := json.NewDecoder(r) + dec.UseNumber() + if err := dec.Decode(&raw); err != nil { + return err + } + + decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ + Result: result, + TagName: "json", + WeaklyTypedInput: true, + }) + if err != nil { + return err + } + + return decoder.Decode(raw) +} + +func (c *Client) parseError(resp *http.Response) error { + body, _ := io.ReadAll(resp.Body) + resp.Body.Close() + apiErr := &APIError{StatusCode: resp.StatusCode, Body: string(body)} + var errPayload struct { + Error struct { + Code string `json:"code"` + Message string `json:"message"` + } `json:"error"` + } + if json.Unmarshal(body, &errPayload) == nil && errPayload.Error.Message != "" { + apiErr.Message = errPayload.Error.Message + if resp.StatusCode == http.StatusUnprocessableEntity && strings.Contains(strings.ToLower(apiErr.Message), "unsupported location") { + apiErr.Message = "the selected location is not available for this server type. Select a server type first; the Location dropdown then shows only locations that support it." 
+ } + } + return apiErr +} + +func (c *Client) CreateServer(name, serverType, image, location string, sshKeys []string, userData string) (*ServerResponse, *ActionResponse, error) { + req := createServerRequest{ + Name: name, + ServerType: serverType, + Image: image, + Location: location, + SSHKeys: sshKeys, + UserData: userData, + } + startAfter := true + req.StartAfterCreate = &startAfter + + resp, err := c.do("POST", "/servers", req) + if err != nil { + return nil, nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusCreated { + return nil, nil, c.parseError(resp) + } + + var out createServerResponse + if err := decodeJSON(resp.Body, &out); err != nil { + return nil, nil, fmt.Errorf("decode create server response: %w", err) + } + if out.Server == nil || out.Action == nil { + return nil, nil, fmt.Errorf("create server response missing server or action") + } + return out.Server, out.Action, nil +} + +func (c *Client) GetAction(actionID string) (*ActionResponse, error) { + resp, err := c.do("GET", "/actions/"+actionID, nil) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, c.parseError(resp) + } + + var out getActionResponse + if err := decodeJSON(resp.Body, &out); err != nil { + return nil, fmt.Errorf("decode get action response: %w", err) + } + return &out.Action, nil +} + +func (c *Client) GetServer(serverID string) (*ServerResponse, error) { + resp, err := c.do("GET", "/servers/"+serverID, nil) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, c.parseError(resp) + } + + var out struct { + Server ServerResponse `json:"server"` + } + if err := decodeJSON(resp.Body, &out); err != nil { + return nil, fmt.Errorf("decode get server response: %w", err) + } + return &out.Server, nil +} + +func (c *Client) DeleteServer(serverID string) (*ActionResponse, error) { + resp, err := c.do("DELETE", 
"/servers/"+serverID, nil) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, c.parseError(resp) + } + + var out struct { + Action ActionResponse `json:"action"` + } + if err := decodeJSON(resp.Body, &out); err != nil { + return nil, fmt.Errorf("decode delete server response: %w", err) + } + return &out.Action, nil +} + +func (c *Client) ListServers() ([]ServerResponse, error) { + resp, err := c.do("GET", "/servers?per_page=50", nil) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, c.parseError(resp) + } + + var out struct { + Servers []ServerResponse `json:"servers"` + } + if err := decodeJSON(resp.Body, &out); err != nil { + return nil, fmt.Errorf("decode list servers response: %w", err) + } + return out.Servers, nil +} + +type ServerTypePrice struct { + Location string `json:"location"` +} + +type ServerTypeResponse struct { + Name string `json:"name"` + ID int `json:"id"` + Description string `json:"description"` + Cores int `json:"cores"` + Memory float64 `json:"memory"` + Disk int `json:"disk"` + Prices []ServerTypePrice `json:"prices"` +} + +func (c *Client) ListServerTypes() ([]ServerTypeResponse, error) { + resp, err := c.do("GET", "/server_types?per_page=50", nil) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, c.parseError(resp) + } + + var out struct { + ServerTypes []ServerTypeResponse `json:"server_types"` + } + if err := json.NewDecoder(resp.Body).Decode(&out); err != nil { + return nil, fmt.Errorf("decode list server types response: %w", err) + } + return out.ServerTypes, nil +} + +// ServerTypeLocationNames returns the location names (e.g. fsn1, nbg1) where the given server type is available. +// Prices in the API list per-location pricing, so a price entry means the type is available there. 
+func (c *Client) ServerTypeLocationNames(serverTypeName string) ([]string, error) { + types, err := c.ListServerTypes() + if err != nil { + return nil, err + } + for _, t := range types { + if t.Name == serverTypeName { + names := make([]string, 0, len(t.Prices)) + for _, p := range t.Prices { + if p.Location != "" { + names = append(names, p.Location) + } + } + return names, nil + } + } + return nil, fmt.Errorf("server type %q not found", serverTypeName) +} + +// ServerTypeDisplayName returns a label for the server type including specs (e.g. "cpx11 — 2 vCPU, 2 GB RAM, 40 GB disk"). +func (s *ServerTypeResponse) ServerTypeDisplayName() string { + if s.Name == "" { + return "" + } + var parts []string + if s.Cores > 0 { + parts = append(parts, fmt.Sprintf("%d vCPU", s.Cores)) + } + if s.Memory > 0 { + parts = append(parts, fmt.Sprintf("%.0f GB RAM", s.Memory)) + } + if s.Disk > 0 { + parts = append(parts, fmt.Sprintf("%d GB disk", s.Disk)) + } + if len(parts) == 0 { + return s.Name + } + return s.Name + " — " + strings.Join(parts, ", ") +} + +type ImageResponse struct { + Name string `json:"name"` + ID int `json:"id"` +} + +func (c *Client) ListImages() ([]ImageResponse, error) { + resp, err := c.do("GET", "/images?per_page=50&type=system", nil) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, c.parseError(resp) + } + + var out struct { + Images []ImageResponse `json:"images"` + } + if err := json.NewDecoder(resp.Body).Decode(&out); err != nil { + return nil, fmt.Errorf("decode list images response: %w", err) + } + return out.Images, nil +} + +type LocationResponse struct { + Name string `json:"name"` + ID int `json:"id"` + Description string `json:"description"` + City string `json:"city"` + Country string `json:"country"` +} + +// LocationDisplayName returns a label for the location (e.g. "Nuremberg, DE (nbg1)"). 
+func (l *LocationResponse) LocationDisplayName() string { + if l.Name == "" { + return "" + } + if l.City != "" && l.Country != "" { + return fmt.Sprintf("%s, %s (%s)", l.City, l.Country, l.Name) + } + if l.City != "" { + return fmt.Sprintf("%s (%s)", l.City, l.Name) + } + if l.Description != "" { + return fmt.Sprintf("%s (%s)", l.Description, l.Name) + } + return l.Name +} + +func (c *Client) ListLocations() ([]LocationResponse, error) { + resp, err := c.do("GET", "/locations?per_page=50", nil) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, c.parseError(resp) + } + + var out struct { + Locations []LocationResponse `json:"locations"` + } + if err := json.NewDecoder(resp.Body).Decode(&out); err != nil { + return nil, fmt.Errorf("decode list locations response: %w", err) + } + return out.Locations, nil +} + +func (c *Client) Verify() error { + resp, err := c.do("GET", "/servers?per_page=1", nil) + if err != nil { + return err + } + resp.Body.Close() + if resp.StatusCode != http.StatusOK { + return c.parseError(resp) + } + return nil +} + +// resolveServerID extracts the server ID from the configuration map, +// handling both string values and float64 values (which occur when +// template expressions resolve to JSON numbers). 
+func resolveServerID(config any) (string, error) { + m, ok := config.(map[string]any) + if !ok { + return "", fmt.Errorf("invalid configuration type") + } + + raw, ok := m["server"] + if !ok { + return "", fmt.Errorf("server is required") + } + + switch v := raw.(type) { + case string: + s := strings.TrimSpace(v) + if s == "" { + return "", fmt.Errorf("server is required") + } + return s, nil + case float64: + return fmt.Sprintf("%.0f", v), nil + case int: + return fmt.Sprintf("%d", v), nil + default: + return "", fmt.Errorf("invalid server value: %v", raw) + } +} diff --git a/pkg/integrations/hetzner/create_server.go b/pkg/integrations/hetzner/create_server.go new file mode 100644 index 0000000000..16b2e38e64 --- /dev/null +++ b/pkg/integrations/hetzner/create_server.go @@ -0,0 +1,316 @@ +package hetzner + +import ( + "fmt" + "strings" + "time" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +const ( + CreateServerPayloadType = "hetzner.server.created" + CreateServerPollInterval = 5 * time.Second +) + +type CreateServer struct{} + +type CreateServerSpec struct { + Name string `json:"name" mapstructure:"name"` + ServerType string `json:"serverType" mapstructure:"serverType"` + Image string `json:"image" mapstructure:"image"` + Location string `json:"location" mapstructure:"location"` + SSHKeys []string `json:"sshKeys" mapstructure:"sshKeys"` + UserData string `json:"userData" mapstructure:"userData"` +} + +type CreateServerExecutionMetadata struct { + ActionID string `json:"actionId" mapstructure:"actionId"` + Server *ServerResponse `json:"server,omitempty" mapstructure:"server"` +} + +func (c *CreateServer) Name() string { + return "hetzner.createServer" +} + +func (c *CreateServer) Label() string { + return "Create Server" +} + +func (c *CreateServer) Description() string { + return "Create a Hetzner Cloud server and wait for the 
action to complete" +} + +func (c *CreateServer) Documentation() string { + return `The Create Server component creates a new server in Hetzner Cloud and waits for the create action to complete. + +## How It Works + +1. Creates a server with the given name, server type, image, and optional location/SSH keys/user data +2. Polls the Hetzner API until the create action finishes +3. Emits the server details on the default output when ready. If creation fails, the execution errors. + +## Configuration + +- **Name**: Server name (supports expressions) +- **Server type**: e.g. cx11, cpx11, cax11 +- **Image**: Image name or ID, e.g. ubuntu-24.04 +- **Location** (optional): e.g. fsn1, nbg1, hel1 +- **SSH keys** (optional): List of SSH key names or IDs +- **User data** (optional): Cloud-init user data +` +} + +func (c *CreateServer) Icon() string { + return "hetzner" +} + +func (c *CreateServer) Color() string { + return "gray" +} + +func (c *CreateServer) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *CreateServer) ExampleOutput() map[string]any { + return map[string]any{ + "id": 42, + "name": "my-server", + "status": "running", + "created": "2024-01-15T10:30:00+00:00", + "publicIp": "1.2.3.4", + } +} + +func (c *CreateServer) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "name", + Label: "Name", + Type: configuration.FieldTypeExpression, + Required: true, + Description: "Server name", + }, + { + Name: "serverType", + Label: "Server type", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + Placeholder: "Select server type", + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "server_type", + }, + }, + Description: "Server type", + }, + { + Name: "image", + Label: "Image", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + 
Resource: &configuration.ResourceTypeOptions{ + Type: "image", + }, + }, + Description: "Image", + }, + { + Name: "location", + Label: "Location", + Type: configuration.FieldTypeIntegrationResource, + Required: false, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "location", + Parameters: []configuration.ParameterRef{ + {Name: "serverType", ValueFrom: &configuration.ParameterValueFrom{Field: "serverType"}}, + }, + }, + }, + Description: "Location (optional, omit for auto). Only locations that support the selected server type are shown.", + }, + { + Name: "sshKeys", + Label: "SSH keys", + Type: configuration.FieldTypeList, + Required: false, + Description: "Add the name or ID of each SSH key from your Hetzner Cloud project (Security → SSH Keys). The server will allow login with these keys.", + TypeOptions: &configuration.TypeOptions{ + List: &configuration.ListTypeOptions{ + ItemLabel: "SSH key", + ItemDefinition: &configuration.ListItemDefinition{ + Type: configuration.FieldTypeString, + }, + }, + }, + }, + { + Name: "userData", + Label: "User data", + Type: configuration.FieldTypeText, + Required: false, + Description: "Cloud-init user data", + }, + } +} + +func (c *CreateServer) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *CreateServer) Setup(ctx core.SetupContext) error { + spec := CreateServerSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + if strings.TrimSpace(spec.ServerType) == "" { + return fmt.Errorf("serverType is required") + } + if strings.TrimSpace(spec.Image) == "" { + return fmt.Errorf("image is required") + } + return nil +} + +func (c *CreateServer) Execute(ctx core.ExecutionContext) error { + spec := CreateServerSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return err + } + name := 
strings.TrimSpace(readStringFromAny(spec.Name)) + if name == "" { + return fmt.Errorf("name is required") + } + serverType := strings.TrimSpace(spec.ServerType) + image := strings.TrimSpace(spec.Image) + location := strings.TrimSpace(spec.Location) + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + server, action, err := client.CreateServer(name, serverType, image, location, spec.SSHKeys, spec.UserData) + if err != nil { + return fmt.Errorf("create server: %w", err) + } + + metadata := CreateServerExecutionMetadata{ + ActionID: action.ID, + Server: server, + } + if err := ctx.Metadata.Set(metadata); err != nil { + return err + } + + return ctx.Requests.ScheduleActionCall("poll", map[string]any{}, CreateServerPollInterval) +} + +func (c *CreateServer) Actions() []core.Action { + return []core.Action{ + {Name: "poll", UserAccessible: false}, + } +} + +func (c *CreateServer) HandleAction(ctx core.ActionContext) error { + if ctx.Name == "poll" { + return c.poll(ctx) + } + return fmt.Errorf("unknown action: %s", ctx.Name) +} + +func (c *CreateServer) poll(ctx core.ActionContext) error { + if ctx.ExecutionState.IsFinished() { + return nil + } + + var metadata CreateServerExecutionMetadata + if err := mapstructure.Decode(ctx.Metadata.Get(), &metadata); err != nil { + return fmt.Errorf("decode metadata: %w", err) + } + if metadata.ActionID == "" { + return nil + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + action, err := client.GetAction(metadata.ActionID) + if err != nil { + return ctx.Requests.ScheduleActionCall("poll", map[string]any{}, CreateServerPollInterval) + } + + switch action.Status { + case ActionStatusRunning: + return ctx.Requests.ScheduleActionCall("poll", map[string]any{}, CreateServerPollInterval) + case ActionStatusError: + msg := "create server action failed" + if action.Error != nil && action.Error.Message != "" { + msg = action.Error.Message + } + return 
fmt.Errorf("%s", msg) + case ActionStatusSuccess: + server := metadata.Server + if server != nil && server.ID != "" { + if refreshed, err := client.GetServer(server.ID); err == nil { + server = refreshed + } + } + payload := serverToPayload(server) + return ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, CreateServerPayloadType, []any{payload}) + default: + return ctx.Requests.ScheduleActionCall("poll", map[string]any{}, CreateServerPollInterval) + } +} + +func (c *CreateServer) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return 200, nil +} + +func (c *CreateServer) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *CreateServer) Cleanup(ctx core.SetupContext) error { + return nil +} + +func serverToPayload(s *ServerResponse) map[string]any { + if s == nil { + return map[string]any{} + } + out := map[string]any{ + "id": s.ID, + "name": s.Name, + "status": s.Status, + "created": s.Created, + } + if s.PublicNet.IPv4.IP != "" { + out["publicIp"] = s.PublicNet.IPv4.IP + } + return out +} + +func readStringFromAny(v any) string { + if v == nil { + return "" + } + switch x := v.(type) { + case string: + return x + case float64: + return fmt.Sprintf("%.0f", x) + case int: + return fmt.Sprintf("%d", x) + default: + return fmt.Sprintf("%v", v) + } +} diff --git a/pkg/integrations/hetzner/delete_server.go b/pkg/integrations/hetzner/delete_server.go new file mode 100644 index 0000000000..93d4efa18e --- /dev/null +++ b/pkg/integrations/hetzner/delete_server.go @@ -0,0 +1,206 @@ +package hetzner + +import ( + "fmt" + "strings" + "time" + + "github.com/google/uuid" + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" +) + +const ( + DeleteServerPayloadType = "hetzner.server.deleted" + DeleteServerPollInterval = 5 * time.Second +) + +type DeleteServer struct{} + +type DeleteServerSpec struct { + Server string `json:"server" mapstructure:"server"` 
+} + +type DeleteServerExecutionMetadata struct { + ActionID string `json:"actionId" mapstructure:"actionId"` + ServerID string `json:"serverId" mapstructure:"serverId"` +} + +func (c *DeleteServer) Name() string { + return "hetzner.deleteServer" +} + +func (c *DeleteServer) Label() string { + return "Delete Server" +} + +func (c *DeleteServer) Description() string { + return "Delete a Hetzner Cloud server and wait for the action to complete" +} + +func (c *DeleteServer) Documentation() string { + return `The Delete Server component deletes a server in Hetzner Cloud and waits for the delete action to complete. + +## How It Works + +1. Deletes the selected server via the Hetzner API +2. Polls the API until the delete action finishes +3. Emits on the default output when the server is deleted. If deletion fails, the execution errors. +` +} + +func (c *DeleteServer) Icon() string { + return "hetzner" +} + +func (c *DeleteServer) Color() string { + return "gray" +} + +func (c *DeleteServer) OutputChannels(configuration any) []core.OutputChannel { + return []core.OutputChannel{core.DefaultOutputChannel} +} + +func (c *DeleteServer) ExampleOutput() map[string]any { + return map[string]any{ + "actionId": 123, + "serverId": 42, + } +} + +func (c *DeleteServer) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "server", + Label: "Server", + Type: configuration.FieldTypeIntegrationResource, + Required: true, + TypeOptions: &configuration.TypeOptions{ + Resource: &configuration.ResourceTypeOptions{ + Type: "server", + }, + }, + Description: "Server to delete", + }, + } +} + +func (c *DeleteServer) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) { + return ctx.DefaultProcessing() +} + +func (c *DeleteServer) Setup(ctx core.SetupContext) error { + spec := DeleteServerSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + if 
strings.TrimSpace(spec.Server) == "" { + return fmt.Errorf("server is required") + } + return nil +} + +func (c *DeleteServer) Execute(ctx core.ExecutionContext) error { + spec := DeleteServerSpec{} + if err := mapstructure.Decode(ctx.Configuration, &spec); err != nil { + return err + } + serverID, err := resolveServerID(ctx.Configuration) + if err != nil { + return err + } + + // Store server ID in metadata early so it's visible in the UI + // even if the API call fails. + if err := ctx.Metadata.Set(DeleteServerExecutionMetadata{ServerID: serverID}); err != nil { + return err + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + action, err := client.DeleteServer(serverID) + if err != nil { + return fmt.Errorf("delete server: %w", err) + } + + if err := ctx.Metadata.Set(DeleteServerExecutionMetadata{ + ActionID: action.ID, + ServerID: serverID, + }); err != nil { + return err + } + + return ctx.Requests.ScheduleActionCall("poll", map[string]any{}, DeleteServerPollInterval) +} + +func (c *DeleteServer) Actions() []core.Action { + return []core.Action{ + {Name: "poll", UserAccessible: false}, + } +} + +func (c *DeleteServer) HandleAction(ctx core.ActionContext) error { + if ctx.Name == "poll" { + return c.poll(ctx) + } + return fmt.Errorf("unknown action: %s", ctx.Name) +} + +func (c *DeleteServer) poll(ctx core.ActionContext) error { + if ctx.ExecutionState.IsFinished() { + return nil + } + + var metadata DeleteServerExecutionMetadata + if err := mapstructure.Decode(ctx.Metadata.Get(), &metadata); err != nil { + return fmt.Errorf("decode metadata: %w", err) + } + if metadata.ActionID == "" { + return nil + } + + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + + action, err := client.GetAction(metadata.ActionID) + if err != nil { + return ctx.Requests.ScheduleActionCall("poll", map[string]any{}, DeleteServerPollInterval) + } + + switch action.Status { + case ActionStatusRunning: + 
return ctx.Requests.ScheduleActionCall("poll", map[string]any{}, DeleteServerPollInterval) + case ActionStatusError: + msg := "delete server action failed" + if action.Error != nil && action.Error.Message != "" { + msg = action.Error.Message + } + return fmt.Errorf("%s", msg) + case ActionStatusSuccess: + payload := map[string]any{ + "actionId": metadata.ActionID, + "serverId": metadata.ServerID, + } + return ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, DeleteServerPayloadType, []any{payload}) + default: + return ctx.Requests.ScheduleActionCall("poll", map[string]any{}, DeleteServerPollInterval) + } +} + +func (c *DeleteServer) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { + return 200, nil +} + +func (c *DeleteServer) Cancel(ctx core.ExecutionContext) error { + return nil +} + +func (c *DeleteServer) Cleanup(ctx core.SetupContext) error { + return nil +} diff --git a/pkg/integrations/hetzner/hetzner.go b/pkg/integrations/hetzner/hetzner.go new file mode 100644 index 0000000000..8a27183fef --- /dev/null +++ b/pkg/integrations/hetzner/hetzner.go @@ -0,0 +1,193 @@ +package hetzner + +import ( + "fmt" + "strings" + + "github.com/mitchellh/mapstructure" + "github.com/superplanehq/superplane/pkg/configuration" + "github.com/superplanehq/superplane/pkg/core" + "github.com/superplanehq/superplane/pkg/registry" +) + +func init() { + registry.RegisterIntegration("hetzner", &Hetzner{}) +} + +type Hetzner struct{} + +type Configuration struct { + APIToken string `json:"apiToken" mapstructure:"apiToken"` +} + +func (h *Hetzner) Name() string { + return "hetzner" +} + +func (h *Hetzner) Label() string { + return "Hetzner Cloud" +} + +func (h *Hetzner) Icon() string { + return "hetzner" +} + +func (h *Hetzner) Description() string { + return "Create and delete Hetzner Cloud servers" +} + +func (h *Hetzner) Instructions() string { + return ` +**API Token:** Create a token in [Hetzner Cloud Console](https://console.hetzner.cloud/) → Project → Security → 
API Tokens. Use **Read & Write** scope. +` +} + +func (h *Hetzner) Configuration() []configuration.Field { + return []configuration.Field{ + { + Name: "apiToken", + Label: "API Token", + Type: configuration.FieldTypeString, + Required: true, + Sensitive: true, + Description: "Hetzner Cloud API token with Read & Write access", + }, + } +} + +func (h *Hetzner) Components() []core.Component { + return []core.Component{ + &CreateServer{}, + &DeleteServer{}, + } +} + +func (h *Hetzner) Triggers() []core.Trigger { + return nil +} + +func (h *Hetzner) Cleanup(ctx core.IntegrationCleanupContext) error { + return nil +} + +func (h *Hetzner) Sync(ctx core.SyncContext) error { + config := Configuration{} + if err := mapstructure.Decode(ctx.Configuration, &config); err != nil { + return fmt.Errorf("failed to decode configuration: %w", err) + } + if strings.TrimSpace(config.APIToken) == "" { + return fmt.Errorf("apiToken is required") + } + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return err + } + if err := client.Verify(); err != nil { + return fmt.Errorf("failed to verify Hetzner credentials: %w", err) + } + ctx.Integration.Ready() + return nil +} + +func (h *Hetzner) HandleRequest(ctx core.HTTPRequestContext) {} + +func (h *Hetzner) ListResources(resourceType string, ctx core.ListResourcesContext) ([]core.IntegrationResource, error) { + client, err := NewClient(ctx.HTTP, ctx.Integration) + if err != nil { + return nil, err + } + + switch resourceType { + case "server": + servers, err := client.ListServers() + if err != nil { + return nil, err + } + resources := make([]core.IntegrationResource, 0, len(servers)) + for _, s := range servers { + id := s.ID + name := s.Name + if name == "" { + name = id + } + resources = append(resources, core.IntegrationResource{Type: "server", Name: name, ID: id}) + } + return resources, nil + case "server_type": + types, err := client.ListServerTypes() + if err != nil { + return nil, err + } + resources := 
make([]core.IntegrationResource, 0, len(types)) + for _, t := range types { + id := t.Name + if id == "" { + id = fmt.Sprintf("%d", t.ID) + } + displayName := t.ServerTypeDisplayName() + if displayName == "" { + displayName = id + } + resources = append(resources, core.IntegrationResource{Type: "server_type", Name: displayName, ID: id}) + } + return resources, nil + case "image": + images, err := client.ListImages() + if err != nil { + return nil, err + } + resources := make([]core.IntegrationResource, 0, len(images)) + for _, img := range images { + id := img.Name + if id == "" { + id = fmt.Sprintf("%d", img.ID) + } + resources = append(resources, core.IntegrationResource{Type: "image", Name: img.Name, ID: id}) + } + return resources, nil + case "location": + locations, err := client.ListLocations() + if err != nil { + return nil, err + } + if serverType := ctx.Parameters["serverType"]; serverType != "" { + allowedNames, err := client.ServerTypeLocationNames(serverType) + if err == nil && len(allowedNames) > 0 { + allowed := make(map[string]bool) + for _, n := range allowedNames { + allowed[n] = true + } + filtered := locations[:0] + for _, loc := range locations { + if allowed[loc.Name] { + filtered = append(filtered, loc) + } + } + locations = filtered + } + } + resources := make([]core.IntegrationResource, 0, len(locations)) + for _, loc := range locations { + id := loc.Name + if id == "" { + id = fmt.Sprintf("%d", loc.ID) + } + displayName := loc.LocationDisplayName() + if displayName == "" { + displayName = id + } + resources = append(resources, core.IntegrationResource{Type: "location", Name: displayName, ID: id}) + } + return resources, nil + default: + return nil, nil + } +} + +func (h *Hetzner) Actions() []core.Action { + return nil +} + +func (h *Hetzner) HandleAction(ctx core.IntegrationActionContext) error { + return nil +} diff --git a/pkg/server/server.go b/pkg/server/server.go index 9286713019..f909652ba0 100644 --- a/pkg/server/server.go +++ 
b/pkg/server/server.go @@ -44,6 +44,7 @@ import ( _ "github.com/superplanehq/superplane/pkg/integrations/dockerhub" _ "github.com/superplanehq/superplane/pkg/integrations/github" _ "github.com/superplanehq/superplane/pkg/integrations/gitlab" + _ "github.com/superplanehq/superplane/pkg/integrations/hetzner" _ "github.com/superplanehq/superplane/pkg/integrations/jira" _ "github.com/superplanehq/superplane/pkg/integrations/openai" _ "github.com/superplanehq/superplane/pkg/integrations/pagerduty" diff --git a/pkg/workers/node_request_worker.go b/pkg/workers/node_request_worker.go index 4f5a3f4544..367bd9aedd 100644 --- a/pkg/workers/node_request_worker.go +++ b/pkg/workers/node_request_worker.go @@ -291,7 +291,7 @@ func (w *NodeRequestWorker) invokeChildNodeComponentAction(tx *gorm.DB, request actionCtx := core.ActionContext{ Name: actionName, - Configuration: childNode.Configuration, + Configuration: execution.Configuration.Data(), Parameters: spec.InvokeAction.Parameters, Logger: logging.ForExecution(execution, parentExecution), HTTP: w.registry.HTTPContext(), diff --git a/web_src/src/assets/icons/integrations/hetzner.svg b/web_src/src/assets/icons/integrations/hetzner.svg new file mode 100644 index 0000000000..6f2932944c --- /dev/null +++ b/web_src/src/assets/icons/integrations/hetzner.svg @@ -0,0 +1 @@ + diff --git a/web_src/src/pages/workflowv2/index.tsx b/web_src/src/pages/workflowv2/index.tsx index 50c23f107d..02f244a877 100644 --- a/web_src/src/pages/workflowv2/index.tsx +++ b/web_src/src/pages/workflowv2/index.tsx @@ -68,6 +68,7 @@ import { getStateMap, } from "./mappers"; import { resolveExecutionErrors } from "./mappers/dash0"; +import { getHeaderIconSrc } from "@/ui/componentSidebar/integrationIcons"; import { useOnCancelQueueItemHandler } from "./useOnCancelQueueItemHandler"; import { usePushThroughHandler } from "./usePushThroughHandler"; import { useCancelExecutionHandler } from "./useCancelExecutionHandler"; @@ -3387,6 +3388,14 @@ function 
prepareComponentBaseNode( additionalData: additionalData, }); + // If the mapper didn't provide a custom icon, resolve from the app logo map + if (!componentBaseProps.iconSrc) { + const resolvedIconSrc = getHeaderIconSrc(node.component?.name); + if (resolvedIconSrc) { + componentBaseProps.iconSrc = resolvedIconSrc; + } + } + // If there's an error and empty state is shown, customize the message const hasError = !!node.errorMessage; const showingEmptyState = componentBaseProps.includeEmptyState; diff --git a/web_src/src/pages/workflowv2/mappers/hetzner/base.ts b/web_src/src/pages/workflowv2/mappers/hetzner/base.ts new file mode 100644 index 0000000000..f46da8a752 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/hetzner/base.ts @@ -0,0 +1,37 @@ +import { ComponentBaseMapper, ExecutionDetailsContext, SubtitleContext } from "../types"; +import { noopMapper } from "../noop"; +import { formatTimeAgo } from "@/utils/date"; + +function getExecutionDetails(context: ExecutionDetailsContext): Record { + const details: Record = {}; + const metadata = context.execution.metadata as Record | undefined; + + const serverId = metadata?.serverId ?? 
(metadata?.server as Record | undefined)?.id; + if (serverId !== undefined) { + details["Server ID"] = String(serverId); + } + + if (context.execution.createdAt) { + details["Started at"] = new Date(context.execution.createdAt).toLocaleString(); + } + if (context.execution.updatedAt && context.execution.state === "STATE_FINISHED") { + details["Finished at"] = new Date(context.execution.updatedAt).toLocaleString(); + } + + if (context.execution.resultMessage) { + details["Error"] = context.execution.resultMessage; + } + + return details; +} + +function subtitle(context: SubtitleContext): string { + if (!context.execution.createdAt) return ""; + return formatTimeAgo(new Date(context.execution.createdAt)); +} + +export const hetznerBaseMapper: ComponentBaseMapper = { + ...noopMapper, + getExecutionDetails: getExecutionDetails, + subtitle: subtitle, +}; diff --git a/web_src/src/pages/workflowv2/mappers/hetzner/index.ts b/web_src/src/pages/workflowv2/mappers/hetzner/index.ts new file mode 100644 index 0000000000..6de6806905 --- /dev/null +++ b/web_src/src/pages/workflowv2/mappers/hetzner/index.ts @@ -0,0 +1,7 @@ +import { ComponentBaseMapper } from "../types"; +import { hetznerBaseMapper } from "./base"; + +export const componentMappers: Record = { + createServer: hetznerBaseMapper, + deleteServer: hetznerBaseMapper, +}; diff --git a/web_src/src/pages/workflowv2/mappers/index.ts b/web_src/src/pages/workflowv2/mappers/index.ts index 5de22c24c7..f8b2d348f3 100644 --- a/web_src/src/pages/workflowv2/mappers/index.ts +++ b/web_src/src/pages/workflowv2/mappers/index.ts @@ -85,6 +85,7 @@ import { triggerRenderers as awsTriggerRenderers, eventStateRegistry as awsEventStateRegistry, } from "./aws"; +import { componentMappers as hetznerComponentMappers } from "./hetzner/index"; import { timeGateMapper, TIME_GATE_STATE_REGISTRY } from "./timegate"; import { componentMappers as discordComponentMappers, @@ -180,6 +181,7 @@ const appMappers: Record> = { claude: 
claudeComponentMappers, prometheus: prometheusComponentMappers, cursor: cursorComponentMappers, + hetzner: hetznerComponentMappers, dockerhub: dockerhubComponentMappers, }; diff --git a/web_src/src/pages/workflowv2/mappers/ssh.ts b/web_src/src/pages/workflowv2/mappers/ssh.ts index 25286ad81b..7763edb864 100644 --- a/web_src/src/pages/workflowv2/mappers/ssh.ts +++ b/web_src/src/pages/workflowv2/mappers/ssh.ts @@ -92,6 +92,13 @@ export const sshMapper: ComponentBaseMapper = { const details: Record = {}; const metadata = context.execution.metadata as Record | undefined; const result = metadata?.result as { stdout?: string; stderr?: string; exitCode?: number } | undefined; + const host = metadata?.host as string | undefined; + const port = metadata?.port as number | undefined; + const username = metadata?.username as string | undefined; + if (host) { + const portSuffix = port && port !== 22 ? `:${port}` : ""; + details["Host"] = `${username || "user"}@${host}${portSuffix}`; + } if (context.execution.createdAt) { details["Started at"] = new Date(context.execution.createdAt).toLocaleString(); diff --git a/web_src/src/ui/BuildingBlocksSidebar/index.tsx b/web_src/src/ui/BuildingBlocksSidebar/index.tsx index acddb64866..eefaaad538 100644 --- a/web_src/src/ui/BuildingBlocksSidebar/index.tsx +++ b/web_src/src/ui/BuildingBlocksSidebar/index.tsx @@ -40,6 +40,7 @@ import sendgridIcon from "@/assets/icons/integrations/sendgrid.svg"; import prometheusIcon from "@/assets/icons/integrations/prometheus.svg"; import renderIcon from "@/assets/icons/integrations/render.svg"; import dockerIcon from "@/assets/icons/integrations/docker.svg"; +import hetznerIcon from "@/assets/icons/integrations/hetzner.svg"; export interface BuildingBlock { name: string; @@ -408,6 +409,7 @@ function CategorySection({ github: githubIcon, gitlab: gitlabIcon, grafana: grafanaIcon, + hetzner: hetznerIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, @@ -488,6 +490,7 @@ function CategorySection({ 
github: githubIcon, gitlab: gitlabIcon, grafana: grafanaIcon, + hetzner: hetznerIcon, openai: openAiIcon, "open-ai": openAiIcon, claude: claudeIcon, diff --git a/web_src/src/ui/componentSidebar/integrationIcons.tsx b/web_src/src/ui/componentSidebar/integrationIcons.tsx index 9d194f7af9..7194a96502 100644 --- a/web_src/src/ui/componentSidebar/integrationIcons.tsx +++ b/web_src/src/ui/componentSidebar/integrationIcons.tsx @@ -26,6 +26,7 @@ import sendgridIcon from "@/assets/icons/integrations/sendgrid.svg"; import prometheusIcon from "@/assets/icons/integrations/prometheus.svg"; import renderIcon from "@/assets/icons/integrations/render.svg"; import dockerIcon from "@/assets/icons/integrations/docker.svg"; +import hetznerIcon from "@/assets/icons/integrations/hetzner.svg"; /** Integration type name (e.g. "github") → logo src. Used for Settings tab and header. */ export const INTEGRATION_APP_LOGO_MAP: Record = { @@ -39,6 +40,7 @@ export const INTEGRATION_APP_LOGO_MAP: Record = { github: githubIcon, gitlab: gitlabIcon, grafana: grafanaIcon, + hetzner: hetznerIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, @@ -66,6 +68,7 @@ export const APP_LOGO_MAP: Record> = { github: githubIcon, gitlab: gitlabIcon, grafana: grafanaIcon, + hetzner: hetznerIcon, jira: jiraIcon, openai: openAiIcon, "open-ai": openAiIcon, From 44b1c671e5b572ffcfd3f68be648d82a5963155b Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Sun, 15 Feb 2026 15:48:01 -0300 Subject: [PATCH 113/160] feat: include discovery commands on CLI (#3119) This PR overhauls the SuperPlane CLI command structure to make resource discovery and canvas workflows easier, while also making command implementation more maintainable. Before: ``` superplane ``` Now ``` superplane ``` The new structure makes it easier to discover things, build an internal structure to extend the commands and sub-commands going forward, and is also a pattern used in other CLIs - [see gh](https://github.com/cli/cli). 
### CLI internals - Refactored CLI internals into a reusable command framework: - Added `pkg/cli/core` with shared command binding/context logic. - Added centralized output rendering with support for `text`, `json`, and `yaml`. - Reorganized CLI commands into resource-focused groups under `pkg/cli/commands/*` ### New discovery-focused commands: - `superplane integrations list --connected` - `superplane integrations list-resources --id --type [--parameters key=value,...]` - `superplane components list --from ` - `superplane triggers list --from ` ### Global output flag You can control the output of the commands now with `--output` flag. Available options are: json / yaml / text. --------- Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- .cursor/skills/superplane-cli/SKILL.md | 212 ++++++++++++++++++ pkg/cli/check.go | 45 ---- pkg/cli/commands/canvases/create.go | 77 +++++++ pkg/cli/commands/canvases/get.go | 77 +++++++ pkg/cli/commands/canvases/list.go | 46 ++++ .../{ => commands/canvases}/models/canvas.go | 0 pkg/cli/commands/canvases/root.go | 54 +++++ pkg/cli/commands/canvases/update.go | 61 +++++ pkg/cli/commands/components/get.go | 58 +++++ pkg/cli/commands/components/list.go | 45 ++++ pkg/cli/commands/components/root.go | 34 +++ pkg/cli/commands/config/root.go | 106 +++++++++ pkg/cli/commands/integrations/get.go | 30 +++ pkg/cli/commands/integrations/list.go | 100 +++++++++ .../commands/integrations/list_resources.go | 204 +++++++++++++++++ pkg/cli/commands/integrations/root.go | 50 +++++ pkg/cli/commands/triggers/get.go | 58 +++++ pkg/cli/commands/triggers/list.go | 45 ++++ pkg/cli/commands/triggers/root.go | 34 +++ pkg/cli/config.go | 108 --------- pkg/cli/core/command.go | 146 ++++++++++++ pkg/cli/core/common.go | 58 +++++ pkg/cli/create.go | 78 ------- pkg/cli/delete.go | 16 -- pkg/cli/get.go | 58 ----- pkg/cli/list.go | 49 ---- pkg/cli/root.go | 71 ++++++ pkg/cli/update.go | 60 ----- pkg/cli/utils.go | 73 ------ pkg/cli/whoami.go | 
35 ++- pkg/cli/yaml_resource.go | 35 --- pkg/cli/yaml_resource_test.go | 67 ------ 32 files changed, 1589 insertions(+), 601 deletions(-) create mode 100644 .cursor/skills/superplane-cli/SKILL.md delete mode 100644 pkg/cli/check.go create mode 100644 pkg/cli/commands/canvases/create.go create mode 100644 pkg/cli/commands/canvases/get.go create mode 100644 pkg/cli/commands/canvases/list.go rename pkg/cli/{ => commands/canvases}/models/canvas.go (100%) create mode 100644 pkg/cli/commands/canvases/root.go create mode 100644 pkg/cli/commands/canvases/update.go create mode 100644 pkg/cli/commands/components/get.go create mode 100644 pkg/cli/commands/components/list.go create mode 100644 pkg/cli/commands/components/root.go create mode 100644 pkg/cli/commands/config/root.go create mode 100644 pkg/cli/commands/integrations/get.go create mode 100644 pkg/cli/commands/integrations/list.go create mode 100644 pkg/cli/commands/integrations/list_resources.go create mode 100644 pkg/cli/commands/integrations/root.go create mode 100644 pkg/cli/commands/triggers/get.go create mode 100644 pkg/cli/commands/triggers/list.go create mode 100644 pkg/cli/commands/triggers/root.go delete mode 100644 pkg/cli/config.go create mode 100644 pkg/cli/core/command.go create mode 100644 pkg/cli/core/common.go delete mode 100644 pkg/cli/create.go delete mode 100644 pkg/cli/delete.go delete mode 100644 pkg/cli/get.go delete mode 100644 pkg/cli/list.go delete mode 100644 pkg/cli/update.go delete mode 100644 pkg/cli/utils.go delete mode 100644 pkg/cli/yaml_resource.go delete mode 100644 pkg/cli/yaml_resource_test.go diff --git a/.cursor/skills/superplane-cli/SKILL.md b/.cursor/skills/superplane-cli/SKILL.md new file mode 100644 index 0000000000..2a6679909e --- /dev/null +++ b/.cursor/skills/superplane-cli/SKILL.md @@ -0,0 +1,212 @@ +--- +name: superplane-cli +description: Use when working with the SuperPlane CLI to discover available integrations, components, and triggers, and to build or troubleshoot 
canvases that connect trigger->component flows. Covers list/get command usage, interpreting configuration schemas, wiring channels between nodes, and resolving integration binding issues such as "integration is required". +--- + +# SuperPlane CLI Canvas Workflow + +Use this workflow to build or debug canvases from the CLI. + +## Discover what exists + +Run these first: + +```bash +superplane integrations list +superplane integrations list --connected +superplane triggers list +superplane components list +``` + +Narrow to one integration: + +```bash +superplane triggers list --from github +superplane components list --from github +superplane components list --from semaphore +``` + +Use `--connected` to list organization-connected integration instances (not just available providers). + +Inspect required config fields and example payloads: + +```bash +superplane triggers get github.onPush +superplane components get semaphore.runWorkflow +superplane components get github.runWorkflow +superplane components get approval +``` + +List runtime options for `integration-resource` fields: + +```bash +superplane integrations list-resources --id --type --parameters key1=value1,key2=value2 +``` + +Use `superplane integrations list --connected` first to find valid integration IDs. + +## Build canvas incrementally + +Create a blank canvas first: + +```bash +superplane canvases create +superplane canvases get +``` + +Edit a canvas file and update via: + +```bash +superplane canvases update --file +``` + +Use this resource header: + +```yaml +apiVersion: v1 +kind: Canvas +metadata: + id: + name: +spec: + nodes: [] + edges: [] +``` + +## Canvas YAML structure + +Use this as the canonical shape when editing a canvas file. 
+ +Top-level fields: + +- `apiVersion`: always `v1` +- `kind`: always `Canvas` +- `metadata.id`: canvas UUID (required for update) +- `metadata.name`: canvas name +- `spec.nodes`: list of trigger/component nodes +- `spec.edges`: list of directed graph connections + +Node structure: + +- Common fields: `id`, `name`, `type`, `configuration`, `position`, `paused`, `isCollapsed` +- Keep node `name` values unique within a canvas. Duplicate names can produce warnings and make expressions/diagnostics ambiguous. +- `type` must be `TYPE_TRIGGER` or `TYPE_COMPONENT` +- Trigger nodes must include `trigger.name` +- Component nodes must include `component.name` +- Integration-backed nodes should include `integration.id` (`integration.name` can be empty string) +- `errorMessage` and `warningMessage` are optional but useful for troubleshooting +- `metadata` is optional and usually server-populated + +Edge structure: + +- `sourceId`: upstream node id +- `targetId`: downstream node id +- `channel`: output channel from source node (`default`, `passed`, `approved`, etc.) + +Minimal example: + +```yaml +apiVersion: v1 +kind: Canvas +metadata: + id: + name: +spec: + nodes: + - id: trigger-main + name: github.onPush + type: TYPE_TRIGGER + trigger: + name: github.onPush + integration: + id: + name: "" + configuration: + repository: owner/repo + refs: + - type: equals + value: refs/heads/main + position: + x: 120 + y: 100 + paused: false + isCollapsed: false + + - id: component-ci + name: semaphore.runWorkflow + type: TYPE_COMPONENT + component: + name: semaphore.runWorkflow + integration: + id: + name: "" + configuration: + project: + pipelineFile: .semaphore/semaphore.yml + ref: refs/heads/main + position: + x: 480 + y: 100 + paused: false + isCollapsed: false + + edges: + - sourceId: trigger-main + targetId: component-ci + channel: default +``` + +## Node and edge wiring rules + +Use `TYPE_TRIGGER` for trigger nodes and `TYPE_COMPONENT` for component nodes. 
+ +For triggers, set: + +- `trigger.name` to the trigger id (example: `github.onPush`) + +For components, set: + +- `component.name` to the component id (example: `semaphore.runWorkflow`) + +For graph flow, set edges: + +- `sourceId` and `targetId` for connection +- `channel` when routing specific outputs (example: `passed`, `approved`) + +Typical gated flow: + +1. Trigger -> CI component +2. CI `passed` -> `approval` +3. `approval` `approved` -> deploy component + +## Configure integration-backed fields correctly + +When a field type is `integration-resource` (such as `repository` or `project`), the org must have a configured integration instance for that provider. + +Symptoms of missing binding: + +- Node `errorMessage` contains `integration is required` + +How to resolve: + +1. Run `superplane integrations list --connected` and confirm required providers are connected for the org. +2. Ensure the provider integration (GitHub, Semaphore, etc.) is installed and authenticated for the organization. +3. Reopen the node config and select valid provider resources for required fields. +4. Use `superplane integrations list-resources --id --type --parameters ...` to inspect valid option IDs/names. +5. Re-run `superplane canvases get ` and confirm node errors are cleared. + +## Troubleshooting checklist + +Run this after every update: + +```bash +superplane canvases get +``` + +Check: + +- All required `configuration` fields are present. +- Edges use the correct output channels. +- No node `errorMessage` remains. +- No node `warningMessage` indicates duplicate names (for example: `Multiple components named "semaphore.runWorkflow"`). +- Expressions reference existing node names. diff --git a/pkg/cli/check.go b/pkg/cli/check.go deleted file mode 100644 index fdd0c9d8b8..0000000000 --- a/pkg/cli/check.go +++ /dev/null @@ -1,45 +0,0 @@ -package cli - -import ( - "flag" - "fmt" - "os" -) - -// Checks if an error is present. 
-// -// If it is present, it displays the error and exits with status 1. -// -// If you want to display a custom message use CheckWithMessage. -func Check(err error) { - if err != nil { - fmt.Fprintf(os.Stderr, "error: %s\n", err.Error()) - - Exit(1) - } -} - -// Checks if an error is present. -// -// If it is present, it displays the provided message and exits with status 1. -func CheckWithMessage(err error, message string) { - if err != nil { - fmt.Fprintf(os.Stderr, "error: %+v\n", message) - - Exit(1) - } -} - -func Fail(message string) { - fmt.Fprintf(os.Stderr, "error: %s\n", message) - - Exit(1) -} - -func Exit(code int) { - if flag.Lookup("test.v") == nil { - os.Exit(1) - } else { - panic(fmt.Sprintf("exit %d", code)) - } -} diff --git a/pkg/cli/commands/canvases/create.go b/pkg/cli/commands/canvases/create.go new file mode 100644 index 0000000000..a29e56dd3d --- /dev/null +++ b/pkg/cli/commands/canvases/create.go @@ -0,0 +1,77 @@ +package canvases + +import ( + "fmt" + "os" + + "github.com/superplanehq/superplane/pkg/cli/commands/canvases/models" + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +type createCommand struct { + file *string +} + +func (c *createCommand) Execute(ctx core.CommandContext) error { + filePath := "" + if c.file != nil { + filePath = *c.file + } + + if filePath != "" { + if len(ctx.Args) > 0 { + return fmt.Errorf("cannot use together with --file") + } + return c.createFromFile(ctx, filePath) + } + + if len(ctx.Args) != 1 { + return fmt.Errorf("either --file or is required") + } + + name := ctx.Args[0] + resource := models.Canvas{ + APIVersion: core.APIVersion, + Kind: models.CanvasKind, + Metadata: &openapi_client.CanvasesCanvasMetadata{Name: &name}, + Spec: models.EmptyCanvasSpec(), + } + + canvas := models.CanvasFromCanvas(resource) + request := openapi_client.CanvasesCreateCanvasRequest{} + request.SetCanvas(canvas) + + _, _, err := 
ctx.API.CanvasAPI.CanvasesCreateCanvas(ctx.Context).Body(request).Execute() + return err +} + +func (c *createCommand) createFromFile(ctx core.CommandContext, path string) error { + // #nosec + data, err := os.ReadFile(path) + if err != nil { + return fmt.Errorf("failed to read resource file: %w", err) + } + + _, kind, err := core.ParseYamlResourceHeaders(data) + if err != nil { + return err + } + + switch kind { + case models.CanvasKind: + resource, err := models.ParseCanvas(data) + if err != nil { + return err + } + + canvas := models.CanvasFromCanvas(*resource) + request := openapi_client.CanvasesCreateCanvasRequest{} + request.SetCanvas(canvas) + + _, _, err = ctx.API.CanvasAPI.CanvasesCreateCanvas(ctx.Context).Body(request).Execute() + return err + default: + return fmt.Errorf("unsupported resource kind %q", kind) + } +} diff --git a/pkg/cli/commands/canvases/get.go b/pkg/cli/commands/canvases/get.go new file mode 100644 index 0000000000..79458d1743 --- /dev/null +++ b/pkg/cli/commands/canvases/get.go @@ -0,0 +1,77 @@ +package canvases + +import ( + "fmt" + "io" + + "github.com/google/uuid" + "github.com/superplanehq/superplane/pkg/cli/commands/canvases/models" + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +type getCommand struct{} + +func (c *getCommand) Execute(ctx core.CommandContext) error { + canvasID, err := findCanvasID(ctx, ctx.API, ctx.Args[0]) + if err != nil { + return err + } + + response, _, err := ctx.API.CanvasAPI.CanvasesDescribeCanvas(ctx.Context, canvasID).Execute() + if err != nil { + return err + } + + resource := models.CanvasResourceFromCanvas(*response.Canvas) + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = fmt.Fprintf(stdout, "ID: %s\n", resource.Metadata.GetId()) + _, _ = fmt.Fprintf(stdout, "Name: %s\n", resource.Metadata.GetName()) + _, _ = fmt.Fprintf(stdout, "Nodes: %d\n", len(resource.Spec.GetNodes())) + _, 
err := fmt.Fprintf(stdout, "Edges: %d\n", len(resource.Spec.GetEdges())) + return err + }) + } + + return ctx.Renderer.Render(resource) +} + +func findCanvasID(ctx core.CommandContext, client *openapi_client.APIClient, nameOrID string) (string, error) { + if _, err := uuid.Parse(nameOrID); err == nil { + return nameOrID, nil + } + + return findCanvasIDByName(ctx, client, nameOrID) +} + +func findCanvasIDByName(ctx core.CommandContext, client *openapi_client.APIClient, name string) (string, error) { + response, _, err := client.CanvasAPI.CanvasesListCanvases(ctx.Context).Execute() + if err != nil { + return "", err + } + + var matches []openapi_client.CanvasesCanvas + for _, canvas := range response.GetCanvases() { + if canvas.Metadata == nil || canvas.Metadata.Name == nil { + continue + } + if *canvas.Metadata.Name == name { + matches = append(matches, canvas) + } + } + + if len(matches) == 0 { + return "", fmt.Errorf("canvas %q not found", name) + } + + if len(matches) > 1 { + return "", fmt.Errorf("multiple canvases named %q found", name) + } + + if matches[0].Metadata == nil || matches[0].Metadata.Id == nil { + return "", fmt.Errorf("canvas %q is missing an id", name) + } + + return *matches[0].Metadata.Id, nil +} diff --git a/pkg/cli/commands/canvases/list.go b/pkg/cli/commands/canvases/list.go new file mode 100644 index 0000000000..7818ed611d --- /dev/null +++ b/pkg/cli/commands/canvases/list.go @@ -0,0 +1,46 @@ +package canvases + +import ( + "fmt" + "io" + "text/tabwriter" + "time" + + "github.com/superplanehq/superplane/pkg/cli/commands/canvases/models" + "github.com/superplanehq/superplane/pkg/cli/core" +) + +type listCommand struct{} + +func (c *listCommand) Execute(ctx core.CommandContext) error { + response, _, err := ctx.API.CanvasAPI.CanvasesListCanvases(ctx.Context).Execute() + if err != nil { + return err + } + + canvases := response.GetCanvases() + resources := make([]models.Canvas, 0, len(canvases)) + for _, canvas := range canvases { + resources 
= append(resources, models.CanvasResourceFromCanvas(canvas)) + } + + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) + _, _ = fmt.Fprintln(writer, "ID\tNAME\tCREATED_AT") + + for _, canvas := range canvases { + metadata := canvas.GetMetadata() + createdAt := "" + if metadata.HasCreatedAt() { + createdAt = metadata.GetCreatedAt().Format(time.RFC3339) + } + _, _ = fmt.Fprintf(writer, "%s\t%s\t%s\n", metadata.GetId(), metadata.GetName(), createdAt) + } + + return writer.Flush() + }) + } + + return ctx.Renderer.Render(resources) +} diff --git a/pkg/cli/models/canvas.go b/pkg/cli/commands/canvases/models/canvas.go similarity index 100% rename from pkg/cli/models/canvas.go rename to pkg/cli/commands/canvases/models/canvas.go diff --git a/pkg/cli/commands/canvases/root.go b/pkg/cli/commands/canvases/root.go new file mode 100644 index 0000000000..e4e428b28d --- /dev/null +++ b/pkg/cli/commands/canvases/root.go @@ -0,0 +1,54 @@ +package canvases + +import ( + "github.com/spf13/cobra" + "github.com/superplanehq/superplane/pkg/cli/core" +) + +func NewCommand(options core.BindOptions) *cobra.Command { + root := &cobra.Command{ + Use: "canvases", + Short: "Manage canvases", + Aliases: []string{"canvas"}, + } + + listCmd := &cobra.Command{ + Use: "list", + Short: "List canvases", + Args: cobra.NoArgs, + } + core.Bind(listCmd, &listCommand{}, options) + + getCmd := &cobra.Command{ + Use: "get ", + Short: "Get a canvas", + Args: cobra.ExactArgs(1), + } + core.Bind(getCmd, &getCommand{}, options) + + var createFile string + createCmd := &cobra.Command{ + Use: "create [canvas-name]", + Short: "Create a canvas", + Args: cobra.MaximumNArgs(1), + } + createCmd.Flags().StringVarP(&createFile, "file", "f", "", "filename, directory, or URL to files to use to create the resource") + core.Bind(createCmd, &createCommand{file: &createFile}, options) + + var updateFile string + updateCmd := 
&cobra.Command{ + Use: "update", + Short: "Update a canvas from a file", + Args: cobra.NoArgs, + } + updateCmd.Flags().StringVarP(&updateFile, "file", "f", "", "filename, directory, or URL to files to use to update the resource") + _ = updateCmd.MarkFlagRequired("file") + core.Bind(updateCmd, &updateCommand{file: &updateFile}, options) + + root.AddCommand(listCmd) + root.AddCommand(getCmd) + root.AddCommand(createCmd) + root.AddCommand(updateCmd) + + return root +} diff --git a/pkg/cli/commands/canvases/update.go b/pkg/cli/commands/canvases/update.go new file mode 100644 index 0000000000..a22013c8be --- /dev/null +++ b/pkg/cli/commands/canvases/update.go @@ -0,0 +1,61 @@ +package canvases + +import ( + "fmt" + "os" + + "github.com/superplanehq/superplane/pkg/cli/commands/canvases/models" + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +type updateCommand struct { + file *string +} + +func (c *updateCommand) Execute(ctx core.CommandContext) error { + filePath := "" + if c.file != nil { + filePath = *c.file + } + if filePath == "" { + return fmt.Errorf("--file is required") + } + if len(ctx.Args) > 0 { + return fmt.Errorf("update does not accept positional arguments") + } + + // #nosec + data, err := os.ReadFile(filePath) + if err != nil { + return fmt.Errorf("failed to read resource file: %w", err) + } + + _, kind, err := core.ParseYamlResourceHeaders(data) + if err != nil { + return err + } + + switch kind { + case models.CanvasKind: + resource, err := models.ParseCanvas(data) + if err != nil { + return err + } + if resource.Metadata == nil || resource.Metadata.Id == nil || resource.Metadata.GetId() == "" { + return fmt.Errorf("canvas metadata.id is required for update") + } + + canvas := models.CanvasFromCanvas(*resource) + body := openapi_client.CanvasesUpdateCanvasBody{} + body.SetCanvas(canvas) + + _, _, err = ctx.API.CanvasAPI. + CanvasesUpdateCanvas(ctx.Context, resource.Metadata.GetId()). 
+ Body(body). + Execute() + return err + default: + return fmt.Errorf("unsupported resource kind %q for update", kind) + } +} diff --git a/pkg/cli/commands/components/get.go b/pkg/cli/commands/components/get.go new file mode 100644 index 0000000000..4fa12b93c9 --- /dev/null +++ b/pkg/cli/commands/components/get.go @@ -0,0 +1,58 @@ +package components + +import ( + "fmt" + "io" + + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +type getCommand struct{} + +func (c *getCommand) Execute(ctx core.CommandContext) error { + name := ctx.Args[0] + var component openapi_client.ComponentsComponent + + integrationName, componentName, scoped := core.ParseIntegrationScopedName(name) + if scoped { + integration, err := core.FindIntegrationDefinition(ctx, integrationName) + if err != nil { + return err + } + + resolvedComponent, err := findIntegrationComponent(integration, componentName) + if err != nil { + return err + } + component = resolvedComponent + } else { + response, _, err := ctx.API.ComponentAPI.ComponentsDescribeComponent(ctx.Context, name).Execute() + if err != nil { + return err + } + component = response.GetComponent() + } + + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = fmt.Fprintf(stdout, "Name: %s\n", component.GetName()) + _, _ = fmt.Fprintf(stdout, "Label: %s\n", component.GetLabel()) + _, err := fmt.Fprintf(stdout, "Description: %s\n", component.GetDescription()) + return err + }) + } + + return ctx.Renderer.Render(component) +} + +func findIntegrationComponent(integration openapi_client.IntegrationsIntegrationDefinition, name string) (openapi_client.ComponentsComponent, error) { + for _, component := range integration.GetComponents() { + componentName := component.GetName() + if componentName == name || componentName == fmt.Sprintf("%s.%s", integration.GetName(), name) { + return component, nil + } + } + + return 
openapi_client.ComponentsComponent{}, fmt.Errorf("component %q not found in integration %q", name, integration.GetName()) +} diff --git a/pkg/cli/commands/components/list.go b/pkg/cli/commands/components/list.go new file mode 100644 index 0000000000..196a460988 --- /dev/null +++ b/pkg/cli/commands/components/list.go @@ -0,0 +1,45 @@ +package components + +import ( + "fmt" + "io" + "text/tabwriter" + + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +type listCommand struct { + from *string +} + +func (c *listCommand) Execute(ctx core.CommandContext) error { + components := []openapi_client.ComponentsComponent{} + + if c.from != nil && *c.from != "" { + integration, err := core.FindIntegrationDefinition(ctx, *c.from) + if err != nil { + return err + } + components = integration.GetComponents() + } else { + response, _, err := ctx.API.ComponentAPI.ComponentsListComponents(ctx.Context).Execute() + if err != nil { + return err + } + components = response.GetComponents() + } + + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) + _, _ = fmt.Fprintln(writer, "NAME\tLABEL\tDESCRIPTION") + for _, component := range components { + _, _ = fmt.Fprintf(writer, "%s\t%s\t%s\n", component.GetName(), component.GetLabel(), component.GetDescription()) + } + return writer.Flush() + }) + } + + return ctx.Renderer.Render(components) +} diff --git a/pkg/cli/commands/components/root.go b/pkg/cli/commands/components/root.go new file mode 100644 index 0000000000..f24f2f4c8c --- /dev/null +++ b/pkg/cli/commands/components/root.go @@ -0,0 +1,34 @@ +package components + +import ( + "github.com/spf13/cobra" + "github.com/superplanehq/superplane/pkg/cli/core" +) + +func NewCommand(options core.BindOptions) *cobra.Command { + root := &cobra.Command{ + Use: "components", + Short: "Manage components", + } + + var from string + listCmd := 
&cobra.Command{ + Use: "list", + Short: "List components", + Args: cobra.NoArgs, + } + listCmd.Flags().StringVar(&from, "from", "", "integration name") + core.Bind(listCmd, &listCommand{from: &from}, options) + + getCmd := &cobra.Command{ + Use: "get ", + Short: "Get a component", + Args: cobra.ExactArgs(1), + } + core.Bind(getCmd, &getCommand{}, options) + + root.AddCommand(listCmd) + root.AddCommand(getCmd) + + return root +} diff --git a/pkg/cli/commands/config/root.go b/pkg/cli/commands/config/root.go new file mode 100644 index 0000000000..9fa7fa7419 --- /dev/null +++ b/pkg/cli/commands/config/root.go @@ -0,0 +1,106 @@ +package config + +import ( + "fmt" + "io" + + "github.com/spf13/cobra" + "github.com/spf13/viper" + "github.com/superplanehq/superplane/pkg/cli/core" +) + +type getCommand struct{} + +func (c *getCommand) Execute(ctx core.CommandContext) error { + key := ctx.Args[0] + if !viper.IsSet(key) { + return fmt.Errorf("configuration key %q not found", key) + } + + value := viper.Get(key) + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = fmt.Fprintln(stdout, value) + return nil + }) + } + + return ctx.Renderer.Render(map[string]any{ + key: value, + }) +} + +type setCommand struct{} + +func (c *setCommand) Execute(ctx core.CommandContext) error { + key := ctx.Args[0] + value := ctx.Args[1] + + viper.Set(key, value) + if err := viper.WriteConfig(); err != nil { + return fmt.Errorf("failed to write configuration: %w", err) + } + + return nil +} + +type viewCommand struct{} + +func (c *viewCommand) Execute(ctx core.CommandContext) error { + allSettings := viper.AllSettings() + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(allSettings) + } + + if len(allSettings) == 0 { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = fmt.Fprintln(stdout, "No configuration values set") + return nil + }) + } + + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = 
fmt.Fprintln(stdout, "Current configuration:") + for key, value := range allSettings { + _, _ = fmt.Fprintf(stdout, " %s: %v\n", key, value) + } + return nil + }) +} + +func NewCommand(options core.BindOptions) *cobra.Command { + root := &cobra.Command{ + Use: "config", + Short: "Get and set configuration options", + Long: "Get and set CLI configuration options like API URL and authentication token.", + } + + getCmd := &cobra.Command{ + Use: "get [KEY]", + Short: "Display a configuration value", + Long: "Display the value of a specific configuration key.", + Args: cobra.ExactArgs(1), + } + core.Bind(getCmd, &getCommand{}, options) + + setCmd := &cobra.Command{ + Use: "set [KEY] [VALUE]", + Short: "Set a configuration value", + Long: "Set the value of a specific configuration key.", + Args: cobra.ExactArgs(2), + } + core.Bind(setCmd, &setCommand{}, options) + + viewCmd := &cobra.Command{ + Use: "view", + Short: "View all configuration values", + Long: "Display all configuration values currently set.", + } + core.Bind(viewCmd, &viewCommand{}, options) + + root.AddCommand(getCmd) + root.AddCommand(setCmd) + root.AddCommand(viewCmd) + + return root +} diff --git a/pkg/cli/commands/integrations/get.go b/pkg/cli/commands/integrations/get.go new file mode 100644 index 0000000000..8bd4408036 --- /dev/null +++ b/pkg/cli/commands/integrations/get.go @@ -0,0 +1,30 @@ +package integrations + +import ( + "fmt" + "io" + + "github.com/superplanehq/superplane/pkg/cli/core" +) + +type getCommand struct{} + +func (c *getCommand) Execute(ctx core.CommandContext) error { + integration, err := core.FindIntegrationDefinition(ctx, ctx.Args[0]) + if err != nil { + return err + } + + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = fmt.Fprintf(stdout, "Name: %s\n", integration.GetName()) + _, _ = fmt.Fprintf(stdout, "Label: %s\n", integration.GetLabel()) + _, _ = fmt.Fprintf(stdout, "Description: %s\n", integration.GetDescription()) + _, _ 
= fmt.Fprintf(stdout, "Components: %d\n", len(integration.GetComponents())) + _, err := fmt.Fprintf(stdout, "Triggers: %d\n", len(integration.GetTriggers())) + return err + }) + } + + return ctx.Renderer.Render(integration) +} diff --git a/pkg/cli/commands/integrations/list.go b/pkg/cli/commands/integrations/list.go new file mode 100644 index 0000000000..a007cb0682 --- /dev/null +++ b/pkg/cli/commands/integrations/list.go @@ -0,0 +1,100 @@ +package integrations + +import ( + "fmt" + "io" + "text/tabwriter" + + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +type listCommand struct { + connected *bool +} + +func (c *listCommand) Execute(ctx core.CommandContext) error { + if c.connected != nil && *c.connected { + return c.executeConnected(ctx) + } + + response, _, err := ctx.API.IntegrationAPI.IntegrationsListIntegrations(ctx.Context).Execute() + if err != nil { + return err + } + + integrations := response.GetIntegrations() + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) + _, _ = fmt.Fprintln(writer, "NAME\tLABEL\tDESCRIPTION") + for _, integration := range integrations { + _, _ = fmt.Fprintf(writer, "%s\t%s\t%s\n", integration.GetName(), integration.GetLabel(), integration.GetDescription()) + } + return writer.Flush() + }) + } + + return ctx.Renderer.Render(integrations) +} + +func (c *listCommand) executeConnected(ctx core.CommandContext) error { + me, _, err := ctx.API.MeAPI.MeMe(ctx.Context).Execute() + if err != nil { + return err + } + if !me.HasOrganizationId() { + return fmt.Errorf("organization id not found for authenticated user") + } + + connectedResponse, _, err := ctx.API.OrganizationAPI.OrganizationsListIntegrations(ctx.Context, me.GetOrganizationId()).Execute() + if err != nil { + return err + } + + availableResponse, _, err := 
ctx.API.IntegrationAPI.IntegrationsListIntegrations(ctx.Context).Execute() + if err != nil { + return err + } + + integrationsByName := make(map[string]openapi_client.IntegrationsIntegrationDefinition, len(availableResponse.GetIntegrations())) + for _, integration := range availableResponse.GetIntegrations() { + integrationsByName[integration.GetName()] = integration + } + + connected := connectedResponse.GetIntegrations() + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) + _, _ = fmt.Fprintln(writer, "ID\tNAME\tINTEGRATION\tLABEL\tDESCRIPTION\tSTATE") + for _, integration := range connected { + metadata := integration.GetMetadata() + spec := integration.GetSpec() + status := integration.GetStatus() + integrationName := spec.GetIntegrationName() + definition, found := integrationsByName[integrationName] + + label := "" + description := "" + if found { + label = definition.GetLabel() + description = definition.GetDescription() + } + + _, _ = fmt.Fprintf( + writer, + "%s\t%s\t%s\t%s\t%s\t%s\n", + metadata.GetId(), + metadata.GetName(), + integrationName, + label, + description, + status.GetState(), + ) + } + return writer.Flush() + }) + } + + return ctx.Renderer.Render(connected) +} diff --git a/pkg/cli/commands/integrations/list_resources.go b/pkg/cli/commands/integrations/list_resources.go new file mode 100644 index 0000000000..0747cf8560 --- /dev/null +++ b/pkg/cli/commands/integrations/list_resources.go @@ -0,0 +1,204 @@ +package integrations + +import ( + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "text/tabwriter" + "time" + + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +type integrationResourceListResponse struct { + Resources []openapi_client.OrganizationsIntegrationResourceRef `json:"resources"` +} + +type listResourcesCommand struct { + integrationID 
*string + resourceType *string + parameters *string +} + +func (c *listResourcesCommand) Execute(ctx core.CommandContext) error { + if c.integrationID == nil || strings.TrimSpace(*c.integrationID) == "" { + return fmt.Errorf("--id is required") + } + if c.resourceType == nil || strings.TrimSpace(*c.resourceType) == "" { + return fmt.Errorf("--type is required") + } + + extraParameters, err := parseIntegrationResourceParametersFlag(*c.parameters) + if err != nil { + return err + } + extraParameters["type"] = *c.resourceType + + me, _, err := ctx.API.MeAPI.MeMe(ctx.Context).Execute() + if err != nil { + return err + } + if !me.HasOrganizationId() { + return fmt.Errorf("organization id not found for authenticated user") + } + + integrationResponse, _, err := ctx.API.OrganizationAPI. + OrganizationsDescribeIntegration(ctx.Context, me.GetOrganizationId(), *c.integrationID). + Execute() + if err != nil { + return err + } + + integration := integrationResponse.GetIntegration() + metadata := integration.GetMetadata() + spec := integration.GetSpec() + + response, err := listIntegrationResourcesRequest( + ctx, + me.GetOrganizationId(), + metadata.GetId(), + extraParameters, + ) + if err != nil { + return err + } + + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) + _, _ = fmt.Fprintln(writer, "INTEGRATION_ID\tINTEGRATION_NAME\tINTEGRATION\tTYPE\tNAME\tID") + for _, resource := range response.Resources { + _, _ = fmt.Fprintf( + writer, + "%s\t%s\t%s\t%s\t%s\t%s\n", + metadata.GetId(), + metadata.GetName(), + spec.GetIntegrationName(), + resource.GetType(), + resource.GetName(), + resource.GetId(), + ) + } + return writer.Flush() + }) + } + + return ctx.Renderer.Render(response.Resources) +} + +func parseIntegrationResourceParametersFlag(raw string) (map[string]string, error) { + parameters := map[string]string{} + + raw = strings.TrimSpace(raw) + if raw == "" { + return 
parameters, nil + } + + pairs := strings.Split(raw, ",") + for _, pair := range pairs { + trimmedPair := strings.TrimSpace(pair) + if trimmedPair == "" { + return nil, fmt.Errorf("invalid empty parameter in --parameters") + } + + key, value, found := strings.Cut(trimmedPair, "=") + if !found { + return nil, fmt.Errorf("invalid parameter %q, expected key=value", trimmedPair) + } + + key = strings.TrimSpace(key) + value = strings.TrimSpace(value) + if key == "" || value == "" { + return nil, fmt.Errorf("invalid parameter %q, expected non-empty key and value", trimmedPair) + } + + parameters[key] = value + } + + return parameters, nil +} + +func listIntegrationResourcesRequest( + ctx core.CommandContext, + organizationID string, + integrationID string, + parameters map[string]string, +) (*integrationResourceListResponse, error) { + config := ctx.API.GetConfig() + if config == nil { + return nil, fmt.Errorf("api client config is required") + } + + baseURL, err := config.ServerURLWithContext(ctx.Context, "OrganizationAPIService.OrganizationsListIntegrationResources") + if err != nil { + return nil, err + } + if strings.TrimSpace(baseURL) == "" { + return nil, fmt.Errorf("api_url is required") + } + + values := url.Values{} + for key, value := range parameters { + values.Set(key, value) + } + + baseURL = strings.TrimRight(baseURL, "/") + endpoint := fmt.Sprintf( + "%s/api/v1/organizations/%s/integrations/%s/resources", + baseURL, + url.PathEscape(organizationID), + url.PathEscape(integrationID), + ) + if encoded := values.Encode(); encoded != "" { + endpoint = endpoint + "?" 
+ encoded + } + + request, err := http.NewRequestWithContext(ctx.Context, http.MethodGet, endpoint, nil) + if err != nil { + return nil, err + } + request.Header.Set("Accept", "application/json") + + if authorization := strings.TrimSpace(config.DefaultHeader["Authorization"]); authorization != "" { + request.Header.Set("Authorization", authorization) + } + + httpClient := config.HTTPClient + if httpClient == nil { + httpClient = &http.Client{Timeout: 30 * time.Second} + } + + response, err := httpClient.Do(request) + if err != nil { + return nil, err + } + defer response.Body.Close() + + body, err := io.ReadAll(response.Body) + if err != nil { + return nil, err + } + + if response.StatusCode >= http.StatusMultipleChoices { + errorPayload := struct { + Message string `json:"message"` + }{} + _ = json.Unmarshal(body, &errorPayload) + if errorPayload.Message != "" { + return nil, errors.New(errorPayload.Message) + } + return nil, fmt.Errorf("failed to list integration resources: %s", response.Status) + } + + payload := integrationResourceListResponse{} + if err := json.Unmarshal(body, &payload); err != nil { + return nil, err + } + + return &payload, nil +} diff --git a/pkg/cli/commands/integrations/root.go b/pkg/cli/commands/integrations/root.go new file mode 100644 index 0000000000..b7a0d94e27 --- /dev/null +++ b/pkg/cli/commands/integrations/root.go @@ -0,0 +1,50 @@ +package integrations + +import ( + "github.com/spf13/cobra" + "github.com/superplanehq/superplane/pkg/cli/core" +) + +func NewCommand(options core.BindOptions) *cobra.Command { + root := &cobra.Command{ + Use: "integrations", + Short: "Manage integrations", + } + + var connected bool + listCmd := &cobra.Command{ + Use: "list", + Short: "List integrations", + Args: cobra.NoArgs, + } + listCmd.Flags().BoolVar(&connected, "connected", false, "list connected integrations for the authenticated organization") + core.Bind(listCmd, &listCommand{connected: &connected}, options) + + getCmd := &cobra.Command{ + 
Use: "get ", + Short: "Get integration details", + Args: cobra.ExactArgs(1), + } + core.Bind(getCmd, &getCommand{}, options) + + var integrationID string + var resourceType string + var parameters string + listResourcesCmd := &cobra.Command{ + Use: "list-resources", + Short: "List integration resources", + Args: cobra.NoArgs, + } + listResourcesCmd.Flags().StringVar(&integrationID, "id", "", "connected integration id") + listResourcesCmd.Flags().StringVar(&resourceType, "type", "", "integration resource type") + listResourcesCmd.Flags().StringVar(¶meters, "parameters", "", "additional comma-separated query parameters (key=value,key2=value2)") + _ = listResourcesCmd.MarkFlagRequired("id") + _ = listResourcesCmd.MarkFlagRequired("type") + core.Bind(listResourcesCmd, &listResourcesCommand{integrationID: &integrationID, resourceType: &resourceType, parameters: ¶meters}, options) + + root.AddCommand(listCmd) + root.AddCommand(getCmd) + root.AddCommand(listResourcesCmd) + + return root +} diff --git a/pkg/cli/commands/triggers/get.go b/pkg/cli/commands/triggers/get.go new file mode 100644 index 0000000000..0e9c1bb808 --- /dev/null +++ b/pkg/cli/commands/triggers/get.go @@ -0,0 +1,58 @@ +package triggers + +import ( + "fmt" + "io" + + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +type getCommand struct{} + +func (c *getCommand) Execute(ctx core.CommandContext) error { + name := ctx.Args[0] + var trigger openapi_client.TriggersTrigger + + integrationName, triggerName, scoped := core.ParseIntegrationScopedName(name) + if scoped { + integration, err := core.FindIntegrationDefinition(ctx, integrationName) + if err != nil { + return err + } + + resolvedTrigger, err := findTrigger(integration, triggerName) + if err != nil { + return err + } + trigger = resolvedTrigger + } else { + response, _, err := ctx.API.TriggerAPI.TriggersDescribeTrigger(ctx.Context, name).Execute() + if err != nil { + return err + } + 
trigger = response.GetTrigger() + } + + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = fmt.Fprintf(stdout, "Name: %s\n", trigger.GetName()) + _, _ = fmt.Fprintf(stdout, "Label: %s\n", trigger.GetLabel()) + _, err := fmt.Fprintf(stdout, "Description: %s\n", trigger.GetDescription()) + return err + }) + } + + return ctx.Renderer.Render(trigger) +} + +func findTrigger(integration openapi_client.IntegrationsIntegrationDefinition, name string) (openapi_client.TriggersTrigger, error) { + for _, trigger := range integration.GetTriggers() { + triggerName := trigger.GetName() + if triggerName == name || triggerName == fmt.Sprintf("%s.%s", integration.GetName(), name) { + return trigger, nil + } + } + + return openapi_client.TriggersTrigger{}, fmt.Errorf("trigger %q not found in integration %q", name, integration.GetName()) +} diff --git a/pkg/cli/commands/triggers/list.go b/pkg/cli/commands/triggers/list.go new file mode 100644 index 0000000000..26d27feb46 --- /dev/null +++ b/pkg/cli/commands/triggers/list.go @@ -0,0 +1,45 @@ +package triggers + +import ( + "fmt" + "io" + "text/tabwriter" + + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +type listCommand struct { + from *string +} + +func (c *listCommand) Execute(ctx core.CommandContext) error { + triggers := []openapi_client.TriggersTrigger{} + + if c.from != nil && *c.from != "" { + integration, err := core.FindIntegrationDefinition(ctx, *c.from) + if err != nil { + return err + } + triggers = integration.GetTriggers() + } else { + response, _, err := ctx.API.TriggerAPI.TriggersListTriggers(ctx.Context).Execute() + if err != nil { + return err + } + triggers = response.GetTriggers() + } + + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) + _, _ = fmt.Fprintln(writer, "NAME\tLABEL\tDESCRIPTION") + for _, 
trigger := range triggers { + _, _ = fmt.Fprintf(writer, "%s\t%s\t%s\n", trigger.GetName(), trigger.GetLabel(), trigger.GetDescription()) + } + return writer.Flush() + }) + } + + return ctx.Renderer.Render(triggers) +} diff --git a/pkg/cli/commands/triggers/root.go b/pkg/cli/commands/triggers/root.go new file mode 100644 index 0000000000..6b98d1ad1a --- /dev/null +++ b/pkg/cli/commands/triggers/root.go @@ -0,0 +1,34 @@ +package triggers + +import ( + "github.com/spf13/cobra" + "github.com/superplanehq/superplane/pkg/cli/core" +) + +func NewCommand(options core.BindOptions) *cobra.Command { + root := &cobra.Command{ + Use: "triggers", + Short: "Manage triggers", + } + + var from string + listCmd := &cobra.Command{ + Use: "list", + Short: "List triggers", + Args: cobra.NoArgs, + } + listCmd.Flags().StringVar(&from, "from", "", "integration name") + core.Bind(listCmd, &listCommand{from: &from}, options) + + getCmd := &cobra.Command{ + Use: "get ", + Short: "Get a trigger", + Args: cobra.ExactArgs(1), + } + core.Bind(getCmd, &getCommand{}, options) + + root.AddCommand(listCmd) + root.AddCommand(getCmd) + + return root +} diff --git a/pkg/cli/config.go b/pkg/cli/config.go deleted file mode 100644 index 09e9773222..0000000000 --- a/pkg/cli/config.go +++ /dev/null @@ -1,108 +0,0 @@ -package cli - -import ( - "fmt" - "os" - - "github.com/spf13/cobra" - "github.com/spf13/viper" -) - -const ( - DefaultAPIURL = "http://localhost:8000" -) - -// Configuration keys -const ( - ConfigKeyAPIURL = "api_url" - ConfigKeyAPIToken = "api_token" - ConfigKeyFormat = "output_format" -) - -var configCmd = &cobra.Command{ - Use: "config", - Short: "Get and set configuration options", - Long: `Get and set CLI configuration options like API URL and authentication token.`, -} - -var configGetCmd = &cobra.Command{ - Use: "get [KEY]", - Short: "Display a configuration value", - Long: `Display the value of a specific configuration key.`, - Args: cobra.ExactArgs(1), - Run: func(cmd *cobra.Command, 
args []string) { - key := args[0] - - if viper.IsSet(key) { - value := viper.GetString(key) - fmt.Println(value) - } else { - fmt.Printf("Configuration key '%s' not found\n", key) - os.Exit(1) - } - }, -} - -var configSetCmd = &cobra.Command{ - Use: "set [KEY] [VALUE]", - Short: "Set a configuration value", - Long: `Set the value of a specific configuration key.`, - Args: cobra.ExactArgs(2), - Run: func(cmd *cobra.Command, args []string) { - key := args[0] - value := args[1] - - viper.Set(key, value) - err := viper.WriteConfig() - CheckWithMessage(err, "Failed to write configuration") - }, -} - -var configViewCmd = &cobra.Command{ - Use: "view", - Short: "View all configuration values", - Long: `Display all configuration values currently set.`, - Run: func(cmd *cobra.Command, args []string) { - allSettings := viper.AllSettings() - - if len(allSettings) == 0 { - fmt.Println("No configuration values set") - return - } - - fmt.Println("Current configuration:") - for key, value := range allSettings { - fmt.Printf(" %s: %v\n", key, value) - } - }, -} - -func GetAPIURL() string { - if viper.IsSet(ConfigKeyAPIURL) { - return viper.GetString(ConfigKeyAPIURL) - } - - return DefaultAPIURL -} - -func GetAPIToken() string { - return viper.GetString(ConfigKeyAPIToken) -} - -func GetOutputFormat() string { - if viper.IsSet(ConfigKeyFormat) { - return viper.GetString(ConfigKeyFormat) - } - return "text" -} - -func init() { - RootCmd.AddCommand(configCmd) - configCmd.AddCommand(configGetCmd) - configCmd.AddCommand(configSetCmd) - configCmd.AddCommand(configViewCmd) - - // Set default configuration values - viper.SetDefault(ConfigKeyAPIURL, DefaultAPIURL) - viper.SetDefault(ConfigKeyFormat, "text") -} diff --git a/pkg/cli/core/command.go b/pkg/cli/core/command.go new file mode 100644 index 0000000000..e3507232f6 --- /dev/null +++ b/pkg/cli/core/command.go @@ -0,0 +1,146 @@ +package core + +import ( + "context" + "encoding/json" + "fmt" + "io" + + "github.com/ghodss/yaml" + log 
"github.com/sirupsen/logrus" + "github.com/spf13/cobra" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +type OutputFormat string + +const ( + OutputFormatText OutputFormat = "text" + OutputFormatJSON OutputFormat = "json" + OutputFormatYAML OutputFormat = "yaml" +) + +type Renderer struct { + format OutputFormat + stdout io.Writer +} + +func NewRenderer(rawFormat string, stdout io.Writer) (Renderer, error) { + format := OutputFormat(rawFormat) + if format == "" { + format = OutputFormatText + } + + switch format { + case OutputFormatText, OutputFormatJSON, OutputFormatYAML: + return Renderer{format: format, stdout: stdout}, nil + default: + return Renderer{}, fmt.Errorf("invalid output format %q, expected one of: text, json, yaml", rawFormat) + } +} + +func (r Renderer) Format() OutputFormat { + return r.format +} + +func (r Renderer) IsText() bool { + return r.format == OutputFormatText +} + +func (r Renderer) Render(value any) error { + switch r.format { + case OutputFormatJSON: + payload, err := json.MarshalIndent(value, "", " ") + if err != nil { + return err + } + + _, err = fmt.Fprintln(r.stdout, string(payload)) + return err + case OutputFormatYAML: + payload, err := yaml.Marshal(value) + if err != nil { + return err + } + + _, err = fmt.Fprintln(r.stdout, string(payload)) + return err + case OutputFormatText: + return fmt.Errorf("text output requires RenderText") + default: + return fmt.Errorf("unsupported output format %q", r.format) + } +} + +func (r Renderer) RenderText(render func(io.Writer) error) error { + if r.format != OutputFormatText { + return fmt.Errorf("RenderText can only be used with text output") + } + + return render(r.stdout) +} + +type Command interface { + Execute(ctx CommandContext) error +} + +type CommandContext struct { + Context context.Context + Cmd *cobra.Command + Args []string + Logger *log.Entry + API *openapi_client.APIClient + Renderer Renderer +} + +type BindOptions struct { + NewAPIClient func() 
*openapi_client.APIClient + DefaultOutputFormat func() string +} + +func NewCommandContext(cmd *cobra.Command, args []string, options BindOptions) (CommandContext, error) { + ctx := cmd.Context() + if ctx == nil { + ctx = context.Background() + } + + outputFormat := "text" + if options.DefaultOutputFormat != nil { + outputFormat = options.DefaultOutputFormat() + } + + flagValue, err := cmd.Flags().GetString("output") + if err == nil && flagValue != "" { + outputFormat = flagValue + } + + renderer, err := NewRenderer(outputFormat, cmd.OutOrStdout()) + if err != nil { + return CommandContext{}, err + } + + commandContext := CommandContext{ + Context: ctx, + Cmd: cmd, + Args: args, + Logger: log.WithField("command", cmd.CommandPath()), + Renderer: renderer, + } + + if options.NewAPIClient != nil { + commandContext.API = options.NewAPIClient() + } + + return commandContext, nil +} + +func Bind(cmd *cobra.Command, command Command, options BindOptions) { + cmd.RunE = func(cobraCmd *cobra.Command, args []string) error { + ctx, err := NewCommandContext(cobraCmd, args, options) + if err != nil { + return err + } + + return command.Execute(ctx) + } +} diff --git a/pkg/cli/core/common.go b/pkg/cli/core/common.go new file mode 100644 index 0000000000..cc6e171771 --- /dev/null +++ b/pkg/cli/core/common.go @@ -0,0 +1,58 @@ +package core + +import ( + "fmt" + "strings" + + "github.com/ghodss/yaml" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +const ( + APIVersion = "v1" +) + +func ParseYamlResourceHeaders(raw []byte) (string, string, error) { + m := make(map[string]interface{}) + + err := yaml.Unmarshal(raw, &m) + if err != nil { + return "", "", fmt.Errorf("failed to parse resource; %s", err) + } + + apiVersion, ok := m["apiVersion"].(string) + if !ok { + return "", "", fmt.Errorf("failed to parse resource's api version") + } + + kind, ok := m["kind"].(string) + if !ok { + return "", "", fmt.Errorf("failed to parse resource's kind") + } + + return apiVersion, 
kind, nil +} + +func ParseIntegrationScopedName(name string) (string, string, bool) { + integrationName, resourceName, hasDot := strings.Cut(name, ".") + if !hasDot || integrationName == "" || resourceName == "" { + return "", "", false + } + + return integrationName, resourceName, true +} + +func FindIntegrationDefinition(ctx CommandContext, name string) (openapi_client.IntegrationsIntegrationDefinition, error) { + response, _, err := ctx.API.IntegrationAPI.IntegrationsListIntegrations(ctx.Context).Execute() + if err != nil { + return openapi_client.IntegrationsIntegrationDefinition{}, err + } + + for _, integration := range response.GetIntegrations() { + if integration.GetName() == name { + return integration, nil + } + } + + return openapi_client.IntegrationsIntegrationDefinition{}, fmt.Errorf("integration %q not found", name) +} diff --git a/pkg/cli/create.go b/pkg/cli/create.go deleted file mode 100644 index 3fcc07f51e..0000000000 --- a/pkg/cli/create.go +++ /dev/null @@ -1,78 +0,0 @@ -package cli - -import ( - "context" - "fmt" - "os" - - "github.com/spf13/cobra" - "github.com/superplanehq/superplane/pkg/cli/models" - "github.com/superplanehq/superplane/pkg/openapi_client" -) - -var createCmd = &cobra.Command{ - Use: "create", - Short: "Create a resource from a file.", - Long: `Create a SuperPlane resource from a YAML file.`, - - Run: func(cmd *cobra.Command, args []string) { - path, err := cmd.Flags().GetString("file") - CheckWithMessage(err, "Path not provided") - - // #nosec - data, err := os.ReadFile(path) - CheckWithMessage(err, "Failed to read from resource file.") - - _, kind, err := ParseYamlResourceHeaders(data) - Check(err) - - switch kind { - case models.CanvasKind: - resource, err := models.ParseCanvas(data) - Check(err) - - canvas := models.CanvasFromCanvas(*resource) - request := openapi_client.CanvasesCreateCanvasRequest{} - request.SetCanvas(canvas) - - client := DefaultClient() - _, _, err = 
client.CanvasAPI.CanvasesCreateCanvas(context.Background()).Body(request).Execute() - Check(err) - default: - Fail(fmt.Sprintf("Unsupported resource kind '%s'", kind)) - } - }, -} - -var createCanvasCmd = &cobra.Command{ - Use: "canvas ", - Short: "Create a canvas", - Args: cobra.ExactArgs(1), - Run: func(cmd *cobra.Command, args []string) { - name := args[0] - client := DefaultClient() - - resource := models.Canvas{ - APIVersion: APIVersion, - Kind: models.CanvasKind, - Metadata: &openapi_client.CanvasesCanvasMetadata{Name: &name}, - Spec: models.EmptyCanvasSpec(), - } - - canvas := models.CanvasFromCanvas(resource) - request := openapi_client.CanvasesCreateCanvasRequest{} - request.SetCanvas(canvas) - - _, _, err := client.CanvasAPI.CanvasesCreateCanvas(context.Background()).Body(request).Execute() - Check(err) - }, -} - -func init() { - RootCmd.AddCommand(createCmd) - createCmd.AddCommand(createCanvasCmd) - - // File flag for root create command - desc := "Filename, directory, or URL to files to use to create the resource" - createCmd.Flags().StringP("file", "f", "", desc) -} diff --git a/pkg/cli/delete.go b/pkg/cli/delete.go deleted file mode 100644 index 64316d77fd..0000000000 --- a/pkg/cli/delete.go +++ /dev/null @@ -1,16 +0,0 @@ -package cli - -import ( - "github.com/spf13/cobra" -) - -// Root describe command -var deleteCmd = &cobra.Command{ - Use: "delete", - Short: "Delete SuperPlane resources", - Long: `Delete a SuperPlane resource by ID or name.`, -} - -func init() { - RootCmd.AddCommand(deleteCmd) -} diff --git a/pkg/cli/get.go b/pkg/cli/get.go deleted file mode 100644 index 9c13540ac3..0000000000 --- a/pkg/cli/get.go +++ /dev/null @@ -1,58 +0,0 @@ -package cli - -import ( - "context" - "fmt" - "os" - - "github.com/ghodss/yaml" - "github.com/google/uuid" - "github.com/spf13/cobra" - "github.com/superplanehq/superplane/pkg/cli/models" - "github.com/superplanehq/superplane/pkg/openapi_client" -) - -// Root describe command -var getCmd = &cobra.Command{ - 
Use: "get", - Short: "Show details of SuperPlane resources", - Long: `Get detailed information about SuperPlane resources.`, - Aliases: []string{"desc", "get"}, -} - -var getCanvasCmd = &cobra.Command{ - Use: "canvas ", - Short: "Get a canvas", - Args: cobra.ExactArgs(1), - Run: func(cmd *cobra.Command, args []string) { - nameOrID := args[0] - client := DefaultClient() - ctx := context.Background() - - canvasID, err := findCanvasID(ctx, client, nameOrID) - Check(err) - - response, _, err := client.CanvasAPI.CanvasesDescribeCanvas(ctx, canvasID).Execute() - Check(err) - - resource := models.CanvasResourceFromCanvas(*response.Canvas) - output, err := yaml.Marshal(resource) - Check(err) - - fmt.Fprintln(os.Stdout, string(output)) - }, -} - -func findCanvasID(ctx context.Context, client *openapi_client.APIClient, nameOrID string) (string, error) { - _, err := uuid.Parse(nameOrID) - if err == nil { - return nameOrID, nil - } - - return findCanvasIDByName(ctx, client, nameOrID) -} - -func init() { - RootCmd.AddCommand(getCmd) - getCmd.AddCommand(getCanvasCmd) -} diff --git a/pkg/cli/list.go b/pkg/cli/list.go deleted file mode 100644 index bcd3f822d3..0000000000 --- a/pkg/cli/list.go +++ /dev/null @@ -1,49 +0,0 @@ -package cli - -import ( - "context" - "fmt" - "os" - "text/tabwriter" - "time" - - "github.com/spf13/cobra" -) - -// Root list command -var listCmd = &cobra.Command{ - Use: "list", - Short: "List SuperPlane resources", - Long: `List multiple SuperPlane resources.`, - Aliases: []string{"ls"}, -} - -var listCanvasCmd = &cobra.Command{ - Use: "canvas", - Short: "List canvases", - Aliases: []string{"canvases"}, - Args: cobra.NoArgs, - Run: func(cmd *cobra.Command, args []string) { - client := DefaultClient() - ctx := context.Background() - response, _, err := client.CanvasAPI.CanvasesListCanvases(ctx).Execute() - Check(err) - - writer := tabwriter.NewWriter(os.Stdout, 0, 8, 2, ' ', 0) - fmt.Fprintln(writer, "ID\tNAME\tCREATED_AT") - for _, canvas := range 
response.GetCanvases() { - metadata := canvas.GetMetadata() - createdAt := "" - if metadata.HasCreatedAt() { - createdAt = metadata.GetCreatedAt().Format(time.RFC3339) - } - fmt.Fprintf(writer, "%s\t%s\t%s\n", metadata.GetId(), metadata.GetName(), createdAt) - } - _ = writer.Flush() - }, -} - -func init() { - RootCmd.AddCommand(listCmd) - listCmd.AddCommand(listCanvasCmd) -} diff --git a/pkg/cli/root.go b/pkg/cli/root.go index d9e416c9a2..bde0335692 100644 --- a/pkg/cli/root.go +++ b/pkg/cli/root.go @@ -1,6 +1,7 @@ package cli import ( + "flag" "fmt" "io" "log" @@ -9,10 +10,24 @@ import ( "github.com/mitchellh/go-homedir" "github.com/spf13/cobra" "github.com/spf13/viper" + canvases "github.com/superplanehq/superplane/pkg/cli/commands/canvases" + components "github.com/superplanehq/superplane/pkg/cli/commands/components" + config "github.com/superplanehq/superplane/pkg/cli/commands/config" + integrations "github.com/superplanehq/superplane/pkg/cli/commands/integrations" + triggers "github.com/superplanehq/superplane/pkg/cli/commands/triggers" + "github.com/superplanehq/superplane/pkg/cli/core" +) + +const ( + DefaultAPIURL = "http://localhost:8000" + ConfigKeyAPIURL = "api_url" + ConfigKeyAPIToken = "api_token" + ConfigKeyFormat = "output_format" ) var cfgFile string var Verbose bool +var OutputFormat string var RootCmd = &cobra.Command{ Use: "superplane", @@ -26,10 +41,20 @@ var RootCmd = &cobra.Command{ } func init() { + viper.SetDefault(ConfigKeyAPIURL, DefaultAPIURL) + viper.SetDefault(ConfigKeyFormat, "text") cobra.OnInitialize(initConfig) RootCmd.PersistentFlags().BoolVarP(&Verbose, "verbose", "v", false, "verbose output") RootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.superplane.yaml)") + RootCmd.PersistentFlags().StringVarP(&OutputFormat, "output", "o", "", "output format: text|json|yaml (overrides config output_format)") + + options := defaultBindOptions() + RootCmd.AddCommand(canvases.NewCommand(options)) + 
RootCmd.AddCommand(components.NewCommand(options)) + RootCmd.AddCommand(triggers.NewCommand(options)) + RootCmd.AddCommand(integrations.NewCommand(options)) + RootCmd.AddCommand(config.NewCommand(options)) } func initConfig() { @@ -60,3 +85,49 @@ func initConfig() { } } } + +func defaultBindOptions() core.BindOptions { + return core.BindOptions{ + NewAPIClient: DefaultClient, + DefaultOutputFormat: GetOutputFormat, + } +} + +func GetAPIURL() string { + if viper.IsSet(ConfigKeyAPIURL) { + return viper.GetString(ConfigKeyAPIURL) + } + + return DefaultAPIURL +} + +func GetAPIToken() string { + return viper.GetString(ConfigKeyAPIToken) +} + +func GetOutputFormat() string { + if viper.IsSet(ConfigKeyFormat) { + return viper.GetString(ConfigKeyFormat) + } + + return "text" +} + +// Checks if an error is present. +// +// If it is present, it displays the provided message and exits with status 1. +func CheckWithMessage(err error, message string) { + if err != nil { + fmt.Fprintf(os.Stderr, "error: %+v\n", message) + + Exit(1) + } +} + +func Exit(code int) { + if flag.Lookup("test.v") == nil { + os.Exit(1) + } else { + panic(fmt.Sprintf("exit %d", code)) + } +} diff --git a/pkg/cli/update.go b/pkg/cli/update.go deleted file mode 100644 index 871e82758a..0000000000 --- a/pkg/cli/update.go +++ /dev/null @@ -1,60 +0,0 @@ -package cli - -import ( - "context" - "fmt" - "os" - - "github.com/spf13/cobra" - "github.com/superplanehq/superplane/pkg/cli/models" - "github.com/superplanehq/superplane/pkg/openapi_client" -) - -var updateCmd = &cobra.Command{ - Use: "update", - Short: "Update a resource from a file.", - Long: `Update a SuperPlane resource from a YAML file.`, - Aliases: []string{"update", "edit"}, - - Run: func(cmd *cobra.Command, args []string) { - path, err := cmd.Flags().GetString("file") - CheckWithMessage(err, "Path not provided") - - // #nosec - data, err := os.ReadFile(path) - CheckWithMessage(err, "Failed to read from resource file.") - - _, kind, err := 
ParseYamlResourceHeaders(data) - Check(err) - - switch kind { - case models.CanvasKind: - resource, err := models.ParseCanvas(data) - Check(err) - - client := DefaultClient() - ctx := context.Background() - - canvas := models.CanvasFromCanvas(*resource) - body := openapi_client.CanvasesUpdateCanvasBody{} - body.SetCanvas(canvas) - - _, _, err = client.CanvasAPI. - CanvasesUpdateCanvas(ctx, *resource.Metadata.Id). - Body(body). - Execute() - - Check(err) - default: - Fail(fmt.Sprintf("Unsupported resource kind '%s' for update", kind)) - } - }, -} - -func init() { - RootCmd.AddCommand(updateCmd) - - // File flag for root update command - desc := "Filename, directory, or URL to files to use to update the resource" - updateCmd.Flags().StringP("file", "f", "", desc) -} diff --git a/pkg/cli/utils.go b/pkg/cli/utils.go deleted file mode 100644 index 9504b25746..0000000000 --- a/pkg/cli/utils.go +++ /dev/null @@ -1,73 +0,0 @@ -package cli - -import ( - "context" - "fmt" - "os" - - "github.com/spf13/cobra" - "github.com/superplanehq/superplane/pkg/openapi_client" -) - -func getOneOrAnotherFlag(cmd *cobra.Command, flag1, flag2 string, required bool) string { - flag1Value, _ := cmd.Flags().GetString(flag1) - flag2Value, _ := cmd.Flags().GetString(flag2) - - if flag1Value != "" && flag2Value != "" { - fmt.Fprintf(os.Stderr, "Error: cannot specify both --%s and --%s\n", flag1, flag2) - os.Exit(1) - } - - if flag1Value != "" { - return flag1Value - } - - if flag2Value != "" { - return flag2Value - } - - if required { - fmt.Fprintf(os.Stderr, "Error: must specify either --%s or --%s\n", flag1, flag2) - os.Exit(1) - } - - return "" -} - -func getDomainOrExit(client *openapi_client.APIClient, cmd *cobra.Command) (string, string) { - response, _, err := client.MeAPI.MeMe(context.Background()).Execute() - Check(err) - - return string(openapi_client.AUTHORIZATIONDOMAINTYPE_DOMAIN_TYPE_ORGANIZATION), *response.OrganizationId -} - -func findCanvasIDByName(ctx context.Context, client 
*openapi_client.APIClient, name string) (string, error) { - response, _, err := client.CanvasAPI.CanvasesListCanvases(ctx).Execute() - if err != nil { - return "", err - } - - var matches []openapi_client.CanvasesCanvas - for _, canvas := range response.GetCanvases() { - if canvas.Metadata == nil || canvas.Metadata.Name == nil { - continue - } - if *canvas.Metadata.Name == name { - matches = append(matches, canvas) - } - } - - if len(matches) == 0 { - return "", fmt.Errorf("canvas %q not found", name) - } - - if len(matches) > 1 { - return "", fmt.Errorf("multiple canvases named %q found", name) - } - - if matches[0].Metadata == nil || matches[0].Metadata.Id == nil { - return "", fmt.Errorf("canvas %q is missing an id", name) - } - - return *matches[0].Metadata.Id, nil -} diff --git a/pkg/cli/whoami.go b/pkg/cli/whoami.go index 108f3a26a8..ad382df37d 100644 --- a/pkg/cli/whoami.go +++ b/pkg/cli/whoami.go @@ -1,30 +1,41 @@ package cli import ( - "context" "fmt" + "io" "github.com/spf13/cobra" + "github.com/superplanehq/superplane/pkg/cli/core" ) +type whoamiCommand struct{} + +func (w *whoamiCommand) Execute(ctx core.CommandContext) error { + response, _, err := ctx.API.MeAPI.MeMe(ctx.Context).Execute() + if err != nil { + return err + } + + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = fmt.Fprintf(stdout, "ID: %s\n", response.GetId()) + _, _ = fmt.Fprintf(stdout, "Email: %s\n", response.GetEmail()) + _, _ = fmt.Fprintf(stdout, "Organization: %s\n", response.GetOrganizationId()) + return nil + }) + } + + return ctx.Renderer.Render(response) +} + var whoamiCmd = &cobra.Command{ Use: "whoami", Short: "Get information about the currently authenticated user", Aliases: []string{"events"}, Args: cobra.NoArgs, - - Run: func(cmd *cobra.Command, args []string) { - c := DefaultClient() - - response, _, err := c.MeAPI.MeMe(context.Background()).Execute() - Check(err) - - fmt.Printf("ID: %s\n", response.GetId()) - 
fmt.Printf("Email: %s\n", response.GetEmail()) - fmt.Printf("Organization: %s\n", response.GetOrganizationId()) - }, } func init() { + core.Bind(whoamiCmd, &whoamiCommand{}, defaultBindOptions()) RootCmd.AddCommand(whoamiCmd) } diff --git a/pkg/cli/yaml_resource.go b/pkg/cli/yaml_resource.go deleted file mode 100644 index 0ec3fe4dd0..0000000000 --- a/pkg/cli/yaml_resource.go +++ /dev/null @@ -1,35 +0,0 @@ -package cli - -import ( - "fmt" - - "github.com/ghodss/yaml" -) - -const ( - APIVersion = "v1" -) - -// returns tuple (apiVersion, kind, error) -func ParseYamlResourceHeaders(raw []byte) (string, string, error) { - m := make(map[string]interface{}) - - err := yaml.Unmarshal(raw, &m) - if err != nil { - return "", "", fmt.Errorf("failed to parse resource; %s", err) - } - - apiVersion, ok := m["apiVersion"].(string) - - if !ok { - return "", "", fmt.Errorf("failed to parse resource's api version") - } - - kind, ok := m["kind"].(string) - - if !ok { - return "", "", fmt.Errorf("failed to parse resource's kind") - } - - return apiVersion, kind, nil -} diff --git a/pkg/cli/yaml_resource_test.go b/pkg/cli/yaml_resource_test.go deleted file mode 100644 index a7bf51106e..0000000000 --- a/pkg/cli/yaml_resource_test.go +++ /dev/null @@ -1,67 +0,0 @@ -package cli - -import ( - "testing" - - assert "github.com/stretchr/testify/assert" -) - -func Test__ParseYamlResourceHeaders__InvalidValidResource(t *testing.T) { - resource := []byte(` - kind: Projec t -apiVersio n: v1alpha`) - - _, _, err := ParseYamlResourceHeaders(resource) - - assert.Equal(t, err.Error(), "failed to parse resource; error converting YAML to JSON: yaml: line 2: found character that cannot start any token") -} - -func Test__ParseYamlResourceHeaders__ValidResource(t *testing.T) { - resource := []byte(` -kind: Project -apiVersion: v1alpha`) - - apiVersion, kind, err := ParseYamlResourceHeaders(resource) - - assert.Nil(t, err) - assert.Equal(t, kind, "Project") - assert.Equal(t, apiVersion, "v1alpha") -} - 
-func Test__ParseYamlResourceHeaders__KindMissing(t *testing.T) { - resource := []byte(`apiVersion: v1alpha`) - - _, _, err := ParseYamlResourceHeaders(resource) - - assert.Equal(t, err.Error(), "failed to parse resource's kind") -} - -func Test__ParseYamlResourceHeaders__ApiVersionMissing(t *testing.T) { - resource := []byte(`kind: Project`) - - _, _, err := ParseYamlResourceHeaders(resource) - - assert.Equal(t, err.Error(), "failed to parse resource's api version") -} - -func Test__ParseYamlResourceHeaders__KindIsWrongType(t *testing.T) { - resource := []byte(` -kind: - test: Project -apiVersion: v1alpha`) - - _, _, err := ParseYamlResourceHeaders(resource) - - assert.Equal(t, err.Error(), "failed to parse resource's kind") -} - -func Test__ParseYamlResourceHeaders__ApiVersionWrongType(t *testing.T) { - resource := []byte(` -kind: Project -apiVersion: - test: v1alpha`) - - _, _, err := ParseYamlResourceHeaders(resource) - - assert.Equal(t, err.Error(), "failed to parse resource's api version") -} From 0a1cd3be10fbaa9418304bad718fe2953fe150fc Mon Sep 17 00:00:00 2001 From: Aleksandar Mitrovic <61409859+AleksandarCole@users.noreply.github.com> Date: Mon, 16 Feb 2026 11:32:33 +0100 Subject: [PATCH 114/160] chore: Add new integrations to Readme (#3125) Signed-off-by: Muhammad Fuzail Zubari --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index 3606cfd123..080c5bcbdf 100644 --- a/README.md +++ b/README.md @@ -64,6 +64,7 @@ SuperPlane integrates with the tools you already use. Each integration provides +
    Claude
    Claude
    Cursor
    Cursor
    OpenAI
    OpenAI
    @@ -72,6 +73,7 @@ SuperPlane integrates with the tools you already use. Each integration provides + @@ -86,7 +88,10 @@ SuperPlane integrates with the tools you already use. Each integration provides + + +
    CircleCI
    CircleCI
    GitHub
    GitHub
    GitLab
    GitLab
    Semaphore
    Semaphore
    AWS ECR
    AWS ECR
    AWS Lambda
    AWS Lambda
    AWS CodeArtifact
    AWS CodeArtifact
    AWS CloudWatch
    AWS CloudWatch
    AWS SNS
    AWS SNS
    Cloudflare
    Cloudflare
    Hetzner Cloud
    Hetzner Cloud
    DockerHub
    DockerHub
    @@ -97,6 +102,7 @@ SuperPlane integrates with the tools you already use. Each integration provides DataDog
    DataDog
    Dash0
    Dash0
    +Prometheus
    Prometheus
    From e0b4d6a3c9194841dc882cb3d720a476c4df4818 Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Mon, 16 Feb 2026 08:34:32 -0300 Subject: [PATCH 115/160] feat: manage secrets through CLI (#3126) Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- pkg/cli/commands/secrets/common.go | 142 +++++++++++++++++++++++++++++ pkg/cli/commands/secrets/create.go | 55 +++++++++++ pkg/cli/commands/secrets/delete.go | 35 +++++++ pkg/cli/commands/secrets/get.go | 34 +++++++ pkg/cli/commands/secrets/list.go | 34 +++++++ pkg/cli/commands/secrets/root.go | 63 +++++++++++++ pkg/cli/commands/secrets/update.go | 61 +++++++++++++ pkg/cli/root.go | 2 + 8 files changed, 426 insertions(+) create mode 100644 pkg/cli/commands/secrets/common.go create mode 100644 pkg/cli/commands/secrets/create.go create mode 100644 pkg/cli/commands/secrets/delete.go create mode 100644 pkg/cli/commands/secrets/get.go create mode 100644 pkg/cli/commands/secrets/list.go create mode 100644 pkg/cli/commands/secrets/root.go create mode 100644 pkg/cli/commands/secrets/update.go diff --git a/pkg/cli/commands/secrets/common.go b/pkg/cli/commands/secrets/common.go new file mode 100644 index 0000000000..d39015a5a0 --- /dev/null +++ b/pkg/cli/commands/secrets/common.go @@ -0,0 +1,142 @@ +package secrets + +import ( + "fmt" + "io" + "os" + "sort" + "strings" + "text/tabwriter" + "time" + + "github.com/ghodss/yaml" + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +const ( + SecretKind = "Secret" +) + +type secretResource struct { + APIVersion string `json:"apiVersion"` + Kind string `json:"kind"` + Metadata *openapi_client.SecretsSecretMetadata `json:"metadata,omitempty"` + Spec *openapi_client.SecretsSecretSpec `json:"spec,omitempty"` +} + +func resolveOrganizationID(ctx core.CommandContext) (string, error) { + me, _, err := ctx.API.MeAPI.MeMe(ctx.Context).Execute() + if err != nil { + return "", err + } + + if 
!me.HasOrganizationId() || strings.TrimSpace(me.GetOrganizationId()) == "" { + return "", fmt.Errorf("organization id not found for authenticated user") + } + + return me.GetOrganizationId(), nil +} + +func organizationDomainType() openapi_client.AuthorizationDomainType { + return openapi_client.AUTHORIZATIONDOMAINTYPE_DOMAIN_TYPE_ORGANIZATION +} + +func parseSecretFile(path string) (*secretResource, error) { + // #nosec + data, err := os.ReadFile(path) + if err != nil { + return nil, fmt.Errorf("failed to read resource file: %w", err) + } + + apiVersion, kind, err := core.ParseYamlResourceHeaders(data) + if err != nil { + return nil, err + } + + if apiVersion != core.APIVersion { + return nil, fmt.Errorf("unsupported apiVersion %q", apiVersion) + } + + if kind != SecretKind { + return nil, fmt.Errorf("unsupported resource kind %q", kind) + } + + resource := secretResource{} + if err := yaml.Unmarshal(data, &resource); err != nil { + return nil, fmt.Errorf("failed to parse secret resource: %w", err) + } + + return &resource, nil +} + +func resourceToSecret(resource secretResource) openapi_client.SecretsSecret { + secret := openapi_client.SecretsSecret{} + if resource.Metadata != nil { + secret.SetMetadata(*resource.Metadata) + } + if resource.Spec != nil { + secret.SetSpec(*resource.Spec) + } + return secret +} + +func renderSecretListText(stdout io.Writer, items []openapi_client.SecretsSecret) error { + writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) + _, _ = fmt.Fprintln(writer, "ID\tNAME\tPROVIDER\tKEYS\tCREATED_AT") + + for _, item := range items { + metadata := item.GetMetadata() + spec := item.GetSpec() + + keyCount := 0 + if local, ok := spec.GetLocalOk(); ok && local.HasData() { + keyCount = len(local.GetData()) + } + + createdAt := "" + if metadata.HasCreatedAt() { + createdAt = metadata.GetCreatedAt().Format(time.RFC3339) + } + + _, _ = fmt.Fprintf( + writer, + "%s\t%s\t%s\t%d\t%s\n", + metadata.GetId(), + metadata.GetName(), + spec.GetProvider(), 
+ keyCount, + createdAt, + ) + } + + return writer.Flush() +} + +func renderSecretText(stdout io.Writer, item openapi_client.SecretsSecret) error { + metadata := item.GetMetadata() + spec := item.GetSpec() + + _, _ = fmt.Fprintf(stdout, "ID: %s\n", metadata.GetId()) + _, _ = fmt.Fprintf(stdout, "Name: %s\n", metadata.GetName()) + _, _ = fmt.Fprintf(stdout, "Provider: %s\n", spec.GetProvider()) + _, _ = fmt.Fprintf(stdout, "DomainType: %s\n", metadata.GetDomainType()) + _, _ = fmt.Fprintf(stdout, "DomainID: %s\n", metadata.GetDomainId()) + if metadata.HasCreatedAt() { + _, _ = fmt.Fprintf(stdout, "CreatedAt: %s\n", metadata.GetCreatedAt().Format(time.RFC3339)) + } + + _, _ = fmt.Fprintln(stdout, "Keys:") + keys := make([]string, 0) + if local, ok := spec.GetLocalOk(); ok && local.HasData() { + for key := range local.GetData() { + keys = append(keys, key) + } + } + sort.Strings(keys) + for _, key := range keys { + _, _ = fmt.Fprintf(stdout, "- %s\n", key) + } + + return nil +} diff --git a/pkg/cli/commands/secrets/create.go b/pkg/cli/commands/secrets/create.go new file mode 100644 index 0000000000..64f3b5cfff --- /dev/null +++ b/pkg/cli/commands/secrets/create.go @@ -0,0 +1,55 @@ +package secrets + +import ( + "fmt" + "io" + + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +type createCommand struct { + file *string +} + +func (c *createCommand) Execute(ctx core.CommandContext) error { + filePath := "" + if c.file != nil { + filePath = *c.file + } + if filePath == "" { + return fmt.Errorf("--file is required") + } + + organizationID, err := resolveOrganizationID(ctx) + if err != nil { + return err + } + + resource, err := parseSecretFile(filePath) + if err != nil { + return err + } + + secret := resourceToSecret(*resource) + + request := openapi_client.SecretsCreateSecretRequest{} + request.SetSecret(secret) + request.SetDomainType(organizationDomainType()) + request.SetDomainId(organizationID) + + 
response, _, err := ctx.API.SecretAPI.SecretsCreateSecret(ctx.Context).Body(request).Execute() + if err != nil { + return err + } + + createdSecret := response.GetSecret() + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = fmt.Fprintln(stdout, "Secret created") + return renderSecretText(stdout, createdSecret) + }) + } + + return ctx.Renderer.Render(createdSecret) +} diff --git a/pkg/cli/commands/secrets/delete.go b/pkg/cli/commands/secrets/delete.go new file mode 100644 index 0000000000..6fcb9d478b --- /dev/null +++ b/pkg/cli/commands/secrets/delete.go @@ -0,0 +1,35 @@ +package secrets + +import ( + "fmt" + "io" + + "github.com/superplanehq/superplane/pkg/cli/core" +) + +type deleteCommand struct{} + +func (c *deleteCommand) Execute(ctx core.CommandContext) error { + organizationID, err := resolveOrganizationID(ctx) + if err != nil { + return err + } + + response, _, err := ctx.API.SecretAPI. + SecretsDeleteSecret(ctx.Context, ctx.Args[0]). + DomainType(string(organizationDomainType())). + DomainId(organizationID). + Execute() + if err != nil { + return err + } + + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, err := fmt.Fprintf(stdout, "Secret deleted: %s\n", ctx.Args[0]) + return err + }) + } + + return ctx.Renderer.Render(response) +} diff --git a/pkg/cli/commands/secrets/get.go b/pkg/cli/commands/secrets/get.go new file mode 100644 index 0000000000..e58d83c55c --- /dev/null +++ b/pkg/cli/commands/secrets/get.go @@ -0,0 +1,34 @@ +package secrets + +import ( + "io" + + "github.com/superplanehq/superplane/pkg/cli/core" +) + +type getCommand struct{} + +func (c *getCommand) Execute(ctx core.CommandContext) error { + organizationID, err := resolveOrganizationID(ctx) + if err != nil { + return err + } + + response, _, err := ctx.API.SecretAPI. + SecretsDescribeSecret(ctx.Context, ctx.Args[0]). + DomainType(string(organizationDomainType())). + DomainId(organizationID). 
+ Execute() + if err != nil { + return err + } + + secret := response.GetSecret() + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + return renderSecretText(stdout, secret) + }) + } + + return ctx.Renderer.Render(secret) +} diff --git a/pkg/cli/commands/secrets/list.go b/pkg/cli/commands/secrets/list.go new file mode 100644 index 0000000000..8400a04860 --- /dev/null +++ b/pkg/cli/commands/secrets/list.go @@ -0,0 +1,34 @@ +package secrets + +import ( + "io" + + "github.com/superplanehq/superplane/pkg/cli/core" +) + +type listCommand struct{} + +func (c *listCommand) Execute(ctx core.CommandContext) error { + organizationID, err := resolveOrganizationID(ctx) + if err != nil { + return err + } + + response, _, err := ctx.API.SecretAPI. + SecretsListSecrets(ctx.Context). + DomainType(string(organizationDomainType())). + DomainId(organizationID). + Execute() + if err != nil { + return err + } + + secrets := response.GetSecrets() + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + return renderSecretListText(stdout, secrets) + }) + } + + return ctx.Renderer.Render(secrets) +} diff --git a/pkg/cli/commands/secrets/root.go b/pkg/cli/commands/secrets/root.go new file mode 100644 index 0000000000..37a07b9114 --- /dev/null +++ b/pkg/cli/commands/secrets/root.go @@ -0,0 +1,63 @@ +package secrets + +import ( + "github.com/spf13/cobra" + "github.com/superplanehq/superplane/pkg/cli/core" +) + +func NewCommand(options core.BindOptions) *cobra.Command { + root := &cobra.Command{ + Use: "secrets", + Short: "Manage secrets", + Aliases: []string{"secret"}, + } + + listCmd := &cobra.Command{ + Use: "list", + Short: "List secrets", + Args: cobra.NoArgs, + } + core.Bind(listCmd, &listCommand{}, options) + + getCmd := &cobra.Command{ + Use: "get ", + Short: "Get a secret", + Args: cobra.ExactArgs(1), + } + core.Bind(getCmd, &getCommand{}, options) + + createCmd := &cobra.Command{ + Use: "create", + 
Short: "Create a secret", + Args: cobra.NoArgs, + } + var createFile string + createCmd.Flags().StringVarP(&createFile, "file", "f", "", "filename, directory, or URL to files to use to create the resource") + _ = createCmd.MarkFlagRequired("file") + core.Bind(createCmd, &createCommand{file: &createFile}, options) + + updateCmd := &cobra.Command{ + Use: "update", + Short: "Update a secret from a file", + Args: cobra.NoArgs, + } + var updateFile string + updateCmd.Flags().StringVarP(&updateFile, "file", "f", "", "filename, directory, or URL to files to use to update the resource") + _ = updateCmd.MarkFlagRequired("file") + core.Bind(updateCmd, &updateCommand{file: &updateFile}, options) + + deleteCmd := &cobra.Command{ + Use: "delete ", + Short: "Delete a secret", + Args: cobra.ExactArgs(1), + } + core.Bind(deleteCmd, &deleteCommand{}, options) + + root.AddCommand(listCmd) + root.AddCommand(getCmd) + root.AddCommand(createCmd) + root.AddCommand(updateCmd) + root.AddCommand(deleteCmd) + + return root +} diff --git a/pkg/cli/commands/secrets/update.go b/pkg/cli/commands/secrets/update.go new file mode 100644 index 0000000000..0ddbfcc021 --- /dev/null +++ b/pkg/cli/commands/secrets/update.go @@ -0,0 +1,61 @@ +package secrets + +import ( + "fmt" + "io" + + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +type updateCommand struct { + file *string +} + +func (c *updateCommand) Execute(ctx core.CommandContext) error { + filePath := "" + if c.file != nil { + filePath = *c.file + } + if filePath == "" { + return fmt.Errorf("--file is required") + } + if len(ctx.Args) > 0 { + return fmt.Errorf("update does not accept positional arguments") + } + + organizationID, err := resolveOrganizationID(ctx) + if err != nil { + return err + } + + resource, err := parseSecretFile(filePath) + if err != nil { + return err + } + if resource.Metadata == nil || resource.Metadata.GetId() == "" { + return fmt.Errorf("secret 
metadata.id is required for update") + } + + secret := resourceToSecret(*resource) + + request := openapi_client.SecretsUpdateSecretBody{} + request.SetSecret(secret) + request.SetDomainType(organizationDomainType()) + request.SetDomainId(organizationID) + + response, _, err := ctx.API.SecretAPI.SecretsUpdateSecret(ctx.Context, resource.Metadata.GetId()).Body(request).Execute() + if err != nil { + return err + } + + updatedSecret := response.GetSecret() + if ctx.Renderer.IsText() { + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = fmt.Fprintln(stdout, "Secret updated") + return renderSecretText(stdout, updatedSecret) + }) + } + + return ctx.Renderer.Render(updatedSecret) +} diff --git a/pkg/cli/root.go b/pkg/cli/root.go index bde0335692..d8de18fa67 100644 --- a/pkg/cli/root.go +++ b/pkg/cli/root.go @@ -14,6 +14,7 @@ import ( components "github.com/superplanehq/superplane/pkg/cli/commands/components" config "github.com/superplanehq/superplane/pkg/cli/commands/config" integrations "github.com/superplanehq/superplane/pkg/cli/commands/integrations" + secrets "github.com/superplanehq/superplane/pkg/cli/commands/secrets" triggers "github.com/superplanehq/superplane/pkg/cli/commands/triggers" "github.com/superplanehq/superplane/pkg/cli/core" ) @@ -54,6 +55,7 @@ func init() { RootCmd.AddCommand(components.NewCommand(options)) RootCmd.AddCommand(triggers.NewCommand(options)) RootCmd.AddCommand(integrations.NewCommand(options)) + RootCmd.AddCommand(secrets.NewCommand(options)) RootCmd.AddCommand(config.NewCommand(options)) } From ee96ee78ca19e8a9d50cb6015e8caf2c4b25f31a Mon Sep 17 00:00:00 2001 From: Konstantin Komelin Date: Mon, 16 Feb 2026 12:39:47 +0100 Subject: [PATCH 116/160] docs: Eliminated naming inconcistency in the itegration guide (#3127) ## Summary There is an inconsistency in object naming in the Integration Development Guide. Some codes samples use `MyApp` instead of `MyIntegration`, which could mislead AI as well as humans. 
This PR fixes the inconsistency. Signed-off-by: Konstantin Komelin Signed-off-by: Muhammad Fuzail Zubari --- docs/contributing/integrations.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/contributing/integrations.md b/docs/contributing/integrations.md index 3ebe6b14c3..96d2921277 100644 --- a/docs/contributing/integrations.md +++ b/docs/contributing/integrations.md @@ -188,7 +188,7 @@ type OnEventConfiguration struct { } func (t *OnEvent) Name() string { - return "myapp.onEvent" + return "myintegration.onEvent" } func (t *OnEvent) Label() string { @@ -334,7 +334,7 @@ func (t *OnEvent) HandleWebhook(ctx core.WebhookRequestContext) (int, error) { Add the trigger to your integration's `Triggers()` method: ```go -func (a *MyApp) Triggers() []core.Trigger { +func (i *MyIntegration) Triggers() []core.Trigger { return []core.Trigger{ &OnEvent{}, } @@ -435,7 +435,7 @@ interface OnEventEventData { } /** - * Renderer for the "myapp.onEvent" trigger + * Renderer for the "myintegration.onEvent" trigger */ export const onEventTriggerRenderer: TriggerRenderer = { getTitleAndSubtitle: (event: WorkflowsWorkflowEvent): { title: string; subtitle: string } => { From 18fa2863840b2c4e04d7cd93032b4c09f764b2e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Mon, 16 Feb 2026 12:40:13 +0100 Subject: [PATCH 117/160] chore: Fix generated integration filenames (#3128) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- docs/components/{Hetzner Cloud.mdx => HetznerCloud.mdx} | 0 scripts/generate_components_docs.go | 3 +++ 2 files changed, 3 insertions(+) rename docs/components/{Hetzner Cloud.mdx => HetznerCloud.mdx} (100%) diff --git a/docs/components/Hetzner Cloud.mdx b/docs/components/HetznerCloud.mdx similarity index 100% rename from docs/components/Hetzner Cloud.mdx rename to docs/components/HetznerCloud.mdx diff --git 
a/scripts/generate_components_docs.go b/scripts/generate_components_docs.go index 483b53f1b1..8d72286d08 100644 --- a/scripts/generate_components_docs.go +++ b/scripts/generate_components_docs.go @@ -280,9 +280,12 @@ func slugify(value string) string { func integrationFilename(integration core.Integration) string { label := strings.TrimSpace(integration.Label()) + label = strings.ReplaceAll(label, " ", "") + if label == "" { return slugify(integration.Name()) } + return label } From cab1b98cca5f11b601ab2661cfd6d2e2bd654261 Mon Sep 17 00:00:00 2001 From: Lucas Pinheiro Date: Mon, 16 Feb 2026 11:02:55 -0300 Subject: [PATCH 118/160] feat(cli): group discovery-related commands under index command (#3131) Right now, we have this: ``` superplane integrations list -> list available integrations superplane integrations list --connected -> list connected integrations superplane integrations get -> get details about an available integration ``` The issue here is that we also need commands for managing connected integrations, and having them and the discovery-related commands under the same root is a bit confusing. So, I'm moving all the discovery-related commands to a new index command: ``` superplane index integrations superplane index integrations --name superplane index triggers superplane index triggers --from superplane index triggers --name superplane index components superplane index components --from superplane index components --name ``` With that, everything under the index command is about discovery, and finding available integrations, triggers, and components you can use, and `superplane integrations` becomes about managing connected integrations. 
--------- Signed-off-by: Lucas Pinheiro Signed-off-by: Muhammad Fuzail Zubari --- .cursor/skills/superplane-cli/SKILL.md | 36 ++--- pkg/cli/commands/canvases/get.go | 18 +-- pkg/cli/commands/canvases/list.go | 34 ++--- pkg/cli/commands/components/get.go | 58 -------- pkg/cli/commands/components/list.go | 45 ------ pkg/cli/commands/components/root.go | 34 ----- pkg/cli/commands/index/components.go | 130 +++++++++++++++++ pkg/cli/commands/index/integrations.go | 74 ++++++++++ pkg/cli/commands/index/root.go | 19 +++ pkg/cli/commands/index/triggers.go | 133 ++++++++++++++++++ pkg/cli/commands/integrations/get.go | 36 +++-- pkg/cli/commands/integrations/list.go | 87 ++++-------- .../commands/integrations/list_resources.go | 42 +++--- pkg/cli/commands/integrations/root.go | 12 +- pkg/cli/commands/secrets/create.go | 11 +- pkg/cli/commands/secrets/get.go | 10 +- pkg/cli/commands/secrets/list.go | 10 +- pkg/cli/commands/secrets/update.go | 11 +- pkg/cli/commands/triggers/get.go | 58 -------- pkg/cli/commands/triggers/list.go | 45 ------ pkg/cli/commands/triggers/root.go | 34 ----- pkg/cli/root.go | 6 +- 22 files changed, 505 insertions(+), 438 deletions(-) delete mode 100644 pkg/cli/commands/components/get.go delete mode 100644 pkg/cli/commands/components/list.go delete mode 100644 pkg/cli/commands/components/root.go create mode 100644 pkg/cli/commands/index/components.go create mode 100644 pkg/cli/commands/index/integrations.go create mode 100644 pkg/cli/commands/index/root.go create mode 100644 pkg/cli/commands/index/triggers.go delete mode 100644 pkg/cli/commands/triggers/get.go delete mode 100644 pkg/cli/commands/triggers/list.go delete mode 100644 pkg/cli/commands/triggers/root.go diff --git a/.cursor/skills/superplane-cli/SKILL.md b/.cursor/skills/superplane-cli/SKILL.md index 2a6679909e..2cd8a6b82e 100644 --- a/.cursor/skills/superplane-cli/SKILL.md +++ b/.cursor/skills/superplane-cli/SKILL.md @@ -12,29 +12,30 @@ Use this workflow to build or debug canvases from the 
CLI. Run these first: ```bash +superplane index integrations superplane integrations list -superplane integrations list --connected -superplane triggers list -superplane components list +superplane index triggers +superplane index components ``` Narrow to one integration: ```bash -superplane triggers list --from github -superplane components list --from github -superplane components list --from semaphore +superplane index triggers --from github +superplane index components --from github +superplane index components --from semaphore ``` -Use `--connected` to list organization-connected integration instances (not just available providers). +Use `superplane integrations list` to list organization-connected integration instances (not just available providers). Inspect required config fields and example payloads: ```bash -superplane triggers get github.onPush -superplane components get semaphore.runWorkflow -superplane components get github.runWorkflow -superplane components get approval +superplane index integrations --name github +superplane index triggers --name github.onPush +superplane index components --name semaphore.runWorkflow +superplane index components --name github.runWorkflow +superplane index components --name approval ``` List runtime options for `integration-resource` fields: @@ -43,7 +44,7 @@ List runtime options for `integration-resource` fields: superplane integrations list-resources --id --type --parameters key1=value1,key2=value2 ``` -Use `superplane integrations list --connected` first to find valid integration IDs. +Use `superplane integrations list` first to find valid integration IDs. ## Build canvas incrementally @@ -189,11 +190,12 @@ Symptoms of missing binding: How to resolve: -1. Run `superplane integrations list --connected` and confirm required providers are connected for the org. -2. Ensure the provider integration (GitHub, Semaphore, etc.) is installed and authenticated for the organization. -3. 
Reopen the node config and select valid provider resources for required fields. -4. Use `superplane integrations list-resources --id --type --parameters ...` to inspect valid option IDs/names. -5. Re-run `superplane canvases get ` and confirm node errors are cleared. +1. Run `superplane integrations list` and confirm required providers are connected for the org. +2. Use `superplane integrations get ` to inspect one connected integration when needed. +3. Ensure the provider integration (GitHub, Semaphore, etc.) is installed and authenticated for the organization. +4. Reopen the node config and select valid provider resources for required fields. +5. Use `superplane integrations list-resources --id --type --parameters ...` to inspect valid option IDs/names. +6. Re-run `superplane canvases get ` and confirm node errors are cleared. ## Troubleshooting checklist diff --git a/pkg/cli/commands/canvases/get.go b/pkg/cli/commands/canvases/get.go index 79458d1743..ec9a387736 100644 --- a/pkg/cli/commands/canvases/get.go +++ b/pkg/cli/commands/canvases/get.go @@ -24,17 +24,17 @@ func (c *getCommand) Execute(ctx core.CommandContext) error { } resource := models.CanvasResourceFromCanvas(*response.Canvas) - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - _, _ = fmt.Fprintf(stdout, "ID: %s\n", resource.Metadata.GetId()) - _, _ = fmt.Fprintf(stdout, "Name: %s\n", resource.Metadata.GetName()) - _, _ = fmt.Fprintf(stdout, "Nodes: %d\n", len(resource.Spec.GetNodes())) - _, err := fmt.Fprintf(stdout, "Edges: %d\n", len(resource.Spec.GetEdges())) - return err - }) + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(resource) } - return ctx.Renderer.Render(resource) + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = fmt.Fprintf(stdout, "ID: %s\n", resource.Metadata.GetId()) + _, _ = fmt.Fprintf(stdout, "Name: %s\n", resource.Metadata.GetName()) + _, _ = fmt.Fprintf(stdout, "Nodes: %d\n", 
len(resource.Spec.GetNodes())) + _, err := fmt.Fprintf(stdout, "Edges: %d\n", len(resource.Spec.GetEdges())) + return err + }) } func findCanvasID(ctx core.CommandContext, client *openapi_client.APIClient, nameOrID string) (string, error) { diff --git a/pkg/cli/commands/canvases/list.go b/pkg/cli/commands/canvases/list.go index 7818ed611d..d28b28ce3b 100644 --- a/pkg/cli/commands/canvases/list.go +++ b/pkg/cli/commands/canvases/list.go @@ -24,23 +24,23 @@ func (c *listCommand) Execute(ctx core.CommandContext) error { resources = append(resources, models.CanvasResourceFromCanvas(canvas)) } - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) - _, _ = fmt.Fprintln(writer, "ID\tNAME\tCREATED_AT") - - for _, canvas := range canvases { - metadata := canvas.GetMetadata() - createdAt := "" - if metadata.HasCreatedAt() { - createdAt = metadata.GetCreatedAt().Format(time.RFC3339) - } - _, _ = fmt.Fprintf(writer, "%s\t%s\t%s\n", metadata.GetId(), metadata.GetName(), createdAt) - } - - return writer.Flush() - }) + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(resources) } - return ctx.Renderer.Render(resources) + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) + _, _ = fmt.Fprintln(writer, "ID\tNAME\tCREATED_AT") + + for _, canvas := range canvases { + metadata := canvas.GetMetadata() + createdAt := "" + if metadata.HasCreatedAt() { + createdAt = metadata.GetCreatedAt().Format(time.RFC3339) + } + _, _ = fmt.Fprintf(writer, "%s\t%s\t%s\n", metadata.GetId(), metadata.GetName(), createdAt) + } + + return writer.Flush() + }) } diff --git a/pkg/cli/commands/components/get.go b/pkg/cli/commands/components/get.go deleted file mode 100644 index 4fa12b93c9..0000000000 --- a/pkg/cli/commands/components/get.go +++ /dev/null @@ -1,58 +0,0 @@ -package components - -import ( - "fmt" - "io" - - 
"github.com/superplanehq/superplane/pkg/cli/core" - "github.com/superplanehq/superplane/pkg/openapi_client" -) - -type getCommand struct{} - -func (c *getCommand) Execute(ctx core.CommandContext) error { - name := ctx.Args[0] - var component openapi_client.ComponentsComponent - - integrationName, componentName, scoped := core.ParseIntegrationScopedName(name) - if scoped { - integration, err := core.FindIntegrationDefinition(ctx, integrationName) - if err != nil { - return err - } - - resolvedComponent, err := findIntegrationComponent(integration, componentName) - if err != nil { - return err - } - component = resolvedComponent - } else { - response, _, err := ctx.API.ComponentAPI.ComponentsDescribeComponent(ctx.Context, name).Execute() - if err != nil { - return err - } - component = response.GetComponent() - } - - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - _, _ = fmt.Fprintf(stdout, "Name: %s\n", component.GetName()) - _, _ = fmt.Fprintf(stdout, "Label: %s\n", component.GetLabel()) - _, err := fmt.Fprintf(stdout, "Description: %s\n", component.GetDescription()) - return err - }) - } - - return ctx.Renderer.Render(component) -} - -func findIntegrationComponent(integration openapi_client.IntegrationsIntegrationDefinition, name string) (openapi_client.ComponentsComponent, error) { - for _, component := range integration.GetComponents() { - componentName := component.GetName() - if componentName == name || componentName == fmt.Sprintf("%s.%s", integration.GetName(), name) { - return component, nil - } - } - - return openapi_client.ComponentsComponent{}, fmt.Errorf("component %q not found in integration %q", name, integration.GetName()) -} diff --git a/pkg/cli/commands/components/list.go b/pkg/cli/commands/components/list.go deleted file mode 100644 index 196a460988..0000000000 --- a/pkg/cli/commands/components/list.go +++ /dev/null @@ -1,45 +0,0 @@ -package components - -import ( - "fmt" - "io" - "text/tabwriter" - - 
"github.com/superplanehq/superplane/pkg/cli/core" - "github.com/superplanehq/superplane/pkg/openapi_client" -) - -type listCommand struct { - from *string -} - -func (c *listCommand) Execute(ctx core.CommandContext) error { - components := []openapi_client.ComponentsComponent{} - - if c.from != nil && *c.from != "" { - integration, err := core.FindIntegrationDefinition(ctx, *c.from) - if err != nil { - return err - } - components = integration.GetComponents() - } else { - response, _, err := ctx.API.ComponentAPI.ComponentsListComponents(ctx.Context).Execute() - if err != nil { - return err - } - components = response.GetComponents() - } - - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) - _, _ = fmt.Fprintln(writer, "NAME\tLABEL\tDESCRIPTION") - for _, component := range components { - _, _ = fmt.Fprintf(writer, "%s\t%s\t%s\n", component.GetName(), component.GetLabel(), component.GetDescription()) - } - return writer.Flush() - }) - } - - return ctx.Renderer.Render(components) -} diff --git a/pkg/cli/commands/components/root.go b/pkg/cli/commands/components/root.go deleted file mode 100644 index f24f2f4c8c..0000000000 --- a/pkg/cli/commands/components/root.go +++ /dev/null @@ -1,34 +0,0 @@ -package components - -import ( - "github.com/spf13/cobra" - "github.com/superplanehq/superplane/pkg/cli/core" -) - -func NewCommand(options core.BindOptions) *cobra.Command { - root := &cobra.Command{ - Use: "components", - Short: "Manage components", - } - - var from string - listCmd := &cobra.Command{ - Use: "list", - Short: "List components", - Args: cobra.NoArgs, - } - listCmd.Flags().StringVar(&from, "from", "", "integration name") - core.Bind(listCmd, &listCommand{from: &from}, options) - - getCmd := &cobra.Command{ - Use: "get ", - Short: "Get a component", - Args: cobra.ExactArgs(1), - } - core.Bind(getCmd, &getCommand{}, options) - - root.AddCommand(listCmd) - 
root.AddCommand(getCmd) - - return root -} diff --git a/pkg/cli/commands/index/components.go b/pkg/cli/commands/index/components.go new file mode 100644 index 0000000000..f63fbf0490 --- /dev/null +++ b/pkg/cli/commands/index/components.go @@ -0,0 +1,130 @@ +package index + +import ( + "fmt" + "io" + "strings" + "text/tabwriter" + + "github.com/spf13/cobra" + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +func newComponentsCommand(options core.BindOptions) *cobra.Command { + var from string + var name string + + cmd := &cobra.Command{ + Use: "components", + Short: "List or describe available components", + Args: cobra.NoArgs, + } + cmd.Flags().StringVar(&from, "from", "", "integration definition name") + cmd.Flags().StringVar(&name, "name", "", "component name") + core.Bind(cmd, &componentsCommand{from: &from, name: &name}, options) + + return cmd +} + +type componentsCommand struct { + from *string + name *string +} + +func (c *componentsCommand) Execute(ctx core.CommandContext) error { + name := strings.TrimSpace(*c.name) + from := strings.TrimSpace(*c.from) + + if name != "" { + return c.getComponentByName(ctx, name) + } + + components, err := c.listComponents(ctx, from) + if err != nil { + return err + } + + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(components) + } + + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) + _, _ = fmt.Fprintln(writer, "NAME\tLABEL\tDESCRIPTION") + for _, component := range components { + _, _ = fmt.Fprintf(writer, "%s\t%s\t%s\n", component.GetName(), component.GetLabel(), component.GetDescription()) + } + return writer.Flush() + }) +} + +func (c *componentsCommand) getComponentByName(ctx core.CommandContext, name string) error { + component, err := c.findComponentByName(ctx, name) + if err != nil { + return err + } + + if !ctx.Renderer.IsText() { + return 
ctx.Renderer.Render(component) + } + + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = fmt.Fprintf(stdout, "Name: %s\n", component.GetName()) + _, _ = fmt.Fprintf(stdout, "Label: %s\n", component.GetLabel()) + _, err := fmt.Fprintf(stdout, "Description: %s\n", component.GetDescription()) + return err + }) +} + +func (c *componentsCommand) listComponents(ctx core.CommandContext, from string) ([]openapi_client.ComponentsComponent, error) { + // + // if --from is used, we grab the components from the integration + // + if from != "" { + integration, err := core.FindIntegrationDefinition(ctx, from) + if err != nil { + return nil, err + } + + return integration.GetComponents(), nil + } + + // + // Otherwise, we list core components. + // + response, _, err := ctx.API.ComponentAPI.ComponentsListComponents(ctx.Context).Execute() + if err != nil { + return nil, err + } + return response.GetComponents(), nil +} + +func (c *componentsCommand) findComponentByName(ctx core.CommandContext, name string) (openapi_client.ComponentsComponent, error) { + integrationName, componentName, scoped := core.ParseIntegrationScopedName(name) + if scoped { + integration, err := core.FindIntegrationDefinition(ctx, integrationName) + if err != nil { + return openapi_client.ComponentsComponent{}, err + } + return findIntegrationComponent(integration, componentName) + } + + response, _, err := ctx.API.ComponentAPI.ComponentsDescribeComponent(ctx.Context, name).Execute() + if err != nil { + return openapi_client.ComponentsComponent{}, err + } + + return response.GetComponent(), nil +} + +func findIntegrationComponent(integration openapi_client.IntegrationsIntegrationDefinition, name string) (openapi_client.ComponentsComponent, error) { + for _, component := range integration.GetComponents() { + componentName := component.GetName() + if componentName == name || componentName == fmt.Sprintf("%s.%s", integration.GetName(), name) { + return component, nil + } + } + + return 
openapi_client.ComponentsComponent{}, fmt.Errorf("component %q not found in integration %q", name, integration.GetName()) +} diff --git a/pkg/cli/commands/index/integrations.go b/pkg/cli/commands/index/integrations.go new file mode 100644 index 0000000000..74b2054eaa --- /dev/null +++ b/pkg/cli/commands/index/integrations.go @@ -0,0 +1,74 @@ +package index + +import ( + "fmt" + "io" + "strings" + "text/tabwriter" + + "github.com/spf13/cobra" + "github.com/superplanehq/superplane/pkg/cli/core" +) + +func newIntegrationsCommand(options core.BindOptions) *cobra.Command { + var name string + + cmd := &cobra.Command{ + Use: "integrations", + Short: "List or describe available integration definitions", + Args: cobra.NoArgs, + } + cmd.Flags().StringVar(&name, "name", "", "integration definition name") + core.Bind(cmd, &integrationsCommand{name: &name}, options) + + return cmd +} + +type integrationsCommand struct { + name *string +} + +func (c *integrationsCommand) Execute(ctx core.CommandContext) error { + name := strings.TrimSpace(*c.name) + if name != "" { + return c.getIntegrationByName(ctx, name) + } + + response, _, err := ctx.API.IntegrationAPI.IntegrationsListIntegrations(ctx.Context).Execute() + if err != nil { + return err + } + + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(response.GetIntegrations()) + } + + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) + _, _ = fmt.Fprintln(writer, "NAME\tLABEL\tDESCRIPTION") + for _, integration := range response.GetIntegrations() { + _, _ = fmt.Fprintf(writer, "%s\t%s\t%s\n", integration.GetName(), integration.GetLabel(), integration.GetDescription()) + } + return writer.Flush() + }) +} + +func (c *integrationsCommand) getIntegrationByName(ctx core.CommandContext, name string) error { + integration, err := core.FindIntegrationDefinition(ctx, name) + if err != nil { + return err + } + + if !ctx.Renderer.IsText() { + return 
ctx.Renderer.Render(integration) + } + + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = fmt.Fprintf(stdout, "Name: %s\n", integration.GetName()) + _, _ = fmt.Fprintf(stdout, "Label: %s\n", integration.GetLabel()) + _, _ = fmt.Fprintf(stdout, "Description: %s\n", integration.GetDescription()) + _, _ = fmt.Fprintf(stdout, "Components: %d\n", len(integration.GetComponents())) + _, err := fmt.Fprintf(stdout, "Triggers: %d\n", len(integration.GetTriggers())) + return err + }) +} diff --git a/pkg/cli/commands/index/root.go b/pkg/cli/commands/index/root.go new file mode 100644 index 0000000000..948d532cbb --- /dev/null +++ b/pkg/cli/commands/index/root.go @@ -0,0 +1,19 @@ +package index + +import ( + "github.com/spf13/cobra" + "github.com/superplanehq/superplane/pkg/cli/core" +) + +func NewCommand(options core.BindOptions) *cobra.Command { + root := &cobra.Command{ + Use: "index", + Short: "Discover available integrations, triggers, and components", + } + + root.AddCommand(newIntegrationsCommand(options)) + root.AddCommand(newTriggersCommand(options)) + root.AddCommand(newComponentsCommand(options)) + + return root +} diff --git a/pkg/cli/commands/index/triggers.go b/pkg/cli/commands/index/triggers.go new file mode 100644 index 0000000000..a9cad555b0 --- /dev/null +++ b/pkg/cli/commands/index/triggers.go @@ -0,0 +1,133 @@ +package index + +import ( + "fmt" + "io" + "strings" + "text/tabwriter" + + "github.com/spf13/cobra" + "github.com/superplanehq/superplane/pkg/cli/core" + "github.com/superplanehq/superplane/pkg/openapi_client" +) + +func newTriggersCommand(options core.BindOptions) *cobra.Command { + var from string + var name string + + cmd := &cobra.Command{ + Use: "triggers", + Short: "List or describe available triggers", + Args: cobra.NoArgs, + } + cmd.Flags().StringVar(&from, "from", "", "integration definition name") + cmd.Flags().StringVar(&name, "name", "", "trigger name") + core.Bind(cmd, &triggersCommand{from: &from, name: &name}, 
options) + + return cmd +} + +type triggersCommand struct { + from *string + name *string +} + +func (c *triggersCommand) Execute(ctx core.CommandContext) error { + name := strings.TrimSpace(*c.name) + from := strings.TrimSpace(*c.from) + + if name != "" { + return c.getTriggerByName(ctx, name) + } + + triggers, err := c.listTriggers(ctx, from) + if err != nil { + return err + } + + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(triggers) + } + + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) + _, _ = fmt.Fprintln(writer, "NAME\tLABEL\tDESCRIPTION") + for _, trigger := range triggers { + _, _ = fmt.Fprintf(writer, "%s\t%s\t%s\n", trigger.GetName(), trigger.GetLabel(), trigger.GetDescription()) + } + return writer.Flush() + }) +} + +func (c *triggersCommand) getTriggerByName(ctx core.CommandContext, name string) error { + trigger, err := c.findTriggerByName(ctx, name) + if err != nil { + return err + } + + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(trigger) + } + + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + _, _ = fmt.Fprintf(stdout, "Name: %s\n", trigger.GetName()) + _, _ = fmt.Fprintf(stdout, "Label: %s\n", trigger.GetLabel()) + _, err := fmt.Fprintf(stdout, "Description: %s\n", trigger.GetDescription()) + return err + }) +} + +func (c *triggersCommand) listTriggers(ctx core.CommandContext, from string) ([]openapi_client.TriggersTrigger, error) { + // + // if --from is used, we grab the triggers from the integration + // + if from != "" { + integration, err := core.FindIntegrationDefinition(ctx, from) + if err != nil { + return nil, err + } + + return integration.GetTriggers(), nil + } + + // + // Otherwise, we list core triggers. 
+ // + response, _, err := ctx.API.TriggerAPI.TriggersListTriggers(ctx.Context).Execute() + if err != nil { + return nil, err + } + + return response.GetTriggers(), nil +} + +func (c *triggersCommand) findTriggerByName(ctx core.CommandContext, name string) (openapi_client.TriggersTrigger, error) { + integrationName, triggerName, scoped := core.ParseIntegrationScopedName(name) + if scoped { + integration, err := core.FindIntegrationDefinition(ctx, integrationName) + if err != nil { + return openapi_client.TriggersTrigger{}, err + } + return findIntegrationTrigger(integration, triggerName) + } + + response, _, err := ctx.API.TriggerAPI.TriggersDescribeTrigger(ctx.Context, name).Execute() + if err != nil { + return openapi_client.TriggersTrigger{}, err + } + return response.GetTrigger(), nil +} + +func findIntegrationTrigger( + integration openapi_client.IntegrationsIntegrationDefinition, + name string, +) (openapi_client.TriggersTrigger, error) { + for _, trigger := range integration.GetTriggers() { + triggerName := trigger.GetName() + if triggerName == name || triggerName == fmt.Sprintf("%s.%s", integration.GetName(), name) { + return trigger, nil + } + } + + return openapi_client.TriggersTrigger{}, fmt.Errorf("trigger %q not found in integration %q", name, integration.GetName()) +} diff --git a/pkg/cli/commands/integrations/get.go b/pkg/cli/commands/integrations/get.go index 8bd4408036..e23c91e438 100644 --- a/pkg/cli/commands/integrations/get.go +++ b/pkg/cli/commands/integrations/get.go @@ -10,21 +10,35 @@ import ( type getCommand struct{} func (c *getCommand) Execute(ctx core.CommandContext) error { - integration, err := core.FindIntegrationDefinition(ctx, ctx.Args[0]) + me, _, err := ctx.API.MeAPI.MeMe(ctx.Context).Execute() if err != nil { return err } + if !me.HasOrganizationId() { + return fmt.Errorf("organization id not found for authenticated user") + } - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - _, _ = 
fmt.Fprintf(stdout, "Name: %s\n", integration.GetName()) - _, _ = fmt.Fprintf(stdout, "Label: %s\n", integration.GetLabel()) - _, _ = fmt.Fprintf(stdout, "Description: %s\n", integration.GetDescription()) - _, _ = fmt.Fprintf(stdout, "Components: %d\n", len(integration.GetComponents())) - _, err := fmt.Fprintf(stdout, "Triggers: %d\n", len(integration.GetTriggers())) - return err - }) + response, _, err := ctx.API.OrganizationAPI. + OrganizationsDescribeIntegration(ctx.Context, me.GetOrganizationId(), ctx.Args[0]). + Execute() + if err != nil { + return err } + integration := response.GetIntegration() - return ctx.Renderer.Render(integration) + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(integration) + } + + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + metadata := integration.GetMetadata() + spec := integration.GetSpec() + status := integration.GetStatus() + + _, _ = fmt.Fprintf(stdout, "ID: %s\n", metadata.GetId()) + _, _ = fmt.Fprintf(stdout, "Name: %s\n", metadata.GetName()) + _, _ = fmt.Fprintf(stdout, "Integration: %s\n", spec.GetIntegrationName()) + _, err := fmt.Fprintf(stdout, "State: %s\n", status.GetState()) + return err + }) } diff --git a/pkg/cli/commands/integrations/list.go b/pkg/cli/commands/integrations/list.go index a007cb0682..8967a9a4b7 100644 --- a/pkg/cli/commands/integrations/list.go +++ b/pkg/cli/commands/integrations/list.go @@ -10,35 +10,9 @@ import ( ) type listCommand struct { - connected *bool } func (c *listCommand) Execute(ctx core.CommandContext) error { - if c.connected != nil && *c.connected { - return c.executeConnected(ctx) - } - - response, _, err := ctx.API.IntegrationAPI.IntegrationsListIntegrations(ctx.Context).Execute() - if err != nil { - return err - } - - integrations := response.GetIntegrations() - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) - _, _ = fmt.Fprintln(writer, 
"NAME\tLABEL\tDESCRIPTION") - for _, integration := range integrations { - _, _ = fmt.Fprintf(writer, "%s\t%s\t%s\n", integration.GetName(), integration.GetLabel(), integration.GetDescription()) - } - return writer.Flush() - }) - } - - return ctx.Renderer.Render(integrations) -} - -func (c *listCommand) executeConnected(ctx core.CommandContext) error { me, _, err := ctx.API.MeAPI.MeMe(ctx.Context).Execute() if err != nil { return err @@ -63,38 +37,39 @@ func (c *listCommand) executeConnected(ctx core.CommandContext) error { } connected := connectedResponse.GetIntegrations() - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) - _, _ = fmt.Fprintln(writer, "ID\tNAME\tINTEGRATION\tLABEL\tDESCRIPTION\tSTATE") - for _, integration := range connected { - metadata := integration.GetMetadata() - spec := integration.GetSpec() - status := integration.GetStatus() - integrationName := spec.GetIntegrationName() - definition, found := integrationsByName[integrationName] + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(connected) + } - label := "" - description := "" - if found { - label = definition.GetLabel() - description = definition.GetDescription() - } + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) + _, _ = fmt.Fprintln(writer, "ID\tNAME\tINTEGRATION\tLABEL\tDESCRIPTION\tSTATE") + for _, integration := range connected { + metadata := integration.GetMetadata() + spec := integration.GetSpec() + status := integration.GetStatus() + integrationName := spec.GetIntegrationName() + definition, found := integrationsByName[integrationName] - _, _ = fmt.Fprintf( - writer, - "%s\t%s\t%s\t%s\t%s\t%s\n", - metadata.GetId(), - metadata.GetName(), - integrationName, - label, - description, - status.GetState(), - ) + label := "" + description := "" + if found { + label = definition.GetLabel() + description = 
definition.GetDescription() } - return writer.Flush() - }) - } - return ctx.Renderer.Render(connected) + _, _ = fmt.Fprintf( + writer, + "%s\t%s\t%s\t%s\t%s\t%s\n", + metadata.GetId(), + metadata.GetName(), + integrationName, + label, + description, + status.GetState(), + ) + } + + return writer.Flush() + }) } diff --git a/pkg/cli/commands/integrations/list_resources.go b/pkg/cli/commands/integrations/list_resources.go index 0747cf8560..e95ab5c3d3 100644 --- a/pkg/cli/commands/integrations/list_resources.go +++ b/pkg/cli/commands/integrations/list_resources.go @@ -68,27 +68,27 @@ func (c *listResourcesCommand) Execute(ctx core.CommandContext) error { return err } - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) - _, _ = fmt.Fprintln(writer, "INTEGRATION_ID\tINTEGRATION_NAME\tINTEGRATION\tTYPE\tNAME\tID") - for _, resource := range response.Resources { - _, _ = fmt.Fprintf( - writer, - "%s\t%s\t%s\t%s\t%s\t%s\n", - metadata.GetId(), - metadata.GetName(), - spec.GetIntegrationName(), - resource.GetType(), - resource.GetName(), - resource.GetId(), - ) - } - return writer.Flush() - }) - } - - return ctx.Renderer.Render(response.Resources) + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(response.Resources) + } + + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) + _, _ = fmt.Fprintln(writer, "INTEGRATION_ID\tINTEGRATION_NAME\tINTEGRATION\tTYPE\tNAME\tID") + for _, resource := range response.Resources { + _, _ = fmt.Fprintf( + writer, + "%s\t%s\t%s\t%s\t%s\t%s\n", + metadata.GetId(), + metadata.GetName(), + spec.GetIntegrationName(), + resource.GetType(), + resource.GetName(), + resource.GetId(), + ) + } + return writer.Flush() + }) } func parseIntegrationResourceParametersFlag(raw string) (map[string]string, error) { diff --git a/pkg/cli/commands/integrations/root.go 
b/pkg/cli/commands/integrations/root.go index b7a0d94e27..9eeb73daf7 100644 --- a/pkg/cli/commands/integrations/root.go +++ b/pkg/cli/commands/integrations/root.go @@ -8,21 +8,19 @@ import ( func NewCommand(options core.BindOptions) *cobra.Command { root := &cobra.Command{ Use: "integrations", - Short: "Manage integrations", + Short: "Manage connected integrations", } - var connected bool listCmd := &cobra.Command{ Use: "list", - Short: "List integrations", + Short: "List connected integrations", Args: cobra.NoArgs, } - listCmd.Flags().BoolVar(&connected, "connected", false, "list connected integrations for the authenticated organization") - core.Bind(listCmd, &listCommand{connected: &connected}, options) + core.Bind(listCmd, &listCommand{}, options) getCmd := &cobra.Command{ - Use: "get ", - Short: "Get integration details", + Use: "get ", + Short: "Get connected integration details", Args: cobra.ExactArgs(1), } core.Bind(getCmd, &getCommand{}, options) diff --git a/pkg/cli/commands/secrets/create.go b/pkg/cli/commands/secrets/create.go index 64f3b5cfff..efe9cdab04 100644 --- a/pkg/cli/commands/secrets/create.go +++ b/pkg/cli/commands/secrets/create.go @@ -44,12 +44,11 @@ func (c *createCommand) Execute(ctx core.CommandContext) error { } createdSecret := response.GetSecret() - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - _, _ = fmt.Fprintln(stdout, "Secret created") - return renderSecretText(stdout, createdSecret) - }) + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(createdSecret) } - return ctx.Renderer.Render(createdSecret) + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + return renderSecretText(stdout, createdSecret) + }) } diff --git a/pkg/cli/commands/secrets/get.go b/pkg/cli/commands/secrets/get.go index e58d83c55c..7d8ff8aa72 100644 --- a/pkg/cli/commands/secrets/get.go +++ b/pkg/cli/commands/secrets/get.go @@ -24,11 +24,11 @@ func (c *getCommand) Execute(ctx core.CommandContext) 
error { } secret := response.GetSecret() - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - return renderSecretText(stdout, secret) - }) + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(secret) } - return ctx.Renderer.Render(secret) + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + return renderSecretText(stdout, secret) + }) } diff --git a/pkg/cli/commands/secrets/list.go b/pkg/cli/commands/secrets/list.go index 8400a04860..3f5f24c432 100644 --- a/pkg/cli/commands/secrets/list.go +++ b/pkg/cli/commands/secrets/list.go @@ -24,11 +24,11 @@ func (c *listCommand) Execute(ctx core.CommandContext) error { } secrets := response.GetSecrets() - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - return renderSecretListText(stdout, secrets) - }) + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(secrets) } - return ctx.Renderer.Render(secrets) + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + return renderSecretListText(stdout, secrets) + }) } diff --git a/pkg/cli/commands/secrets/update.go b/pkg/cli/commands/secrets/update.go index 0ddbfcc021..db41c15b86 100644 --- a/pkg/cli/commands/secrets/update.go +++ b/pkg/cli/commands/secrets/update.go @@ -50,12 +50,11 @@ func (c *updateCommand) Execute(ctx core.CommandContext) error { } updatedSecret := response.GetSecret() - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - _, _ = fmt.Fprintln(stdout, "Secret updated") - return renderSecretText(stdout, updatedSecret) - }) + if !ctx.Renderer.IsText() { + return ctx.Renderer.Render(updatedSecret) } - return ctx.Renderer.Render(updatedSecret) + return ctx.Renderer.RenderText(func(stdout io.Writer) error { + return renderSecretText(stdout, updatedSecret) + }) } diff --git a/pkg/cli/commands/triggers/get.go b/pkg/cli/commands/triggers/get.go deleted file mode 100644 index 0e9c1bb808..0000000000 --- 
a/pkg/cli/commands/triggers/get.go +++ /dev/null @@ -1,58 +0,0 @@ -package triggers - -import ( - "fmt" - "io" - - "github.com/superplanehq/superplane/pkg/cli/core" - "github.com/superplanehq/superplane/pkg/openapi_client" -) - -type getCommand struct{} - -func (c *getCommand) Execute(ctx core.CommandContext) error { - name := ctx.Args[0] - var trigger openapi_client.TriggersTrigger - - integrationName, triggerName, scoped := core.ParseIntegrationScopedName(name) - if scoped { - integration, err := core.FindIntegrationDefinition(ctx, integrationName) - if err != nil { - return err - } - - resolvedTrigger, err := findTrigger(integration, triggerName) - if err != nil { - return err - } - trigger = resolvedTrigger - } else { - response, _, err := ctx.API.TriggerAPI.TriggersDescribeTrigger(ctx.Context, name).Execute() - if err != nil { - return err - } - trigger = response.GetTrigger() - } - - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - _, _ = fmt.Fprintf(stdout, "Name: %s\n", trigger.GetName()) - _, _ = fmt.Fprintf(stdout, "Label: %s\n", trigger.GetLabel()) - _, err := fmt.Fprintf(stdout, "Description: %s\n", trigger.GetDescription()) - return err - }) - } - - return ctx.Renderer.Render(trigger) -} - -func findTrigger(integration openapi_client.IntegrationsIntegrationDefinition, name string) (openapi_client.TriggersTrigger, error) { - for _, trigger := range integration.GetTriggers() { - triggerName := trigger.GetName() - if triggerName == name || triggerName == fmt.Sprintf("%s.%s", integration.GetName(), name) { - return trigger, nil - } - } - - return openapi_client.TriggersTrigger{}, fmt.Errorf("trigger %q not found in integration %q", name, integration.GetName()) -} diff --git a/pkg/cli/commands/triggers/list.go b/pkg/cli/commands/triggers/list.go deleted file mode 100644 index 26d27feb46..0000000000 --- a/pkg/cli/commands/triggers/list.go +++ /dev/null @@ -1,45 +0,0 @@ -package triggers - -import ( - "fmt" - "io" - 
"text/tabwriter" - - "github.com/superplanehq/superplane/pkg/cli/core" - "github.com/superplanehq/superplane/pkg/openapi_client" -) - -type listCommand struct { - from *string -} - -func (c *listCommand) Execute(ctx core.CommandContext) error { - triggers := []openapi_client.TriggersTrigger{} - - if c.from != nil && *c.from != "" { - integration, err := core.FindIntegrationDefinition(ctx, *c.from) - if err != nil { - return err - } - triggers = integration.GetTriggers() - } else { - response, _, err := ctx.API.TriggerAPI.TriggersListTriggers(ctx.Context).Execute() - if err != nil { - return err - } - triggers = response.GetTriggers() - } - - if ctx.Renderer.IsText() { - return ctx.Renderer.RenderText(func(stdout io.Writer) error { - writer := tabwriter.NewWriter(stdout, 0, 8, 2, ' ', 0) - _, _ = fmt.Fprintln(writer, "NAME\tLABEL\tDESCRIPTION") - for _, trigger := range triggers { - _, _ = fmt.Fprintf(writer, "%s\t%s\t%s\n", trigger.GetName(), trigger.GetLabel(), trigger.GetDescription()) - } - return writer.Flush() - }) - } - - return ctx.Renderer.Render(triggers) -} diff --git a/pkg/cli/commands/triggers/root.go b/pkg/cli/commands/triggers/root.go deleted file mode 100644 index 6b98d1ad1a..0000000000 --- a/pkg/cli/commands/triggers/root.go +++ /dev/null @@ -1,34 +0,0 @@ -package triggers - -import ( - "github.com/spf13/cobra" - "github.com/superplanehq/superplane/pkg/cli/core" -) - -func NewCommand(options core.BindOptions) *cobra.Command { - root := &cobra.Command{ - Use: "triggers", - Short: "Manage triggers", - } - - var from string - listCmd := &cobra.Command{ - Use: "list", - Short: "List triggers", - Args: cobra.NoArgs, - } - listCmd.Flags().StringVar(&from, "from", "", "integration name") - core.Bind(listCmd, &listCommand{from: &from}, options) - - getCmd := &cobra.Command{ - Use: "get ", - Short: "Get a trigger", - Args: cobra.ExactArgs(1), - } - core.Bind(getCmd, &getCommand{}, options) - - root.AddCommand(listCmd) - root.AddCommand(getCmd) - - return 
root -} diff --git a/pkg/cli/root.go b/pkg/cli/root.go index d8de18fa67..f14c0de9ef 100644 --- a/pkg/cli/root.go +++ b/pkg/cli/root.go @@ -11,11 +11,10 @@ import ( "github.com/spf13/cobra" "github.com/spf13/viper" canvases "github.com/superplanehq/superplane/pkg/cli/commands/canvases" - components "github.com/superplanehq/superplane/pkg/cli/commands/components" config "github.com/superplanehq/superplane/pkg/cli/commands/config" + index "github.com/superplanehq/superplane/pkg/cli/commands/index" integrations "github.com/superplanehq/superplane/pkg/cli/commands/integrations" secrets "github.com/superplanehq/superplane/pkg/cli/commands/secrets" - triggers "github.com/superplanehq/superplane/pkg/cli/commands/triggers" "github.com/superplanehq/superplane/pkg/cli/core" ) @@ -52,8 +51,7 @@ func init() { options := defaultBindOptions() RootCmd.AddCommand(canvases.NewCommand(options)) - RootCmd.AddCommand(components.NewCommand(options)) - RootCmd.AddCommand(triggers.NewCommand(options)) + RootCmd.AddCommand(index.NewCommand(options)) RootCmd.AddCommand(integrations.NewCommand(options)) RootCmd.AddCommand(secrets.NewCommand(options)) RootCmd.AddCommand(config.NewCommand(options)) From b8431c81605106a404fed0dac9d0aea795352a6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Igor=20=C5=A0ar=C4=8Devi=C4=87?= Date: Mon, 16 Feb 2026 21:39:58 +0100 Subject: [PATCH 119/160] feat: Introduce service accounts (#3133) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit fixes https://github.com/superplanehq/superplane/issues/3130 Adds service accounts: non-human identities for programmatic access, scoped to an organization. ### Key decision Should service accounts live under the users table, or in a separate service_accounts table. I weighted the pros/cons, and came to the conclusion that keeping them in the `users` table is the better option for the long term. 
I wrote down my reasoning in the [prd](https://github.com/superplanehq/superplane/pull/3133/changes#diff-da05a018162503f0566e6d770896a1ab8d0af9a9d5046cfb48a6dc8646b44a64R32-R55) ### Leftover tasks - [x] Record a video - [x] Too many changes in proto generated files (probably my cli is outdated) - [x] Check logic about who can create, delete, update service accounts - [x] Verify that e2e tests are sane --------- Signed-off-by: Igor Šarčević Signed-off-by: Muhammad Fuzail Zubari --- Makefile | 4 +- api/swagger/superplane.swagger.json | 314 +++++++ ...260216151135_add-service-accounts.down.sql | 0 ...20260216151135_add-service-accounts.up.sql | 27 + db/structure.sql | 45 +- docs/prd/service-accounts.md | 252 ++++++ pkg/authentication/authentication_test.go | 6 +- pkg/authorization/interceptor.go | 9 + .../actions/auth/add_user_to_group_test.go | 6 +- pkg/grpc/actions/auth/assign_role.go | 5 + pkg/grpc/actions/auth/assign_role_test.go | 2 +- pkg/grpc/actions/auth/common.go | 73 +- pkg/grpc/actions/auth/delete_role_test.go | 2 +- pkg/grpc/actions/auth/describe_role_test.go | 4 +- pkg/grpc/actions/auth/list_group_users.go | 27 +- pkg/grpc/actions/auth/list_users.go | 4 +- pkg/grpc/actions/auth/list_users_test.go | 2 +- .../auth/remove_user_from_group_test.go | 2 +- pkg/grpc/actions/me/get_user.go | 2 +- pkg/grpc/actions/me/regenerate_token.go | 4 + .../organizations/create_invitation.go | 4 +- .../organizations/create_invitation_test.go | 4 +- .../actions/organizations/remove_user_test.go | 2 +- pkg/grpc/actions/serviceaccounts/common.go | 28 + .../serviceaccounts/create_service_account.go | 93 ++ .../serviceaccounts/delete_service_account.go | 58 ++ .../describe_service_account.go | 40 + .../serviceaccounts/list_service_accounts.go | 37 + .../serviceaccounts/regenerate_token.go | 51 ++ .../serviceaccounts/update_service_account.go | 56 ++ pkg/grpc/server.go | 4 + pkg/grpc/service_accounts_service.go | 44 + pkg/grpc/users_service.go | 2 +- pkg/models/account_test.go | 20 
+- pkg/models/constants.go | 4 + pkg/models/user.go | 84 +- pkg/openapi_client/.openapi-generator/FILES | 19 + pkg/openapi_client/api_service_accounts.go | 721 ++++++++++++++++ pkg/openapi_client/api_users.go | 17 +- pkg/openapi_client/client.go | 3 + ...accounts_create_service_account_request.go | 197 +++++ ...ccounts_create_service_account_response.go | 161 ++++ ...ounts_describe_service_account_response.go | 125 +++ ...accounts_list_service_accounts_response.go | 125 +++ ...generate_service_account_token_response.go | 125 +++ .../model_service_accounts_service_account.go | 378 ++++++++ ...ce_accounts_update_service_account_body.go | 161 ++++ ...ccounts_update_service_account_response.go | 125 +++ .../service_accounts/service_accounts.pb.go | 816 ++++++++++++++++++ .../service_accounts.pb.gw.go | 526 +++++++++++ .../service_accounts_grpc.pb.go | 309 +++++++ pkg/protos/users/users.pb.go | 23 +- pkg/public/server.go | 7 + pkg/public/server_test.go | 2 +- pkg/workers/contexts/auth_context.go | 4 +- pkg/workers/invitation_email_consumer.go | 2 +- pkg/workers/node_executor_test.go | 2 +- pkg/workers/notification_email_consumer.go | 2 +- .../notification_email_consumer_test.go | 4 +- protos/service_accounts.proto | 156 ++++ protos/users.proto | 1 + rbac/rbac_org_policy.csv | 4 + test/e2e/service_accounts_test.go | 343 ++++++++ web_src/src/api-client/index.ts | 46 + web_src/src/api-client/sdk.gen.ts | 123 +++ web_src/src/api-client/types.gen.ts | 222 +++++ .../src/components/OrganizationMenuButton.tsx | 7 + web_src/src/hooks/useOrganizationData.ts | 12 +- web_src/src/hooks/useServiceAccounts.ts | 124 +++ .../settings/AddMembersSection.tsx | 4 +- .../settings/GroupMembersPage.tsx | 2 +- .../settings/ServiceAccountDetail.tsx | 325 +++++++ .../organization/settings/ServiceAccounts.tsx | 373 ++++++++ .../src/pages/organization/settings/index.tsx | 44 +- 74 files changed, 6842 insertions(+), 119 deletions(-) create mode 100644 
db/migrations/20260216151135_add-service-accounts.down.sql create mode 100644 db/migrations/20260216151135_add-service-accounts.up.sql create mode 100644 docs/prd/service-accounts.md create mode 100644 pkg/grpc/actions/serviceaccounts/common.go create mode 100644 pkg/grpc/actions/serviceaccounts/create_service_account.go create mode 100644 pkg/grpc/actions/serviceaccounts/delete_service_account.go create mode 100644 pkg/grpc/actions/serviceaccounts/describe_service_account.go create mode 100644 pkg/grpc/actions/serviceaccounts/list_service_accounts.go create mode 100644 pkg/grpc/actions/serviceaccounts/regenerate_token.go create mode 100644 pkg/grpc/actions/serviceaccounts/update_service_account.go create mode 100644 pkg/grpc/service_accounts_service.go create mode 100644 pkg/openapi_client/api_service_accounts.go create mode 100644 pkg/openapi_client/model_service_accounts_create_service_account_request.go create mode 100644 pkg/openapi_client/model_service_accounts_create_service_account_response.go create mode 100644 pkg/openapi_client/model_service_accounts_describe_service_account_response.go create mode 100644 pkg/openapi_client/model_service_accounts_list_service_accounts_response.go create mode 100644 pkg/openapi_client/model_service_accounts_regenerate_service_account_token_response.go create mode 100644 pkg/openapi_client/model_service_accounts_service_account.go create mode 100644 pkg/openapi_client/model_service_accounts_update_service_account_body.go create mode 100644 pkg/openapi_client/model_service_accounts_update_service_account_response.go create mode 100644 pkg/protos/service_accounts/service_accounts.pb.go create mode 100644 pkg/protos/service_accounts/service_accounts.pb.gw.go create mode 100644 pkg/protos/service_accounts/service_accounts_grpc.pb.go create mode 100644 protos/service_accounts.proto create mode 100644 test/e2e/service_accounts_test.go create mode 100644 web_src/src/hooks/useServiceAccounts.ts create mode 100644 
web_src/src/pages/organization/settings/ServiceAccountDetail.tsx create mode 100644 web_src/src/pages/organization/settings/ServiceAccounts.tsx diff --git a/Makefile b/Makefile index b8e8674250..273a701321 100644 --- a/Makefile +++ b/Makefile @@ -227,8 +227,8 @@ gen.components.local.update: gen.components.docs rm -rf ../docs/src/content/docs/components cp -R docs/components ../docs/src/content/docs/components -MODULES := authorization,organizations,integrations,secrets,users,groups,roles,me,configuration,components,triggers,widgets,blueprints,canvases -REST_API_MODULES := authorization,organizations,integrations,secrets,users,groups,roles,me,configuration,components,triggers,widgets,blueprints,canvases +MODULES := authorization,organizations,integrations,secrets,users,groups,roles,me,configuration,components,triggers,widgets,blueprints,canvases,service_accounts +REST_API_MODULES := authorization,organizations,integrations,secrets,users,groups,roles,me,configuration,components,triggers,widgets,blueprints,canvases,service_accounts pb.gen: docker compose $(DOCKER_COMPOSE_OPTS) run --rm --no-deps app /app/scripts/protoc.sh $(MODULES) docker compose $(DOCKER_COMPOSE_OPTS) run --rm --no-deps app /app/scripts/protoc_gateway.sh $(REST_API_MODULES) diff --git a/api/swagger/superplane.swagger.json b/api/swagger/superplane.swagger.json index cc3be53aed..5237aca017 100644 --- a/api/swagger/superplane.swagger.json +++ b/api/swagger/superplane.swagger.json @@ -45,6 +45,9 @@ }, { "name": "Canvases" + }, + { + "name": "ServiceAccounts" } ], "schemes": [ @@ -2757,6 +2760,202 @@ ] } }, + "/api/v1/service-accounts": { + "get": { + "summary": "List service accounts", + "description": "Returns all service accounts in the organization", + "operationId": "ServiceAccounts_ListServiceAccounts", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/ServiceAccountsListServiceAccountsResponse" + } + }, + "default": { + "description": "An 
unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "tags": [ + "ServiceAccounts" + ] + }, + "post": { + "summary": "Create a service account", + "description": "Creates a new service account in the organization", + "operationId": "ServiceAccounts_CreateServiceAccount", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/ServiceAccountsCreateServiceAccountResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/ServiceAccountsCreateServiceAccountRequest" + } + } + ], + "tags": [ + "ServiceAccounts" + ] + } + }, + "/api/v1/service-accounts/{id}": { + "get": { + "summary": "Describe a service account", + "description": "Returns details of a specific service account", + "operationId": "ServiceAccounts_DescribeServiceAccount", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/ServiceAccountsDescribeServiceAccountResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "ServiceAccounts" + ] + }, + "delete": { + "summary": "Delete a service account", + "description": "Deletes a service account and removes its RBAC policies", + "operationId": "ServiceAccounts_DeleteServiceAccount", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/ServiceAccountsDeleteServiceAccountResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + 
{ + "name": "id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "ServiceAccounts" + ] + }, + "patch": { + "summary": "Update a service account", + "description": "Updates the name or description of a service account", + "operationId": "ServiceAccounts_UpdateServiceAccount", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/ServiceAccountsUpdateServiceAccountResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/ServiceAccountsUpdateServiceAccountBody" + } + } + ], + "tags": [ + "ServiceAccounts" + ] + } + }, + "/api/v1/service-accounts/{id}/token": { + "post": { + "summary": "Regenerate service account token", + "description": "Regenerates the API token for a service account", + "operationId": "ServiceAccounts_RegenerateServiceAccountToken", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/ServiceAccountsRegenerateServiceAccountTokenResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/ServiceAccountsRegenerateServiceAccountTokenBody" + } + } + ], + "tags": [ + "ServiceAccounts" + ] + } + }, "/api/v1/triggers": { "get": { "summary": "List triggers", @@ -2849,6 +3048,12 @@ "in": "query", "required": false, "type": "string" + }, + { + "name": "includeServiceAccounts", + "in": "query", + "required": false, + "type": "boolean" } ], "tags": [ 
@@ -5152,6 +5357,115 @@ } } }, + "ServiceAccountsCreateServiceAccountRequest": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "role": { + "type": "string" + } + } + }, + "ServiceAccountsCreateServiceAccountResponse": { + "type": "object", + "properties": { + "serviceAccount": { + "$ref": "#/definitions/ServiceAccountsServiceAccount" + }, + "token": { + "type": "string" + } + } + }, + "ServiceAccountsDeleteServiceAccountResponse": { + "type": "object" + }, + "ServiceAccountsDescribeServiceAccountResponse": { + "type": "object", + "properties": { + "serviceAccount": { + "$ref": "#/definitions/ServiceAccountsServiceAccount" + } + } + }, + "ServiceAccountsListServiceAccountsResponse": { + "type": "object", + "properties": { + "serviceAccounts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/ServiceAccountsServiceAccount" + } + } + } + }, + "ServiceAccountsRegenerateServiceAccountTokenBody": { + "type": "object" + }, + "ServiceAccountsRegenerateServiceAccountTokenResponse": { + "type": "object", + "properties": { + "token": { + "type": "string" + } + } + }, + "ServiceAccountsServiceAccount": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "organizationId": { + "type": "string" + }, + "createdBy": { + "type": "string" + }, + "hasToken": { + "type": "boolean" + }, + "createdAt": { + "type": "string", + "format": "date-time" + }, + "updatedAt": { + "type": "string", + "format": "date-time" + } + } + }, + "ServiceAccountsUpdateServiceAccountBody": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + } + } + }, + "ServiceAccountsUpdateServiceAccountResponse": { + "type": "object", + "properties": { + "serviceAccount": { + "$ref": "#/definitions/ServiceAccountsServiceAccount" + } + } + }, 
"SuperplaneBlueprintsOutputChannel": { "type": "object", "properties": { diff --git a/db/migrations/20260216151135_add-service-accounts.down.sql b/db/migrations/20260216151135_add-service-accounts.down.sql new file mode 100644 index 0000000000..e69de29bb2 diff --git a/db/migrations/20260216151135_add-service-accounts.up.sql b/db/migrations/20260216151135_add-service-accounts.up.sql new file mode 100644 index 0000000000..ceafcb333e --- /dev/null +++ b/db/migrations/20260216151135_add-service-accounts.up.sql @@ -0,0 +1,27 @@ +-- Add type column to distinguish human users from service accounts +ALTER TABLE users ADD COLUMN type varchar(50) NOT NULL DEFAULT 'human'; + +-- Add description column for service accounts +ALTER TABLE users ADD COLUMN description text; + +-- Add created_by column to track who created a service account +ALTER TABLE users ADD COLUMN created_by uuid REFERENCES users(id); + +-- Make account_id nullable (service accounts have no account) +ALTER TABLE users ALTER COLUMN account_id DROP NOT NULL; + +-- Make email nullable (service accounts have no email) +ALTER TABLE users ALTER COLUMN email DROP NOT NULL; + +-- Drop the old unique constraint +ALTER TABLE users DROP CONSTRAINT unique_user_in_organization; + +-- Add partial unique index for human users +CREATE UNIQUE INDEX unique_human_user_in_organization + ON users (organization_id, account_id, email) + WHERE type = 'human'; + +-- Add partial unique index for service accounts (unique name per org) +CREATE UNIQUE INDEX unique_service_account_in_organization + ON users (organization_id, name) + WHERE type = 'service_account'; diff --git a/db/structure.sql b/db/structure.sql index 4fbec6df86..5874db3992 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -5,7 +5,7 @@ \restrict abcdef123 -- Dumped from database version 17.5 (Debian 17.5-1.pgdg130+1) --- Dumped by pg_dump version 17.7 (Ubuntu 17.7-3.pgdg22.04+1) +-- Dumped by pg_dump version 17.8 (Ubuntu 17.8-1.pgdg22.04+1) SET statement_timeout = 
0; SET lock_timeout = 0; @@ -359,14 +359,17 @@ CREATE TABLE public.secrets ( CREATE TABLE public.users ( id uuid DEFAULT public.uuid_generate_v4() NOT NULL, - account_id uuid NOT NULL, + account_id uuid, name character varying(255), email character varying(255), created_at timestamp without time zone DEFAULT CURRENT_TIMESTAMP, updated_at timestamp without time zone DEFAULT CURRENT_TIMESTAMP, deleted_at timestamp without time zone, organization_id uuid NOT NULL, - token_hash character varying(250) + token_hash character varying(250), + type character varying(50) DEFAULT 'human'::character varying NOT NULL, + description text, + created_by uuid ); @@ -768,14 +771,6 @@ ALTER TABLE ONLY public.secrets ADD CONSTRAINT secrets_pkey PRIMARY KEY (id); --- --- Name: users unique_user_in_organization; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.users - ADD CONSTRAINT unique_user_in_organization UNIQUE (organization_id, account_id, email); - - -- -- Name: group_metadata uq_group_metadata_key; Type: CONSTRAINT; Schema: public; Owner: - -- @@ -1187,6 +1182,20 @@ CREATE INDEX idx_workflows_is_template ON public.workflows USING btree (is_templ CREATE INDEX idx_workflows_organization_id ON public.workflows USING btree (organization_id); +-- +-- Name: unique_human_user_in_organization; Type: INDEX; Schema: public; Owner: - +-- + +CREATE UNIQUE INDEX unique_human_user_in_organization ON public.users USING btree (organization_id, account_id, email) WHERE ((type)::text = 'human'::text); + + +-- +-- Name: unique_service_account_in_organization; Type: INDEX; Schema: public; Owner: - +-- + +CREATE UNIQUE INDEX unique_service_account_in_organization ON public.users USING btree (organization_id, name) WHERE ((type)::text = 'service_account'::text); + + -- -- Name: account_password_auth account_password_auth_account_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - -- @@ -1339,6 +1348,14 @@ ALTER TABLE ONLY public.users ADD CONSTRAINT users_account_id_fkey 
FOREIGN KEY (account_id) REFERENCES public.accounts(id); +-- +-- Name: users users_created_by_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT users_created_by_fkey FOREIGN KEY (created_by) REFERENCES public.users(id); + + -- -- Name: users users_organization_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - -- @@ -1496,7 +1513,7 @@ ALTER TABLE ONLY public.workflow_nodes \restrict abcdef123 -- Dumped from database version 17.5 (Debian 17.5-1.pgdg130+1) --- Dumped by pg_dump version 17.7 (Ubuntu 17.7-3.pgdg22.04+1) +-- Dumped by pg_dump version 17.8 (Ubuntu 17.8-1.pgdg22.04+1) SET statement_timeout = 0; SET lock_timeout = 0; @@ -1515,7 +1532,7 @@ SET row_security = off; -- COPY public.schema_migrations (version, dirty) FROM stdin; -20260212033945 f +20260216151135 f \. @@ -1532,7 +1549,7 @@ COPY public.schema_migrations (version, dirty) FROM stdin; \restrict abcdef123 -- Dumped from database version 17.5 (Debian 17.5-1.pgdg130+1) --- Dumped by pg_dump version 17.7 (Ubuntu 17.7-3.pgdg22.04+1) +-- Dumped by pg_dump version 17.8 (Ubuntu 17.8-1.pgdg22.04+1) SET statement_timeout = 0; SET lock_timeout = 0; diff --git a/docs/prd/service-accounts.md b/docs/prd/service-accounts.md new file mode 100644 index 0000000000..80178bf55d --- /dev/null +++ b/docs/prd/service-accounts.md @@ -0,0 +1,252 @@ +# Service Accounts + +## Overview + +Service accounts are non-human identities that provide programmatic access to the SuperPlane API. They enable CI/CD pipelines, automation scripts, external integrations, and machine-to-machine communication without being tied to any individual user's credentials. + +Today, the only way to access the SuperPlane API programmatically is through personal user API tokens. This creates several problems: + +- **Lifecycle coupling**: When a user leaves the organization, their token is revoked, breaking any automation that relied on it. 
+- **Shared credentials**: Teams often share a single user's token across multiple services, making it impossible to trace which system performed an action. +- **Over-permissioning**: Personal tokens inherit the user's full role, even when the automation only needs a narrow set of permissions. + +Service accounts solve these problems by introducing a first-class, non-human identity that is managed independently of any individual user. + +## Goals + +1. Allow organizations to create, manage, and delete service accounts. +2. Provide API token authentication for service accounts, reusing the existing token mechanism. +3. Integrate with the existing RBAC system so service accounts can be assigned roles and added to groups, just like regular users. +4. Provide a clear audit trail — every API action performed by a service account should be attributable to that specific service account. +5. Expose full management capabilities through the API and the web UI. + +## Non-Goals + +- **OAuth2 client credentials flow**: Service accounts authenticate via API tokens, not OAuth2. An OAuth2 integration may be built on top of this in the future. +- **Cross-organization service accounts**: Each service account is scoped to a single organization, matching the existing tenant isolation model. +- **Multiple tokens per service account**: The initial implementation uses a single token per service account (reusing the existing `token_hash` mechanism). Multi-token support for zero-downtime rotation can be added later. +- **Token expiration**: Tokens do not expire in the initial implementation. Expiration support can be added with multi-token support. + +## Architecture Decision: Unified vs. Separate Tables + +### Options Considered + +**Option A — Separate `service_accounts` table:** Service accounts live in their own table, completely independent of `users`. Both participate in the same Casbin RBAC system as interchangeable principals. 
Every code path that receives an identity from the request context needs a polymorphic helper to resolve the ID from either table. + +**Option B — Unified `users` table with a `type` column:** Service accounts are rows in the existing `users` table with `type = 'service_account'` and a nullable `account_id`. All existing code that looks up users by ID works unchanged. Only the few places that manage user-type-specific behavior (invitations, profile page, account providers) need guards. + +### Decision: Option B (Unified Table) + +We chose the unified table approach for the following reasons: + +**Developer experience.** With separate tables, every developer writing code that consumes an identity must remember to use a polymorphic helper instead of the direct `FindActiveUserByID` call. If they forget, it works in tests (which use human users) and only breaks when a customer uses a service account — a subtle, hard-to-catch bug class. With a unified table, `FindActiveUserByID` just works for both. + +**Complexity distribution.** Option A distributes complexity across every consumer of identity (any endpoint, any serialization path, any component action). Option B concentrates complexity in a few identity-management spots (creation, profile, invitations). There are far more consumers than producers. + +**Codebase impact analysis.** We audited every call site that would be affected. With Option A, 14+ call sites across auth middleware, gRPC actions, serialization, and component contexts need changes. With Option B, only 3-4 spots that access `AccountID` or user-type-specific features need guards. + +**Industry context.** Some platforms (GCP, Kubernetes, Azure, GitHub) use separate entities. Others (AWS IAM, GitLab, Grafana) use a unified model. Both approaches are proven in production. + +### Trade-offs Accepted + +- `account_id` becomes nullable on the `users` table. Service accounts have no account. 
+- The `unique_user_in_organization` constraint `(organization_id, account_id, email)` needs adjustment — service accounts don't have an `account_id` or a meaningful email. +- ~3-4 places that access `user.AccountID` need nil checks (primarily `convertUserToProto` in `pkg/grpc/actions/auth/common.go` and invitation flows). +- The `/api/v1/me` endpoints need to handle service account callers (either return a service-account-specific response or block them). + +## Detailed Design + +### Data Model + +Service accounts are stored in the existing `users` table with a `type` column to distinguish them from human users. Service accounts reuse the existing `token_hash` column for API token authentication — no new tables are needed. + +#### Changes to `users` Table + +| Change | Details | +|-----------------------|-----------------------------------------------------------------| +| Add `type` column | `string`, default `'human'`. Values: `'human'`, `'service_account'`. | +| Add `description` column | `text`, nullable. Used by service accounts for description. | +| Make `account_id` nullable | Service accounts have no account. Null for `type = 'service_account'`. | +| Make `email` nullable | Service accounts have no email. Null for `type = 'service_account'`. | +| Add `created_by` column | `uuid`, nullable. FK to `users`. Records who created the service account. Null for human users. | +| Adjust unique constraint | Replace `unique_user_in_organization(organization_id, account_id, email)` with two partial unique indexes: one for human users on `(organization_id, account_id, email) WHERE type = 'human'` and one for service accounts on `(organization_id, name) WHERE type = 'service_account'`. | + +No new tables are required. Service accounts use the existing `token_hash` column on the `users` table, which is the same mechanism human users already use for API tokens. 
+ +### Authentication + +Service account tokens authenticate exactly like existing user API tokens — via the `Authorization: Bearer ` header. Because service accounts are rows in the `users` table and use the same `token_hash` column, **no changes to the authentication middleware are required.** + +The existing flow already works: + +1. Extract the Bearer token from the `Authorization` header. +2. Hash the token with SHA-256. +3. Look up the hash in `users.token_hash` via `FindActiveUserByTokenHash`. +4. Set request context with the `*models.User`. + +Since service accounts are `users` rows, step 3 matches them automatically. The gateway handler (`grpcGatewayHandler`) and authorization interceptor work without modification. The `x-user-id` and `x-organization-id` metadata is set from `user.ID` and `user.OrganizationID` as usual. + +### Authorization + +Service accounts participate in the existing RBAC system as first-class principals: + +- **Role assignment**: An org admin can assign any role to a service account (e.g., `org_viewer`, `org_admin`, or a custom role). +- **Group membership**: A service account can be added to groups, inheriting the group's role. +- **Permission enforcement**: The authorization interceptor does not need changes. It checks permissions based on the user ID in the context, which is the service account's user ID. + +**Restriction**: Service accounts cannot be assigned the `org_owner` role. Ownership is reserved for human users. + +### Codebase Impact + +The following areas require changes to support service accounts. Because we chose the unified table approach, most existing code works unchanged. + +#### No Changes Required + +These work automatically because service accounts are rows in the `users` table: + +- **Auth middleware** (`pkg/public/middleware/auth.go`) — `FindActiveUserByTokenHash` already matches service account tokens since they use the same `token_hash` column. 
+- **Authorization interceptor** (`pkg/authorization/interceptor.go`) — already uses opaque user ID strings. +- **Casbin RBAC** (`pkg/authorization/service.go`) — role assignment, permission checks, group membership. +- **Gateway handler** (`pkg/public/server.go` `grpcGatewayHandler`) — reads `user.ID` and `user.OrganizationID`. +- **`AuthContext`** (`pkg/workers/contexts/auth_context.go`) — holds `*models.User`, works for both types. +- **Component runtime** (approval, wait, time gate) — uses `core.User{ID, Name, Email}` from `AuthContext`. +- **Canvas/Blueprint `created_by`** — stores a user UUID, serialization resolves it via `FindMaybeDeletedUserByID`, which works. +- **Execution `cancelled_by`** — stores a user UUID, batch-resolved via `FindMaybeDeletedUsersByIDs`, which works. +- **Secret `created_by`** — stores the user ID string directly, works. +- **Trigger/execution actions** (`InvokeNodeTriggerAction`, `InvokeNodeExecutionAction`, `CancelExecution`) — call `FindActiveUserByID`, which works. + +#### Changes Required + +| Area | File(s) | Change | +|------|---------|--------| +| **User serialization** | `pkg/grpc/actions/auth/common.go` | `convertUserToProto` calls `FindAccountByID(user.AccountID)` — needs nil check for service accounts (no account). Return a simplified proto with no account providers. | +| **User listing** | `pkg/grpc/actions/auth/common.go` | `GetUsersWithRolesInDomain` / `ListUsers` — decide whether service accounts appear in the user list or need a separate listing. | +| **`/api/v1/me`** | `pkg/grpc/actions/me/get_user.go` | Return a response that works for both types. | +| **`/api/v1/me/token`** | `pkg/grpc/actions/me/regenerate_token.go` | Block for service accounts — they manage tokens via the service account token endpoints. | +| **Invitation flow** | `pkg/grpc/actions/organizations/create_invitation.go` | No change needed — invitations work by email, and service accounts have no email. Naturally excluded. 
| +| **Assign role** | `pkg/grpc/actions/auth/assign_role.go` | `FindUser` resolves by ID or email. Works for service accounts (by ID). Add guard to prevent `org_owner` assignment. | +| **Delete organization** | `pkg/grpc/actions/organizations/delete_organization.go` | No change needed — uses user ID for logging only. | + +### API Design + +All service account endpoints are organization-scoped and follow the existing API patterns. + +#### gRPC Service Definition + +```protobuf +service ServiceAccounts { + rpc CreateServiceAccount(CreateServiceAccountRequest) returns (CreateServiceAccountResponse); + rpc ListServiceAccounts(ListServiceAccountsRequest) returns (ListServiceAccountsResponse); + rpc DescribeServiceAccount(DescribeServiceAccountRequest) returns (DescribeServiceAccountResponse); + rpc UpdateServiceAccount(UpdateServiceAccountRequest) returns (UpdateServiceAccountResponse); + rpc DeleteServiceAccount(DeleteServiceAccountRequest) returns (DeleteServiceAccountResponse); + rpc RegenerateServiceAccountToken(RegenerateServiceAccountTokenRequest) returns (RegenerateServiceAccountTokenResponse); +} +``` + +#### REST Endpoints (via gRPC-Gateway) + +| Method | Path | Description | +|----------|--------------------------------------------------------------|------------------------------------------| +| `POST` | `/api/v1/service-accounts` | Create a service account. | +| `GET` | `/api/v1/service-accounts` | List service accounts in the org. | +| `GET` | `/api/v1/service-accounts/{id}` | Get service account details. | +| `PATCH` | `/api/v1/service-accounts/{id}` | Update name/description. | +| `DELETE` | `/api/v1/service-accounts/{id}` | Delete a service account. | +| `POST` | `/api/v1/service-accounts/{id}/token` | Regenerate the service account's token. 
| + +#### Authorization Rules + +| Endpoint | Required Permission | +|-----------------------------------|------------------------------| +| `CreateServiceAccount` | `service_accounts:create` | +| `ListServiceAccounts` | `service_accounts:read` | +| `DescribeServiceAccount` | `service_accounts:read` | +| `UpdateServiceAccount` | `service_accounts:update` | +| `DeleteServiceAccount` | `service_accounts:delete` | +| `RegenerateServiceAccountToken` | `service_accounts:update` | + +These permissions should be added to the `org_admin` and `org_owner` roles. The `org_viewer` role gets `service_accounts:read` only. + +### Token Management + +Service accounts use the same single-token mechanism as human users (the `token_hash` column on the `users` table). Token management works identically to the existing `RegenerateToken` endpoint for human users: + +- A token is generated as a cryptographically random string using the existing `crypto.Base64String` function. +- The hash is stored in `users.token_hash` using the existing `crypto.HashToken` function. +- The raw token is returned **only once** at creation time. After that, only the hash is stored. +- Regenerating a token replaces the previous one immediately. + +**Future enhancement**: Multi-token support (via a dedicated `service_account_tokens` table) can be added later to enable zero-downtime rotation, token expiration, named tokens, and per-token usage tracking. + +### Web UI + +The service accounts management UI should be accessible under **Organization Settings > Service Accounts**. + +#### List View + +- Table showing: name, description, role, has token, creation date, created by. +- Actions: create new, view details, delete. + +#### Detail View + +- Service account metadata (name, description, role). +- Edit name and description inline. +- Token section: + - Shows whether a token exists. + - "Regenerate Token" button that shows the raw token once with a copy button and a warning that it won't be shown again. 
+- Role assignment section: + - Current role displayed with option to change. +- Group membership section: + - List of groups the service account belongs to with option to add/remove. + +## Implementation Plan + +### Phase 1: Schema Migration + +1. Add `type` column (`string`, default `'human'`) to `users` table. +2. Add `description` column (`text`, nullable) to `users` table. +3. Add `created_by` column (`uuid`, nullable) to `users` table. +4. Make `account_id` nullable on `users` table. +5. Make `email` nullable on `users` table. +6. Replace `unique_user_in_organization` with partial unique indexes. +7. Run migration against dev and test databases. + +### Phase 2: Core Backend + +1. Update `models.User` struct to include `Type`, `Description`, and `CreatedBy` fields. +2. Add `UserTypeHuman` and `UserTypeServiceAccount` constants to `pkg/models/constants.go`. +3. Update `convertUserToProto` in `pkg/grpc/actions/auth/common.go` to handle nullable `AccountID`. +4. Define protobuf service in `protos/service_accounts.proto`. +5. Implement gRPC actions in `pkg/grpc/actions/service_accounts/`. +6. Add RBAC permissions (`service_accounts:create/read/update/delete`) to the organization policy templates. +7. Add authorization rules to the interceptor in `pkg/authorization/interceptor.go`. +8. Guard `/api/v1/me/token` endpoint against service account callers. + +### Phase 3: API Integration + +1. Register gRPC-Gateway routes in `pkg/public/server.go`. +2. Regenerate protobuf, OpenAPI spec, and SDK clients. +3. Add E2E tests for all service account CRUD operations and token authentication. + +### Phase 4: Web UI + +1. Add "Service Accounts" page under Organization Settings. +2. Implement list view with create/delete actions. +3. Implement detail view with token management. +4. Implement role and group management for service accounts. + +## Security Considerations + +- **Token storage**: Raw tokens are never stored. 
Only SHA-256 hashes are persisted (same as human user tokens). +- **Token display**: The raw token is shown exactly once at creation/regeneration time. It cannot be retrieved afterwards. +- **Deletion cascade**: Deleting a service account clears its token hash and removes all RBAC policies associated with it. +- **No owner role**: Service accounts cannot be assigned the `org_owner` role to prevent privilege escalation through non-human identities. +- **Rate limiting**: Service account token authentication follows the same rate-limiting rules as user token authentication (same code path). + +## Decisions + +- **Data model**: Unified `users` table with a `type` column (see Architecture Decision section above). +- **Token model**: Single token per service account, reusing the existing `token_hash` column. Multi-token support is a future enhancement. +- **Service account quotas**: 100 service accounts per organization. +- **Impersonation**: Not supported. Service accounts cannot be impersonated through the UI. 
diff --git a/pkg/authentication/authentication_test.go b/pkg/authentication/authentication_test.go index f918e50725..f9c0cfeab8 100644 --- a/pkg/authentication/authentication_test.go +++ b/pkg/authentication/authentication_test.go @@ -43,8 +43,8 @@ func TestHandler_findOrCreateAccountForProvider(t *testing.T) { user := &models.User{ OrganizationID: r.Organization.ID, - AccountID: account.ID, - Email: originalEmail, + AccountID: &account.ID, + Email: &originalEmail, Name: account.Name, } err = database.Conn().Create(user).Error @@ -83,7 +83,7 @@ func TestHandler_findOrCreateAccountForProvider(t *testing.T) { var userFromDB models.User err = database.Conn().Where("id = ?", user.ID).First(&userFromDB).Error require.NoError(t, err) - assert.Equal(t, newEmail, userFromDB.Email) + assert.Equal(t, newEmail, userFromDB.GetEmail()) var providerFromDB models.AccountProvider err = database.Conn().Where("id = ?", provider.ID).First(&providerFromDB).Error diff --git a/pkg/authorization/interceptor.go b/pkg/authorization/interceptor.go index 1cfc098ef3..231937b0d3 100644 --- a/pkg/authorization/interceptor.go +++ b/pkg/authorization/interceptor.go @@ -11,6 +11,7 @@ import ( pbOrganization "github.com/superplanehq/superplane/pkg/protos/organizations" pbRoles "github.com/superplanehq/superplane/pkg/protos/roles" pbSecrets "github.com/superplanehq/superplane/pkg/protos/secrets" + pbServiceAccounts "github.com/superplanehq/superplane/pkg/protos/service_accounts" pbUsers "github.com/superplanehq/superplane/pkg/protos/users" "google.golang.org/grpc" "google.golang.org/grpc/codes" @@ -114,6 +115,14 @@ func NewAuthorizationInterceptor(authService Authorization) *AuthorizationInterc pbCanvases.Canvases_InvokeNodeTriggerAction_FullMethodName: {Resource: "canvases", Action: "update", DomainType: models.DomainTypeOrganization}, pbCanvases.Canvases_ListNodeEvents_FullMethodName: {Resource: "canvases", Action: "read", DomainType: models.DomainTypeOrganization}, 
pbCanvases.Canvases_EmitNodeEvent_FullMethodName: {Resource: "canvases", Action: "update", DomainType: models.DomainTypeOrganization}, + + // Service Accounts rules + pbServiceAccounts.ServiceAccounts_CreateServiceAccount_FullMethodName: {Resource: "service_accounts", Action: "create", DomainType: models.DomainTypeOrganization}, + pbServiceAccounts.ServiceAccounts_ListServiceAccounts_FullMethodName: {Resource: "service_accounts", Action: "read", DomainType: models.DomainTypeOrganization}, + pbServiceAccounts.ServiceAccounts_DescribeServiceAccount_FullMethodName: {Resource: "service_accounts", Action: "read", DomainType: models.DomainTypeOrganization}, + pbServiceAccounts.ServiceAccounts_UpdateServiceAccount_FullMethodName: {Resource: "service_accounts", Action: "update", DomainType: models.DomainTypeOrganization}, + pbServiceAccounts.ServiceAccounts_DeleteServiceAccount_FullMethodName: {Resource: "service_accounts", Action: "delete", DomainType: models.DomainTypeOrganization}, + pbServiceAccounts.ServiceAccounts_RegenerateServiceAccountToken_FullMethodName: {Resource: "service_accounts", Action: "update", DomainType: models.DomainTypeOrganization}, } return &AuthorizationInterceptor{ diff --git a/pkg/grpc/actions/auth/add_user_to_group_test.go b/pkg/grpc/actions/auth/add_user_to_group_test.go index e4cdd6e0f2..50f20b2825 100644 --- a/pkg/grpc/actions/auth/add_user_to_group_test.go +++ b/pkg/grpc/actions/auth/add_user_to_group_test.go @@ -31,19 +31,19 @@ func Test_AddUserToGroup(t *testing.T) { response, err := ListGroupUsers(context.Background(), models.DomainTypeOrganization, orgID, groupName, r.AuthService) require.NoError(t, err) assert.True(t, slices.ContainsFunc(response.Users, func(user *users.User) bool { - return user.Metadata.Id == newUser.ID.String() && user.Metadata.Email == newUser.Email + return user.Metadata.Id == newUser.ID.String() && user.Metadata.Email == newUser.GetEmail() })) }) t.Run("add user to organization group with email", func(t 
*testing.T) { newUser := support.CreateUser(t, r, r.Organization.ID) - _, err := AddUserToGroup(ctx, orgID, models.DomainTypeOrganization, orgID, "", newUser.Email, groupName, r.AuthService) + _, err := AddUserToGroup(ctx, orgID, models.DomainTypeOrganization, orgID, "", newUser.GetEmail(), groupName, r.AuthService) require.NoError(t, err) response, err := ListGroupUsers(context.Background(), models.DomainTypeOrganization, orgID, groupName, r.AuthService) require.NoError(t, err) assert.True(t, slices.ContainsFunc(response.Users, func(user *users.User) bool { - return user.Metadata.Id == newUser.ID.String() && user.Metadata.Email == newUser.Email + return user.Metadata.Id == newUser.ID.String() && user.Metadata.Email == newUser.GetEmail() })) }) diff --git a/pkg/grpc/actions/auth/assign_role.go b/pkg/grpc/actions/auth/assign_role.go index 90b13a7633..bfb9a0e8a5 100644 --- a/pkg/grpc/actions/auth/assign_role.go +++ b/pkg/grpc/actions/auth/assign_role.go @@ -6,6 +6,7 @@ import ( log "github.com/sirupsen/logrus" "github.com/superplanehq/superplane/pkg/authentication" "github.com/superplanehq/superplane/pkg/authorization" + "github.com/superplanehq/superplane/pkg/models" pb "github.com/superplanehq/superplane/pkg/protos/roles" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" @@ -30,6 +31,10 @@ func AssignRole(ctx context.Context, orgID, domainType, domainID, roleName, user return nil, status.Error(codes.PermissionDenied, "cannot change your own role") } + if user.IsServiceAccount() && roleName == models.RoleOrgOwner { + return nil, status.Error(codes.InvalidArgument, "service accounts cannot be assigned the org_owner role") + } + err = authService.AssignRole(user.ID.String(), roleName, domainID, domainType) if err != nil { log.Errorf("Error assigning role %s to %s: %v", roleName, user.ID.String(), err) diff --git a/pkg/grpc/actions/auth/assign_role_test.go b/pkg/grpc/actions/auth/assign_role_test.go index 53d7682ad9..1b14486865 100644 --- 
a/pkg/grpc/actions/auth/assign_role_test.go +++ b/pkg/grpc/actions/auth/assign_role_test.go @@ -44,7 +44,7 @@ func Test_AssignRole(t *testing.T) { t.Run("assign role with user email", func(t *testing.T) { newUser := support.CreateUser(t, r, r.Organization.ID) - resp, err := AssignRole(ctx, orgID, models.DomainTypeOrganization, orgID, models.RoleOrgAdmin, "", newUser.Email, r.AuthService) + resp, err := AssignRole(ctx, orgID, models.DomainTypeOrganization, orgID, models.RoleOrgAdmin, "", newUser.GetEmail(), r.AuthService) require.NoError(t, err) assert.NotNil(t, resp) }) diff --git a/pkg/grpc/actions/auth/common.go b/pkg/grpc/actions/auth/common.go index 5700784287..bfde9b84aa 100644 --- a/pkg/grpc/actions/auth/common.go +++ b/pkg/grpc/actions/auth/common.go @@ -94,7 +94,7 @@ func FindUser(org, id, email string) (*models.User, error) { return models.FindActiveUserByEmail(orgID.String(), email) } -func GetUsersWithRolesInDomain(domainID, domainType string, authService authorization.Authorization) ([]*pbUsers.User, error) { +func GetUsersWithRolesInDomain(domainID, domainType string, includeServiceAccounts bool, authService authorization.Authorization) ([]*pbUsers.User, error) { if domainType != models.DomainTypeOrganization { return nil, status.Error(codes.InvalidArgument, "domain type must be organization") } @@ -139,9 +139,25 @@ func GetUsersWithRolesInDomain(domainID, domainType string, authService authoriz } } + userIDs := make([]string, 0, len(userRoleMap)) + for userID := range userRoleMap { + userIDs = append(userIDs, userID) + } + + var dbUsers []models.User + if includeServiceAccounts { + dbUsers, err = models.FindUsersByIDs(userIDs) + } else { + dbUsers, err = models.FindHumanUsersByIDs(userIDs) + } + if err != nil { + return nil, status.Error(codes.Internal, "failed to fetch users") + } + var users []*pbUsers.User - for userID, roleAssignments := range userRoleMap { - user, err := convertUserToProto(userID, roleAssignments) + for i := range dbUsers { + 
roleAssignments := userRoleMap[dbUsers[i].ID.String()] + user, err := convertUserToProto(&dbUsers[i], roleAssignments) if err != nil { continue } @@ -151,39 +167,34 @@ func GetUsersWithRolesInDomain(domainID, domainType string, authService authoriz return users, nil } -func convertUserToProto(userID string, roleAssignments []*pbUsers.UserRoleAssignment) (*pbUsers.User, error) { - dbUser, err := models.FindUnscopedUserByID(userID) - if err != nil { - return nil, err - } - - account, err := models.FindAccountByID(dbUser.AccountID.String()) - if err != nil { - return nil, err - } - - providers, err := account.GetAccountProviders() - if err != nil { - return nil, err - } - - pbAccountProviders := make([]*pbUsers.AccountProvider, len(providers)) - for i, provider := range providers { - pbAccountProviders[i] = &pb.AccountProvider{ - ProviderType: provider.Provider, - ProviderId: provider.ProviderID, - Email: provider.Email, - DisplayName: provider.Name, - AvatarUrl: provider.AvatarURL, - CreatedAt: timestamppb.New(provider.CreatedAt), - UpdatedAt: timestamppb.New(provider.UpdatedAt), +func convertUserToProto(dbUser *models.User, roleAssignments []*pbUsers.UserRoleAssignment) (*pbUsers.User, error) { + var pbAccountProviders []*pbUsers.AccountProvider + + if dbUser.AccountID != nil { + account, err := models.FindAccountByID(dbUser.AccountID.String()) + if err == nil { + providers, err := account.GetAccountProviders() + if err == nil { + pbAccountProviders = make([]*pbUsers.AccountProvider, len(providers)) + for i, provider := range providers { + pbAccountProviders[i] = &pb.AccountProvider{ + ProviderType: provider.Provider, + ProviderId: provider.ProviderID, + Email: provider.Email, + DisplayName: provider.Name, + AvatarUrl: provider.AvatarURL, + CreatedAt: timestamppb.New(provider.CreatedAt), + UpdatedAt: timestamppb.New(provider.UpdatedAt), + } + } + } } } return &pb.User{ Metadata: &pb.User_Metadata{ - Id: userID, - Email: dbUser.Email, + Id: dbUser.ID.String(), + 
Email: dbUser.GetEmail(), CreatedAt: timestamppb.New(dbUser.CreatedAt), UpdatedAt: timestamppb.New(dbUser.UpdatedAt), }, diff --git a/pkg/grpc/actions/auth/delete_role_test.go b/pkg/grpc/actions/auth/delete_role_test.go index 8ae9547fcf..e6b44c6f4d 100644 --- a/pkg/grpc/actions/auth/delete_role_test.go +++ b/pkg/grpc/actions/auth/delete_role_test.go @@ -131,7 +131,7 @@ func Test_DeleteRole(t *testing.T) { account, err := models.CreateAccount("only-role-user", "only-role-user@test.com") require.NoError(t, err) - user, err := models.CreateUser(r.Organization.ID, account.ID, account.Name, account.Email) + user, err := models.CreateUser(r.Organization.ID, account.ID, account.Email, account.Name) require.NoError(t, err) err = r.AuthService.AssignRole(user.ID.String(), "test-role-only-users", orgID, models.DomainTypeOrganization) diff --git a/pkg/grpc/actions/auth/describe_role_test.go b/pkg/grpc/actions/auth/describe_role_test.go index bc39e7ba06..30b602271a 100644 --- a/pkg/grpc/actions/auth/describe_role_test.go +++ b/pkg/grpc/actions/auth/describe_role_test.go @@ -22,8 +22,8 @@ func Test_DescribeRole(t *testing.T) { assert.NotNil(t, resp.Role.Spec.InheritedRole) assert.Equal(t, models.RoleOrgAdmin, resp.Role.Metadata.Name) assert.Equal(t, models.RoleOrgViewer, resp.Role.Spec.InheritedRole.Metadata.Name) - assert.Len(t, resp.Role.Spec.Permissions, 29) - assert.Len(t, resp.Role.Spec.InheritedRole.Spec.Permissions, 6) + assert.Len(t, resp.Role.Spec.Permissions, 33) + assert.Len(t, resp.Role.Spec.InheritedRole.Spec.Permissions, 7) assert.Equal(t, "Admin", resp.Role.Spec.DisplayName) assert.Equal(t, "Viewer", resp.Role.Spec.InheritedRole.Spec.DisplayName) assert.Contains(t, resp.Role.Spec.Description, "Can manage canvases, users, groups, and roles") diff --git a/pkg/grpc/actions/auth/list_group_users.go b/pkg/grpc/actions/auth/list_group_users.go index f5bb497966..2270cc1c94 100644 --- a/pkg/grpc/actions/auth/list_group_users.go +++ 
b/pkg/grpc/actions/auth/list_group_users.go @@ -38,18 +38,23 @@ func ListGroupUsers(ctx context.Context, domainType, domainID, groupName string, roleMetadata := roleMetadataMap[role] - var users []*pbUsers.User - for _, userID := range userIDs { - roleAssignment := &pbUsers.UserRoleAssignment{ - RoleName: role, - RoleDisplayName: roleMetadata.DisplayName, - RoleDescription: roleMetadata.Description, - DomainType: actions.DomainTypeToProto(domainType), - DomainId: domainID, - AssignedAt: timestamppb.Now(), - } + dbUsers, err := models.FindUsersByIDs(userIDs) + if err != nil { + return nil, status.Error(codes.Internal, "failed to fetch group users") + } - user, err := convertUserToProto(userID, []*pbUsers.UserRoleAssignment{roleAssignment}) + roleAssignment := &pbUsers.UserRoleAssignment{ + RoleName: role, + RoleDisplayName: roleMetadata.DisplayName, + RoleDescription: roleMetadata.Description, + DomainType: actions.DomainTypeToProto(domainType), + DomainId: domainID, + AssignedAt: timestamppb.Now(), + } + + var users []*pbUsers.User + for i := range dbUsers { + user, err := convertUserToProto(&dbUsers[i], []*pbUsers.UserRoleAssignment{roleAssignment}) if err != nil { continue } diff --git a/pkg/grpc/actions/auth/list_users.go b/pkg/grpc/actions/auth/list_users.go index dcb3a6b44c..7b0d2df6ac 100644 --- a/pkg/grpc/actions/auth/list_users.go +++ b/pkg/grpc/actions/auth/list_users.go @@ -9,8 +9,8 @@ import ( "google.golang.org/grpc/status" ) -func ListUsers(ctx context.Context, domainType string, domainID string, authService authorization.Authorization) (*pb.ListUsersResponse, error) { - users, err := GetUsersWithRolesInDomain(domainID, domainType, authService) +func ListUsers(ctx context.Context, domainType string, domainID string, includeServiceAccounts bool, authService authorization.Authorization) (*pb.ListUsersResponse, error) { + users, err := GetUsersWithRolesInDomain(domainID, domainType, includeServiceAccounts, authService) if err != nil { return nil, 
status.Error(codes.Internal, "failed to get canvas users") } diff --git a/pkg/grpc/actions/auth/list_users_test.go b/pkg/grpc/actions/auth/list_users_test.go index 138ef95d2c..6352fc737d 100644 --- a/pkg/grpc/actions/auth/list_users_test.go +++ b/pkg/grpc/actions/auth/list_users_test.go @@ -14,7 +14,7 @@ import ( func Test__ListUsers(t *testing.T) { r := support.Setup(t) - resp, err := ListUsers(context.Background(), models.DomainTypeOrganization, r.Organization.ID.String(), r.AuthService) + resp, err := ListUsers(context.Background(), models.DomainTypeOrganization, r.Organization.ID.String(), false, r.AuthService) require.NoError(t, err) require.NotNil(t, resp) assert.Len(t, resp.Users, 1) diff --git a/pkg/grpc/actions/auth/remove_user_from_group_test.go b/pkg/grpc/actions/auth/remove_user_from_group_test.go index 1bad2b76b1..248c612935 100644 --- a/pkg/grpc/actions/auth/remove_user_from_group_test.go +++ b/pkg/grpc/actions/auth/remove_user_from_group_test.go @@ -35,7 +35,7 @@ func Test_RemoveUserFromGroup(t *testing.T) { t.Run("remove user from group with user email", func(t *testing.T) { require.NoError(t, r.AuthService.AddUserToGroup(orgID, models.DomainTypeOrganization, newUser.ID.String(), groupName)) - _, err := RemoveUserFromGroup(ctx, orgID, models.DomainTypeOrganization, orgID, "", newUser.Email, groupName, r.AuthService) + _, err := RemoveUserFromGroup(ctx, orgID, models.DomainTypeOrganization, orgID, "", newUser.GetEmail(), groupName, r.AuthService) require.NoError(t, err) }) diff --git a/pkg/grpc/actions/me/get_user.go b/pkg/grpc/actions/me/get_user.go index 8678045ca8..7d0732196c 100644 --- a/pkg/grpc/actions/me/get_user.go +++ b/pkg/grpc/actions/me/get_user.go @@ -29,7 +29,7 @@ func GetUser(ctx context.Context) (*pb.User, error) { return &pb.User{ Id: user.ID.String(), - Email: user.Email, + Email: user.GetEmail(), OrganizationId: orgID, CreatedAt: timestamppb.New(user.CreatedAt), HasToken: user.TokenHash != "", diff --git 
a/pkg/grpc/actions/me/regenerate_token.go b/pkg/grpc/actions/me/regenerate_token.go index b7bec17f78..e552066901 100644 --- a/pkg/grpc/actions/me/regenerate_token.go +++ b/pkg/grpc/actions/me/regenerate_token.go @@ -27,6 +27,10 @@ func RegenerateToken(ctx context.Context) (*pb.RegenerateTokenResponse, error) { return nil, status.Error(codes.NotFound, "user not found") } + if user.IsServiceAccount() { + return nil, status.Error(codes.PermissionDenied, "service accounts must use the service account token endpoint") + } + plainToken, err := crypto.Base64String(64) if err != nil { return nil, status.Error(codes.Internal, "failed to generate new token") diff --git a/pkg/grpc/actions/organizations/create_invitation.go b/pkg/grpc/actions/organizations/create_invitation.go index dfda09587f..95903f1bb6 100644 --- a/pkg/grpc/actions/organizations/create_invitation.go +++ b/pkg/grpc/actions/organizations/create_invitation.go @@ -47,12 +47,12 @@ func CreateInvitation(ctx context.Context, authService authorization.Authorizati func handleExistingUser(authService authorization.Authorization, authenticatedUserID, orgID uuid.UUID, user *models.User) (*pb.CreateInvitationResponse, error) { if !user.DeletedAt.Valid { - return nil, status.Errorf(codes.InvalidArgument, "user %s is already an active member of organization", user.Email) + return nil, status.Errorf(codes.InvalidArgument, "user %s is already an active member of organization", user.GetEmail()) } var invitation *models.OrganizationInvitation err := database.Conn().Transaction(func(tx *gorm.DB) error { - i, err := models.CreateInvitationInTransaction(tx, orgID, authenticatedUserID, user.Email, models.InvitationStateAccepted) + i, err := models.CreateInvitationInTransaction(tx, orgID, authenticatedUserID, user.GetEmail(), models.InvitationStateAccepted) if err != nil { return status.Errorf(codes.InvalidArgument, "Failed to create invitation: %v", err) } diff --git a/pkg/grpc/actions/organizations/create_invitation_test.go 
b/pkg/grpc/actions/organizations/create_invitation_test.go index 33144cbfd0..01d716ad92 100644 --- a/pkg/grpc/actions/organizations/create_invitation_test.go +++ b/pkg/grpc/actions/organizations/create_invitation_test.go @@ -131,8 +131,8 @@ func Test__CreateInvitation(t *testing.T) { // user, err := models.FindActiveUserByEmail(r.Organization.ID.String(), account.Email) require.NoError(t, err) - assert.Equal(t, account.ID, user.AccountID) - assert.Equal(t, account.Email, user.Email) + assert.Equal(t, &account.ID, user.AccountID) + assert.Equal(t, account.Email, user.GetEmail()) assert.Equal(t, account.Name, user.Name) assert.Equal(t, r.Organization.ID, user.OrganizationID) diff --git a/pkg/grpc/actions/organizations/remove_user_test.go b/pkg/grpc/actions/organizations/remove_user_test.go index e6e8177d24..95b8cf8052 100644 --- a/pkg/grpc/actions/organizations/remove_user_test.go +++ b/pkg/grpc/actions/organizations/remove_user_test.go @@ -55,7 +55,7 @@ func Test_RemoveUser(t *testing.T) { require.NotNil(t, user.DeletedAt) _, err = models.FindActiveUserByID(orgID, newUser.ID.String()) require.ErrorIs(t, err, gorm.ErrRecordNotFound) - _, err = models.FindActiveUserByEmail(orgID, newUser.Email) + _, err = models.FindActiveUserByEmail(orgID, newUser.GetEmail()) require.ErrorIs(t, err, gorm.ErrRecordNotFound) _, err = models.FindActiveUserByTokenHash(newUser.TokenHash) require.ErrorIs(t, err, gorm.ErrRecordNotFound) diff --git a/pkg/grpc/actions/serviceaccounts/common.go b/pkg/grpc/actions/serviceaccounts/common.go new file mode 100644 index 0000000000..87de902845 --- /dev/null +++ b/pkg/grpc/actions/serviceaccounts/common.go @@ -0,0 +1,28 @@ +package serviceaccounts + +import ( + "github.com/superplanehq/superplane/pkg/models" + pb "github.com/superplanehq/superplane/pkg/protos/service_accounts" + "google.golang.org/protobuf/types/known/timestamppb" +) + +func serializeServiceAccount(user *models.User) *pb.ServiceAccount { + sa := &pb.ServiceAccount{ + Id: 
user.ID.String(), + Name: user.Name, + OrganizationId: user.OrganizationID.String(), + HasToken: user.TokenHash != "", + CreatedAt: timestamppb.New(user.CreatedAt), + UpdatedAt: timestamppb.New(user.UpdatedAt), + } + + if user.Description != nil { + sa.Description = *user.Description + } + + if user.CreatedBy != nil { + sa.CreatedBy = user.CreatedBy.String() + } + + return sa +} diff --git a/pkg/grpc/actions/serviceaccounts/create_service_account.go b/pkg/grpc/actions/serviceaccounts/create_service_account.go new file mode 100644 index 0000000000..624165520f --- /dev/null +++ b/pkg/grpc/actions/serviceaccounts/create_service_account.go @@ -0,0 +1,93 @@ +package serviceaccounts + +import ( + "context" + + "github.com/google/uuid" + "github.com/superplanehq/superplane/pkg/authentication" + "github.com/superplanehq/superplane/pkg/authorization" + "github.com/superplanehq/superplane/pkg/crypto" + "github.com/superplanehq/superplane/pkg/database" + "github.com/superplanehq/superplane/pkg/models" + pb "github.com/superplanehq/superplane/pkg/protos/service_accounts" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + "gorm.io/gorm" +) + +func CreateServiceAccount(ctx context.Context, req *pb.CreateServiceAccountRequest, authService authorization.Authorization) (*pb.CreateServiceAccountResponse, error) { + userID, userIsSet := authentication.GetUserIdFromMetadata(ctx) + if !userIsSet { + return nil, status.Error(codes.Unauthenticated, "user not authenticated") + } + + orgID, orgIsSet := authentication.GetOrganizationIdFromMetadata(ctx) + if !orgIsSet { + return nil, status.Error(codes.Unauthenticated, "user not authenticated") + } + + if req.Name == "" { + return nil, status.Error(codes.InvalidArgument, "name is required") + } + + validRoles := map[string]bool{ + models.RoleOrgAdmin: true, + models.RoleOrgViewer: true, + } + + if req.Role == "" { + return nil, status.Error(codes.InvalidArgument, "role is required") + } + + if !validRoles[req.Role] { + 
return nil, status.Error(codes.InvalidArgument, "invalid role for service account; must be org_admin or org_viewer") + } + + orgUUID, err := uuid.Parse(orgID) + if err != nil { + return nil, status.Error(codes.InvalidArgument, "invalid organization ID") + } + + createdByUUID, err := uuid.Parse(userID) + if err != nil { + return nil, status.Error(codes.InvalidArgument, "invalid user ID") + } + + var description *string + if req.Description != "" { + description = &req.Description + } + + plainToken, err := crypto.Base64String(64) + if err != nil { + return nil, status.Error(codes.Internal, "failed to generate token") + } + + var sa *models.User + err = database.Conn().Transaction(func(tx *gorm.DB) error { + var txErr error + sa, txErr = models.CreateServiceAccount(tx, orgUUID, req.Name, description, createdByUUID) + if txErr != nil { + return txErr + } + + sa.TokenHash = crypto.HashToken(plainToken) + sa.UpdatedAt = sa.CreatedAt + txErr = tx.Save(sa).Error + if txErr != nil { + return txErr + } + + txErr = authService.AssignRole(sa.ID.String(), req.Role, orgID, models.DomainTypeOrganization) + return txErr + }) + + if err != nil { + return nil, status.Errorf(codes.Internal, "failed to create service account: %v", err) + } + + return &pb.CreateServiceAccountResponse{ + ServiceAccount: serializeServiceAccount(sa), + Token: plainToken, + }, nil +} diff --git a/pkg/grpc/actions/serviceaccounts/delete_service_account.go b/pkg/grpc/actions/serviceaccounts/delete_service_account.go new file mode 100644 index 0000000000..43250fd552 --- /dev/null +++ b/pkg/grpc/actions/serviceaccounts/delete_service_account.go @@ -0,0 +1,58 @@ +package serviceaccounts + +import ( + "context" + + log "github.com/sirupsen/logrus" + "github.com/superplanehq/superplane/pkg/authentication" + "github.com/superplanehq/superplane/pkg/authorization" + "github.com/superplanehq/superplane/pkg/models" + pb "github.com/superplanehq/superplane/pkg/protos/service_accounts" + "google.golang.org/grpc/codes" 
+ "google.golang.org/grpc/status" +) + +func DeleteServiceAccount(ctx context.Context, req *pb.DeleteServiceAccountRequest, authService authorization.Authorization) (*pb.DeleteServiceAccountResponse, error) { + _, userIsSet := authentication.GetUserIdFromMetadata(ctx) + if !userIsSet { + return nil, status.Error(codes.Unauthenticated, "user not authenticated") + } + + orgID, orgIsSet := authentication.GetOrganizationIdFromMetadata(ctx) + if !orgIsSet { + return nil, status.Error(codes.Unauthenticated, "user not authenticated") + } + + if req.Id == "" { + return nil, status.Error(codes.InvalidArgument, "id is required") + } + + user, err := models.FindActiveUserByID(orgID, req.Id) + if err != nil { + return nil, status.Error(codes.NotFound, "service account not found") + } + + if !user.IsServiceAccount() { + return nil, status.Error(codes.NotFound, "service account not found") + } + + // Remove all RBAC roles before deleting + roles, err := authService.GetUserRolesForOrg(user.ID.String(), orgID) + if err != nil { + log.Errorf("Error determining roles for service account %s: %v", user.ID, err) + } else { + for _, role := range roles { + err = authService.RemoveRole(user.ID.String(), role.Name, orgID, models.DomainTypeOrganization) + if err != nil { + log.Errorf("Error removing role %s for service account %s: %v", role.Name, user.ID, err) + } + } + } + + err = user.Delete() + if err != nil { + return nil, status.Error(codes.Internal, "failed to delete service account") + } + + return &pb.DeleteServiceAccountResponse{}, nil +} diff --git a/pkg/grpc/actions/serviceaccounts/describe_service_account.go b/pkg/grpc/actions/serviceaccounts/describe_service_account.go new file mode 100644 index 0000000000..3e3586f289 --- /dev/null +++ b/pkg/grpc/actions/serviceaccounts/describe_service_account.go @@ -0,0 +1,40 @@ +package serviceaccounts + +import ( + "context" + + "github.com/superplanehq/superplane/pkg/authentication" + "github.com/superplanehq/superplane/pkg/models" + pb 
"github.com/superplanehq/superplane/pkg/protos/service_accounts" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func DescribeServiceAccount(ctx context.Context, req *pb.DescribeServiceAccountRequest) (*pb.DescribeServiceAccountResponse, error) { + _, userIsSet := authentication.GetUserIdFromMetadata(ctx) + if !userIsSet { + return nil, status.Error(codes.Unauthenticated, "user not authenticated") + } + + orgID, orgIsSet := authentication.GetOrganizationIdFromMetadata(ctx) + if !orgIsSet { + return nil, status.Error(codes.Unauthenticated, "user not authenticated") + } + + if req.Id == "" { + return nil, status.Error(codes.InvalidArgument, "id is required") + } + + user, err := models.FindActiveUserByID(orgID, req.Id) + if err != nil { + return nil, status.Error(codes.NotFound, "service account not found") + } + + if !user.IsServiceAccount() { + return nil, status.Error(codes.NotFound, "service account not found") + } + + return &pb.DescribeServiceAccountResponse{ + ServiceAccount: serializeServiceAccount(user), + }, nil +} diff --git a/pkg/grpc/actions/serviceaccounts/list_service_accounts.go b/pkg/grpc/actions/serviceaccounts/list_service_accounts.go new file mode 100644 index 0000000000..dadd48a391 --- /dev/null +++ b/pkg/grpc/actions/serviceaccounts/list_service_accounts.go @@ -0,0 +1,37 @@ +package serviceaccounts + +import ( + "context" + + "github.com/superplanehq/superplane/pkg/authentication" + "github.com/superplanehq/superplane/pkg/models" + pb "github.com/superplanehq/superplane/pkg/protos/service_accounts" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func ListServiceAccounts(ctx context.Context) (*pb.ListServiceAccountsResponse, error) { + _, userIsSet := authentication.GetUserIdFromMetadata(ctx) + if !userIsSet { + return nil, status.Error(codes.Unauthenticated, "user not authenticated") + } + + orgID, orgIsSet := authentication.GetOrganizationIdFromMetadata(ctx) + if !orgIsSet { + return nil, 
status.Error(codes.Unauthenticated, "user not authenticated") + } + + users, err := models.FindServiceAccountsByOrganization(orgID) + if err != nil { + return nil, status.Error(codes.Internal, "failed to list service accounts") + } + + serviceAccounts := make([]*pb.ServiceAccount, len(users)) + for i := range users { + serviceAccounts[i] = serializeServiceAccount(&users[i]) + } + + return &pb.ListServiceAccountsResponse{ + ServiceAccounts: serviceAccounts, + }, nil +} diff --git a/pkg/grpc/actions/serviceaccounts/regenerate_token.go b/pkg/grpc/actions/serviceaccounts/regenerate_token.go new file mode 100644 index 0000000000..2016a42485 --- /dev/null +++ b/pkg/grpc/actions/serviceaccounts/regenerate_token.go @@ -0,0 +1,51 @@ +package serviceaccounts + +import ( + "context" + + "github.com/superplanehq/superplane/pkg/authentication" + "github.com/superplanehq/superplane/pkg/crypto" + "github.com/superplanehq/superplane/pkg/models" + pb "github.com/superplanehq/superplane/pkg/protos/service_accounts" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func RegenerateServiceAccountToken(ctx context.Context, req *pb.RegenerateServiceAccountTokenRequest) (*pb.RegenerateServiceAccountTokenResponse, error) { + _, userIsSet := authentication.GetUserIdFromMetadata(ctx) + if !userIsSet { + return nil, status.Error(codes.Unauthenticated, "user not authenticated") + } + + orgID, orgIsSet := authentication.GetOrganizationIdFromMetadata(ctx) + if !orgIsSet { + return nil, status.Error(codes.Unauthenticated, "user not authenticated") + } + + if req.Id == "" { + return nil, status.Error(codes.InvalidArgument, "id is required") + } + + user, err := models.FindActiveUserByID(orgID, req.Id) + if err != nil { + return nil, status.Error(codes.NotFound, "service account not found") + } + + if !user.IsServiceAccount() { + return nil, status.Error(codes.NotFound, "service account not found") + } + + plainToken, err := crypto.Base64String(64) + if err != nil { + return 
nil, status.Error(codes.Internal, "failed to generate new token") + } + + err = user.UpdateTokenHash(crypto.HashToken(plainToken)) + if err != nil { + return nil, status.Error(codes.Internal, "failed to update token") + } + + return &pb.RegenerateServiceAccountTokenResponse{ + Token: plainToken, + }, nil +} diff --git a/pkg/grpc/actions/serviceaccounts/update_service_account.go b/pkg/grpc/actions/serviceaccounts/update_service_account.go new file mode 100644 index 0000000000..9ba80633d8 --- /dev/null +++ b/pkg/grpc/actions/serviceaccounts/update_service_account.go @@ -0,0 +1,56 @@ +package serviceaccounts + +import ( + "context" + "time" + + "github.com/superplanehq/superplane/pkg/authentication" + "github.com/superplanehq/superplane/pkg/database" + "github.com/superplanehq/superplane/pkg/models" + pb "github.com/superplanehq/superplane/pkg/protos/service_accounts" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func UpdateServiceAccount(ctx context.Context, req *pb.UpdateServiceAccountRequest) (*pb.UpdateServiceAccountResponse, error) { + _, userIsSet := authentication.GetUserIdFromMetadata(ctx) + if !userIsSet { + return nil, status.Error(codes.Unauthenticated, "user not authenticated") + } + + orgID, orgIsSet := authentication.GetOrganizationIdFromMetadata(ctx) + if !orgIsSet { + return nil, status.Error(codes.Unauthenticated, "user not authenticated") + } + + if req.Id == "" { + return nil, status.Error(codes.InvalidArgument, "id is required") + } + + user, err := models.FindActiveUserByID(orgID, req.Id) + if err != nil { + return nil, status.Error(codes.NotFound, "service account not found") + } + + if !user.IsServiceAccount() { + return nil, status.Error(codes.NotFound, "service account not found") + } + + if req.Name != "" { + user.Name = req.Name + } + + if req.Description != "" { + user.Description = &req.Description + } + + user.UpdatedAt = time.Now() + err = database.Conn().Save(user).Error + if err != nil { + return nil, 
status.Error(codes.Internal, "failed to update service account") + } + + return &pb.UpdateServiceAccountResponse{ + ServiceAccount: serializeServiceAccount(user), + }, nil +} diff --git a/pkg/grpc/server.go b/pkg/grpc/server.go index 97559e332f..c4ed18afef 100644 --- a/pkg/grpc/server.go +++ b/pkg/grpc/server.go @@ -22,6 +22,7 @@ import ( organizationPb "github.com/superplanehq/superplane/pkg/protos/organizations" pbRoles "github.com/superplanehq/superplane/pkg/protos/roles" secretPb "github.com/superplanehq/superplane/pkg/protos/secrets" + pbServiceAccounts "github.com/superplanehq/superplane/pkg/protos/service_accounts" triggerPb "github.com/superplanehq/superplane/pkg/protos/triggers" pbUsers "github.com/superplanehq/superplane/pkg/protos/users" widgetPb "github.com/superplanehq/superplane/pkg/protos/widgets" @@ -124,6 +125,9 @@ func RunServer(baseURL, webhooksBaseURL, basePath string, encryptor crypto.Encry integrationService := NewIntegrationService(encryptor, registry) integrationpb.RegisterIntegrationsServer(grpcServer, integrationService) + serviceAccountsService := NewServiceAccountsService(authService) + pbServiceAccounts.RegisterServiceAccountsServer(grpcServer, serviceAccountsService) + reflection.Register(grpcServer) // diff --git a/pkg/grpc/service_accounts_service.go b/pkg/grpc/service_accounts_service.go new file mode 100644 index 0000000000..af8557cda0 --- /dev/null +++ b/pkg/grpc/service_accounts_service.go @@ -0,0 +1,44 @@ +package grpc + +import ( + "context" + + "github.com/superplanehq/superplane/pkg/authorization" + "github.com/superplanehq/superplane/pkg/grpc/actions/serviceaccounts" + pb "github.com/superplanehq/superplane/pkg/protos/service_accounts" +) + +type ServiceAccountsService struct { + pb.UnimplementedServiceAccountsServer + authService authorization.Authorization +} + +func NewServiceAccountsService(authService authorization.Authorization) *ServiceAccountsService { + return &ServiceAccountsService{ + authService: authService, + } 
+} + +func (s *ServiceAccountsService) CreateServiceAccount(ctx context.Context, req *pb.CreateServiceAccountRequest) (*pb.CreateServiceAccountResponse, error) { + return serviceaccounts.CreateServiceAccount(ctx, req, s.authService) +} + +func (s *ServiceAccountsService) ListServiceAccounts(ctx context.Context, req *pb.ListServiceAccountsRequest) (*pb.ListServiceAccountsResponse, error) { + return serviceaccounts.ListServiceAccounts(ctx) +} + +func (s *ServiceAccountsService) DescribeServiceAccount(ctx context.Context, req *pb.DescribeServiceAccountRequest) (*pb.DescribeServiceAccountResponse, error) { + return serviceaccounts.DescribeServiceAccount(ctx, req) +} + +func (s *ServiceAccountsService) UpdateServiceAccount(ctx context.Context, req *pb.UpdateServiceAccountRequest) (*pb.UpdateServiceAccountResponse, error) { + return serviceaccounts.UpdateServiceAccount(ctx, req) +} + +func (s *ServiceAccountsService) DeleteServiceAccount(ctx context.Context, req *pb.DeleteServiceAccountRequest) (*pb.DeleteServiceAccountResponse, error) { + return serviceaccounts.DeleteServiceAccount(ctx, req, s.authService) +} + +func (s *ServiceAccountsService) RegenerateServiceAccountToken(ctx context.Context, req *pb.RegenerateServiceAccountTokenRequest) (*pb.RegenerateServiceAccountTokenResponse, error) { + return serviceaccounts.RegenerateServiceAccountToken(ctx, req) +} diff --git a/pkg/grpc/users_service.go b/pkg/grpc/users_service.go index 93ee092ad1..653e904826 100644 --- a/pkg/grpc/users_service.go +++ b/pkg/grpc/users_service.go @@ -34,5 +34,5 @@ func (s *UsersService) ListUserRoles(ctx context.Context, req *pb.ListUserRolesR func (s *UsersService) ListUsers(ctx context.Context, req *pb.ListUsersRequest) (*pb.ListUsersResponse, error) { domainType := ctx.Value(authorization.DomainTypeContextKey).(string) domainID := ctx.Value(authorization.DomainIdContextKey).(string) - return auth.ListUsers(ctx, domainType, domainID, s.authService) + return auth.ListUsers(ctx, domainType, 
domainID, req.IncludeServiceAccounts, s.authService) } diff --git a/pkg/models/account_test.go b/pkg/models/account_test.go index 21aea47257..c3b7c43ce5 100644 --- a/pkg/models/account_test.go +++ b/pkg/models/account_test.go @@ -92,8 +92,8 @@ func TestAccount_UpdateEmail(t *testing.T) { user := &User{ OrganizationID: orgID, - AccountID: account.ID, - Email: account.Email, + AccountID: &account.ID, + Email: &account.Email, Name: account.Name, } err = database.Conn().Create(user).Error @@ -103,8 +103,8 @@ func TestAccount_UpdateEmail(t *testing.T) { require.NoError(t, err) otherUser := &User{ OrganizationID: orgID, - AccountID: otherAccount.ID, - Email: otherAccount.Email, + AccountID: &otherAccount.ID, + Email: &otherAccount.Email, Name: otherAccount.Name, } err = database.Conn().Create(otherUser).Error @@ -128,13 +128,13 @@ func TestAccount_UpdateEmail(t *testing.T) { var userFromDB User err = database.Conn().Where("id = ?", user.ID).First(&userFromDB).Error require.NoError(t, err) - assert.Equal(t, normalizedNewEmail, userFromDB.Email) + assert.Equal(t, normalizedNewEmail, userFromDB.GetEmail()) var otherUserFromDB User err = database.Conn().Where("id = ?", otherUser.ID).First(&otherUserFromDB).Error require.NoError(t, err) - assert.Equal(t, otherAccount.Email, otherUserFromDB.Email) - assert.NotEqual(t, normalizedNewEmail, otherUserFromDB.Email) + assert.Equal(t, otherAccount.Email, otherUserFromDB.GetEmail()) + assert.NotEqual(t, normalizedNewEmail, otherUserFromDB.GetEmail()) }) t.Run("should normalize email", func(t *testing.T) { @@ -191,8 +191,8 @@ func TestAccount_UpdateEmailForProvider(t *testing.T) { user := &User{ OrganizationID: orgID, - AccountID: account.ID, - Email: account.Email, + AccountID: &account.ID, + Email: &account.Email, Name: account.Name, } err = database.Conn().Create(user).Error @@ -238,7 +238,7 @@ func TestAccount_UpdateEmailForProvider(t *testing.T) { var userFromDB User err = database.Conn().Where("id = ?", 
user.ID).First(&userFromDB).Error require.NoError(t, err) - assert.Equal(t, normalizedNewEmail, userFromDB.Email) + assert.Equal(t, normalizedNewEmail, userFromDB.GetEmail()) var githubProviderFromDB AccountProvider err = database.Conn().Where("id = ?", githubProvider.ID).First(&githubProviderFromDB).Error diff --git a/pkg/models/constants.go b/pkg/models/constants.go index 331a441ee2..35aee9b034 100644 --- a/pkg/models/constants.go +++ b/pkg/models/constants.go @@ -25,6 +25,10 @@ const ( MetaDescOrgOwner = "Full control over organization settings, billing, and member management." MetaDescOrgAdmin = "Can manage canvases, users, groups, and roles within the organization." MetaDescOrgViewer = "Read-only access to organization resources and information." + + // User types + UserTypeHuman = "human" + UserTypeServiceAccount = "service_account" ) var ( diff --git a/pkg/models/user.go b/pkg/models/user.go index 658a74057e..1af60ea6d8 100644 --- a/pkg/models/user.go +++ b/pkg/models/user.go @@ -12,15 +12,29 @@ import ( type User struct { ID uuid.UUID `gorm:"type:uuid;primary_key;default:gen_random_uuid()"` OrganizationID uuid.UUID - AccountID uuid.UUID - Email string + AccountID *uuid.UUID + Email *string Name string + Type string + Description *string + CreatedBy *uuid.UUID TokenHash string CreatedAt time.Time UpdatedAt time.Time DeletedAt gorm.DeletedAt } +func (u *User) IsServiceAccount() bool { + return u.Type == UserTypeServiceAccount +} + +func (u *User) GetEmail() string { + if u.Email != nil { + return *u.Email + } + return "" +} + func (u *User) Delete() error { now := time.Now() return database.Conn().Unscoped(). 
@@ -53,11 +67,13 @@ func CreateUser(orgID, accountID uuid.UUID, email, name string) (*User, error) { } func CreateUserInTransaction(tx *gorm.DB, orgID, accountID uuid.UUID, email, name string) (*User, error) { + normalizedEmail := utils.NormalizeEmail(email) user := &User{ OrganizationID: orgID, - AccountID: accountID, - Email: utils.NormalizeEmail(email), + AccountID: &accountID, + Email: &normalizedEmail, Name: name, + Type: UserTypeHuman, } err := tx.Create(user).Error @@ -68,6 +84,39 @@ func CreateUserInTransaction(tx *gorm.DB, orgID, accountID uuid.UUID, email, nam return user, nil } +func CreateServiceAccount(tx *gorm.DB, orgID uuid.UUID, name string, description *string, createdBy uuid.UUID) (*User, error) { + user := &User{ + OrganizationID: orgID, + Name: name, + Type: UserTypeServiceAccount, + Description: description, + CreatedBy: &createdBy, + } + + err := tx.Create(user).Error + if err != nil { + return nil, err + } + + return user, nil +} + +func FindServiceAccountsByOrganization(orgID string) ([]User, error) { + return FindServiceAccountsByOrganizationInTransaction(database.Conn(), orgID) +} + +func FindServiceAccountsByOrganizationInTransaction(tx *gorm.DB, orgID string) ([]User, error) { + var users []User + + err := tx. + Where("organization_id = ?", orgID). + Where("type = ?", UserTypeServiceAccount). + Find(&users). + Error + + return users, err +} + func FindUnscopedUserByID(id string) (*User, error) { var user User userUUID, err := uuid.Parse(id) @@ -79,6 +128,33 @@ func FindUnscopedUserByID(id string) (*User, error) { return &user, err } +func FindUsersByIDs(ids []string) ([]User, error) { + if len(ids) == 0 { + return nil, nil + } + + var users []User + err := database.Conn(). + Where("id IN ?", ids). + Find(&users).Error + + return users, err +} + +func FindHumanUsersByIDs(ids []string) ([]User, error) { + if len(ids) == 0 { + return nil, nil + } + + var users []User + err := database.Conn(). + Where("id IN ?", ids). 
+ Where("type = ?", UserTypeHuman). + Find(&users).Error + + return users, err +} + // NOTE: this method returns soft deleted users too. // Make sure you really need to use it this one, // and not FindActiveUserByID instead. diff --git a/pkg/openapi_client/.openapi-generator/FILES b/pkg/openapi_client/.openapi-generator/FILES index 48df6d2af3..a34eb5ca84 100644 --- a/pkg/openapi_client/.openapi-generator/FILES +++ b/pkg/openapi_client/.openapi-generator/FILES @@ -15,6 +15,7 @@ api_me.go api_organization.go api_roles.go api_secret.go +api_service_accounts.go api_trigger.go api_users.go api_widget.go @@ -179,6 +180,15 @@ docs/SecretsUpdateSecretBody.md docs/SecretsUpdateSecretNameBody.md docs/SecretsUpdateSecretNameResponse.md docs/SecretsUpdateSecretResponse.md +docs/ServiceAccountsAPI.md +docs/ServiceAccountsCreateServiceAccountRequest.md +docs/ServiceAccountsCreateServiceAccountResponse.md +docs/ServiceAccountsDescribeServiceAccountResponse.md +docs/ServiceAccountsListServiceAccountsResponse.md +docs/ServiceAccountsRegenerateServiceAccountTokenResponse.md +docs/ServiceAccountsServiceAccount.md +docs/ServiceAccountsUpdateServiceAccountBody.md +docs/ServiceAccountsUpdateServiceAccountResponse.md docs/SuperplaneBlueprintsOutputChannel.md docs/SuperplaneBlueprintsUserRef.md docs/SuperplaneCanvasesUserRef.md @@ -352,6 +362,14 @@ model_secrets_update_secret_body.go model_secrets_update_secret_name_body.go model_secrets_update_secret_name_response.go model_secrets_update_secret_response.go +model_service_accounts_create_service_account_request.go +model_service_accounts_create_service_account_response.go +model_service_accounts_describe_service_account_response.go +model_service_accounts_list_service_accounts_response.go +model_service_accounts_regenerate_service_account_token_response.go +model_service_accounts_service_account.go +model_service_accounts_update_service_account_body.go +model_service_accounts_update_service_account_response.go 
model_superplane_blueprints_output_channel.go model_superplane_blueprints_user_ref.go model_superplane_canvases_user_ref.go @@ -387,6 +405,7 @@ test/api_me_test.go test/api_organization_test.go test/api_roles_test.go test/api_secret_test.go +test/api_service_accounts_test.go test/api_trigger_test.go test/api_users_test.go test/api_widget_test.go diff --git a/pkg/openapi_client/api_service_accounts.go b/pkg/openapi_client/api_service_accounts.go new file mode 100644 index 0000000000..9e4b13259c --- /dev/null +++ b/pkg/openapi_client/api_service_accounts.go @@ -0,0 +1,721 @@ +/* +Superplane Organizations API + +API for managing organizations in the Superplane service + +API version: 1.0 +Contact: support@superplane.com +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi_client + +import ( + "bytes" + "context" + "io" + "net/http" + "net/url" + "strings" +) + +// ServiceAccountsAPIService ServiceAccountsAPI service +type ServiceAccountsAPIService service + +type ApiServiceAccountsCreateServiceAccountRequest struct { + ctx context.Context + ApiService *ServiceAccountsAPIService + body *ServiceAccountsCreateServiceAccountRequest +} + +func (r ApiServiceAccountsCreateServiceAccountRequest) Body(body ServiceAccountsCreateServiceAccountRequest) ApiServiceAccountsCreateServiceAccountRequest { + r.body = &body + return r +} + +func (r ApiServiceAccountsCreateServiceAccountRequest) Execute() (*ServiceAccountsCreateServiceAccountResponse, *http.Response, error) { + return r.ApiService.ServiceAccountsCreateServiceAccountExecute(r) +} + +/* +ServiceAccountsCreateServiceAccount Create a service account + +Creates a new service account in the organization + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ @return ApiServiceAccountsCreateServiceAccountRequest +*/ +func (a *ServiceAccountsAPIService) ServiceAccountsCreateServiceAccount(ctx context.Context) ApiServiceAccountsCreateServiceAccountRequest { + return ApiServiceAccountsCreateServiceAccountRequest{ + ApiService: a, + ctx: ctx, + } +} + +// Execute executes the request +// +// @return ServiceAccountsCreateServiceAccountResponse +func (a *ServiceAccountsAPIService) ServiceAccountsCreateServiceAccountExecute(r ApiServiceAccountsCreateServiceAccountRequest) (*ServiceAccountsCreateServiceAccountResponse, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodPost + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *ServiceAccountsCreateServiceAccountResponse + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ServiceAccountsAPIService.ServiceAccountsCreateServiceAccount") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/api/v1/service-accounts" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + if r.body == nil { + return localVarReturnValue, nil, reportError("body is required and must be specified") + } + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + // body params + localVarPostBody = r.body + req, err := 
a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + var v GooglerpcStatus + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + +type ApiServiceAccountsDeleteServiceAccountRequest struct { + ctx context.Context + ApiService *ServiceAccountsAPIService + id string +} + +func (r ApiServiceAccountsDeleteServiceAccountRequest) Execute() (map[string]interface{}, *http.Response, error) { + return r.ApiService.ServiceAccountsDeleteServiceAccountExecute(r) +} + +/* +ServiceAccountsDeleteServiceAccount Delete a service account + +Deletes a service account and removes its RBAC policies + + @param ctx context.Context - for authentication, 
logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @param id + @return ApiServiceAccountsDeleteServiceAccountRequest +*/ +func (a *ServiceAccountsAPIService) ServiceAccountsDeleteServiceAccount(ctx context.Context, id string) ApiServiceAccountsDeleteServiceAccountRequest { + return ApiServiceAccountsDeleteServiceAccountRequest{ + ApiService: a, + ctx: ctx, + id: id, + } +} + +// Execute executes the request +// +// @return map[string]interface{} +func (a *ServiceAccountsAPIService) ServiceAccountsDeleteServiceAccountExecute(r ApiServiceAccountsDeleteServiceAccountRequest) (map[string]interface{}, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodDelete + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue map[string]interface{} + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ServiceAccountsAPIService.ServiceAccountsDeleteServiceAccount") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/api/v1/service-accounts/{id}" + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + req, err := 
a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + var v GooglerpcStatus + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + +type ApiServiceAccountsDescribeServiceAccountRequest struct { + ctx context.Context + ApiService *ServiceAccountsAPIService + id string +} + +func (r ApiServiceAccountsDescribeServiceAccountRequest) Execute() (*ServiceAccountsDescribeServiceAccountResponse, *http.Response, error) { + return r.ApiService.ServiceAccountsDescribeServiceAccountExecute(r) +} + +/* +ServiceAccountsDescribeServiceAccount Describe a service account + +Returns details of a specific service account + + @param ctx 
context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @param id + @return ApiServiceAccountsDescribeServiceAccountRequest +*/ +func (a *ServiceAccountsAPIService) ServiceAccountsDescribeServiceAccount(ctx context.Context, id string) ApiServiceAccountsDescribeServiceAccountRequest { + return ApiServiceAccountsDescribeServiceAccountRequest{ + ApiService: a, + ctx: ctx, + id: id, + } +} + +// Execute executes the request +// +// @return ServiceAccountsDescribeServiceAccountResponse +func (a *ServiceAccountsAPIService) ServiceAccountsDescribeServiceAccountExecute(r ApiServiceAccountsDescribeServiceAccountRequest) (*ServiceAccountsDescribeServiceAccountResponse, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodGet + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *ServiceAccountsDescribeServiceAccountResponse + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ServiceAccountsAPIService.ServiceAccountsDescribeServiceAccount") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/api/v1/service-accounts/{id}" + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if 
localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + var v GooglerpcStatus + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + +type ApiServiceAccountsListServiceAccountsRequest struct { + ctx context.Context + ApiService *ServiceAccountsAPIService +} + +func (r ApiServiceAccountsListServiceAccountsRequest) Execute() (*ServiceAccountsListServiceAccountsResponse, *http.Response, error) { + return r.ApiService.ServiceAccountsListServiceAccountsExecute(r) +} + +/* +ServiceAccountsListServiceAccounts List service accounts 
+ +Returns all service accounts in the organization + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @return ApiServiceAccountsListServiceAccountsRequest +*/ +func (a *ServiceAccountsAPIService) ServiceAccountsListServiceAccounts(ctx context.Context) ApiServiceAccountsListServiceAccountsRequest { + return ApiServiceAccountsListServiceAccountsRequest{ + ApiService: a, + ctx: ctx, + } +} + +// Execute executes the request +// +// @return ServiceAccountsListServiceAccountsResponse +func (a *ServiceAccountsAPIService) ServiceAccountsListServiceAccountsExecute(r ApiServiceAccountsListServiceAccountsRequest) (*ServiceAccountsListServiceAccountsResponse, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodGet + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *ServiceAccountsListServiceAccountsResponse + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ServiceAccountsAPIService.ServiceAccountsListServiceAccounts") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/api/v1/service-accounts" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + req, err := 
a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + var v GooglerpcStatus + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + +type ApiServiceAccountsRegenerateServiceAccountTokenRequest struct { + ctx context.Context + ApiService *ServiceAccountsAPIService + id string + body *map[string]interface{} +} + +func (r ApiServiceAccountsRegenerateServiceAccountTokenRequest) Body(body map[string]interface{}) ApiServiceAccountsRegenerateServiceAccountTokenRequest { + r.body = &body + return r +} + +func (r ApiServiceAccountsRegenerateServiceAccountTokenRequest) Execute() (*ServiceAccountsRegenerateServiceAccountTokenResponse, 
*http.Response, error) { + return r.ApiService.ServiceAccountsRegenerateServiceAccountTokenExecute(r) +} + +/* +ServiceAccountsRegenerateServiceAccountToken Regenerate service account token + +Regenerates the API token for a service account + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @param id + @return ApiServiceAccountsRegenerateServiceAccountTokenRequest +*/ +func (a *ServiceAccountsAPIService) ServiceAccountsRegenerateServiceAccountToken(ctx context.Context, id string) ApiServiceAccountsRegenerateServiceAccountTokenRequest { + return ApiServiceAccountsRegenerateServiceAccountTokenRequest{ + ApiService: a, + ctx: ctx, + id: id, + } +} + +// Execute executes the request +// +// @return ServiceAccountsRegenerateServiceAccountTokenResponse +func (a *ServiceAccountsAPIService) ServiceAccountsRegenerateServiceAccountTokenExecute(r ApiServiceAccountsRegenerateServiceAccountTokenRequest) (*ServiceAccountsRegenerateServiceAccountTokenResponse, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodPost + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *ServiceAccountsRegenerateServiceAccountTokenResponse + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ServiceAccountsAPIService.ServiceAccountsRegenerateServiceAccountToken") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/api/v1/service-accounts/{id}/token" + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + if r.body == nil { + return localVarReturnValue, nil, reportError("body is required and must be specified") + } + + // to determine the Content-Type header + 
localVarHTTPContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + // body params + localVarPostBody = r.body + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + var v GooglerpcStatus + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } 
+ return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + +type ApiServiceAccountsUpdateServiceAccountRequest struct { + ctx context.Context + ApiService *ServiceAccountsAPIService + id string + body *ServiceAccountsUpdateServiceAccountBody +} + +func (r ApiServiceAccountsUpdateServiceAccountRequest) Body(body ServiceAccountsUpdateServiceAccountBody) ApiServiceAccountsUpdateServiceAccountRequest { + r.body = &body + return r +} + +func (r ApiServiceAccountsUpdateServiceAccountRequest) Execute() (*ServiceAccountsUpdateServiceAccountResponse, *http.Response, error) { + return r.ApiService.ServiceAccountsUpdateServiceAccountExecute(r) +} + +/* +ServiceAccountsUpdateServiceAccount Update a service account + +Updates the name or description of a service account + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @param id + @return ApiServiceAccountsUpdateServiceAccountRequest +*/ +func (a *ServiceAccountsAPIService) ServiceAccountsUpdateServiceAccount(ctx context.Context, id string) ApiServiceAccountsUpdateServiceAccountRequest { + return ApiServiceAccountsUpdateServiceAccountRequest{ + ApiService: a, + ctx: ctx, + id: id, + } +} + +// Execute executes the request +// +// @return ServiceAccountsUpdateServiceAccountResponse +func (a *ServiceAccountsAPIService) ServiceAccountsUpdateServiceAccountExecute(r ApiServiceAccountsUpdateServiceAccountRequest) (*ServiceAccountsUpdateServiceAccountResponse, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodPatch + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *ServiceAccountsUpdateServiceAccountResponse + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ServiceAccountsAPIService.ServiceAccountsUpdateServiceAccount") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: 
err.Error()} + } + + localVarPath := localBasePath + "/api/v1/service-accounts/{id}" + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + if r.body == nil { + return localVarReturnValue, nil, reportError("body is required and must be specified") + } + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + // body params + localVarPostBody = r.body + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + var v GooglerpcStatus + err = a.client.decode(&v, localVarBody, 
localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} diff --git a/pkg/openapi_client/api_users.go b/pkg/openapi_client/api_users.go index 68c499bac2..23a4a13c1a 100644 --- a/pkg/openapi_client/api_users.go +++ b/pkg/openapi_client/api_users.go @@ -290,10 +290,11 @@ func (a *UsersAPIService) UsersListUserRolesExecute(r ApiUsersListUserRolesReque } type ApiUsersListUsersRequest struct { - ctx context.Context - ApiService *UsersAPIService - domainType *string - domainId *string + ctx context.Context + ApiService *UsersAPIService + domainType *string + domainId *string + includeServiceAccounts *bool } func (r ApiUsersListUsersRequest) DomainType(domainType string) ApiUsersListUsersRequest { @@ -306,6 +307,11 @@ func (r ApiUsersListUsersRequest) DomainId(domainId string) ApiUsersListUsersReq return r } +func (r ApiUsersListUsersRequest) IncludeServiceAccounts(includeServiceAccounts bool) ApiUsersListUsersRequest { + r.includeServiceAccounts = &includeServiceAccounts + return r +} + func (r ApiUsersListUsersRequest) Execute() (*UsersListUsersResponse, *http.Response, error) { return r.ApiService.UsersListUsersExecute(r) } @@ -356,6 +362,9 @@ func (a *UsersAPIService) UsersListUsersExecute(r ApiUsersListUsersRequest) (*Us if r.domainId != nil { parameterAddToHeaderOrQuery(localVarQueryParams, "domainId", r.domainId, "", "") } + if r.includeServiceAccounts != nil { + 
parameterAddToHeaderOrQuery(localVarQueryParams, "includeServiceAccounts", r.includeServiceAccounts, "", "") + } // to determine the Content-Type header localVarHTTPContentTypes := []string{} diff --git a/pkg/openapi_client/client.go b/pkg/openapi_client/client.go index ced4b51c04..d82aba33e4 100644 --- a/pkg/openapi_client/client.go +++ b/pkg/openapi_client/client.go @@ -73,6 +73,8 @@ type APIClient struct { SecretAPI *SecretAPIService + ServiceAccountsAPI *ServiceAccountsAPIService + TriggerAPI *TriggerAPIService UsersAPI *UsersAPIService @@ -108,6 +110,7 @@ func NewAPIClient(cfg *Configuration) *APIClient { c.OrganizationAPI = (*OrganizationAPIService)(&c.common) c.RolesAPI = (*RolesAPIService)(&c.common) c.SecretAPI = (*SecretAPIService)(&c.common) + c.ServiceAccountsAPI = (*ServiceAccountsAPIService)(&c.common) c.TriggerAPI = (*TriggerAPIService)(&c.common) c.UsersAPI = (*UsersAPIService)(&c.common) c.WidgetAPI = (*WidgetAPIService)(&c.common) diff --git a/pkg/openapi_client/model_service_accounts_create_service_account_request.go b/pkg/openapi_client/model_service_accounts_create_service_account_request.go new file mode 100644 index 0000000000..858868c7a4 --- /dev/null +++ b/pkg/openapi_client/model_service_accounts_create_service_account_request.go @@ -0,0 +1,197 @@ +/* +Superplane Organizations API + +API for managing organizations in the Superplane service + +API version: 1.0 +Contact: support@superplane.com +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
+ +package openapi_client + +import ( + "encoding/json" +) + +// checks if the ServiceAccountsCreateServiceAccountRequest type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &ServiceAccountsCreateServiceAccountRequest{} + +// ServiceAccountsCreateServiceAccountRequest struct for ServiceAccountsCreateServiceAccountRequest +type ServiceAccountsCreateServiceAccountRequest struct { + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + Role *string `json:"role,omitempty"` +} + +// NewServiceAccountsCreateServiceAccountRequest instantiates a new ServiceAccountsCreateServiceAccountRequest object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewServiceAccountsCreateServiceAccountRequest() *ServiceAccountsCreateServiceAccountRequest { + this := ServiceAccountsCreateServiceAccountRequest{} + return &this +} + +// NewServiceAccountsCreateServiceAccountRequestWithDefaults instantiates a new ServiceAccountsCreateServiceAccountRequest object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewServiceAccountsCreateServiceAccountRequestWithDefaults() *ServiceAccountsCreateServiceAccountRequest { + this := ServiceAccountsCreateServiceAccountRequest{} + return &this +} + +// GetName returns the Name field value if set, zero value otherwise. +func (o *ServiceAccountsCreateServiceAccountRequest) GetName() string { + if o == nil || IsNil(o.Name) { + var ret string + return ret + } + return *o.Name +} + +// GetNameOk returns a tuple with the Name field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *ServiceAccountsCreateServiceAccountRequest) GetNameOk() (*string, bool) { + if o == nil || IsNil(o.Name) { + return nil, false + } + return o.Name, true +} + +// HasName returns a boolean if a field has been set. +func (o *ServiceAccountsCreateServiceAccountRequest) HasName() bool { + if o != nil && !IsNil(o.Name) { + return true + } + + return false +} + +// SetName gets a reference to the given string and assigns it to the Name field. +func (o *ServiceAccountsCreateServiceAccountRequest) SetName(v string) { + o.Name = &v +} + +// GetDescription returns the Description field value if set, zero value otherwise. +func (o *ServiceAccountsCreateServiceAccountRequest) GetDescription() string { + if o == nil || IsNil(o.Description) { + var ret string + return ret + } + return *o.Description +} + +// GetDescriptionOk returns a tuple with the Description field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsCreateServiceAccountRequest) GetDescriptionOk() (*string, bool) { + if o == nil || IsNil(o.Description) { + return nil, false + } + return o.Description, true +} + +// HasDescription returns a boolean if a field has been set. +func (o *ServiceAccountsCreateServiceAccountRequest) HasDescription() bool { + if o != nil && !IsNil(o.Description) { + return true + } + + return false +} + +// SetDescription gets a reference to the given string and assigns it to the Description field. +func (o *ServiceAccountsCreateServiceAccountRequest) SetDescription(v string) { + o.Description = &v +} + +// GetRole returns the Role field value if set, zero value otherwise. +func (o *ServiceAccountsCreateServiceAccountRequest) GetRole() string { + if o == nil || IsNil(o.Role) { + var ret string + return ret + } + return *o.Role +} + +// GetRoleOk returns a tuple with the Role field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *ServiceAccountsCreateServiceAccountRequest) GetRoleOk() (*string, bool) { + if o == nil || IsNil(o.Role) { + return nil, false + } + return o.Role, true +} + +// HasRole returns a boolean if a field has been set. +func (o *ServiceAccountsCreateServiceAccountRequest) HasRole() bool { + if o != nil && !IsNil(o.Role) { + return true + } + + return false +} + +// SetRole gets a reference to the given string and assigns it to the Role field. +func (o *ServiceAccountsCreateServiceAccountRequest) SetRole(v string) { + o.Role = &v +} + +func (o ServiceAccountsCreateServiceAccountRequest) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o ServiceAccountsCreateServiceAccountRequest) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Name) { + toSerialize["name"] = o.Name + } + if !IsNil(o.Description) { + toSerialize["description"] = o.Description + } + if !IsNil(o.Role) { + toSerialize["role"] = o.Role + } + return toSerialize, nil +} + +type NullableServiceAccountsCreateServiceAccountRequest struct { + value *ServiceAccountsCreateServiceAccountRequest + isSet bool +} + +func (v NullableServiceAccountsCreateServiceAccountRequest) Get() *ServiceAccountsCreateServiceAccountRequest { + return v.value +} + +func (v *NullableServiceAccountsCreateServiceAccountRequest) Set(val *ServiceAccountsCreateServiceAccountRequest) { + v.value = val + v.isSet = true +} + +func (v NullableServiceAccountsCreateServiceAccountRequest) IsSet() bool { + return v.isSet +} + +func (v *NullableServiceAccountsCreateServiceAccountRequest) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableServiceAccountsCreateServiceAccountRequest(val *ServiceAccountsCreateServiceAccountRequest) *NullableServiceAccountsCreateServiceAccountRequest { + return &NullableServiceAccountsCreateServiceAccountRequest{value: val, isSet: true} +} + 
+func (v NullableServiceAccountsCreateServiceAccountRequest) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableServiceAccountsCreateServiceAccountRequest) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/pkg/openapi_client/model_service_accounts_create_service_account_response.go b/pkg/openapi_client/model_service_accounts_create_service_account_response.go new file mode 100644 index 0000000000..a7e23d2215 --- /dev/null +++ b/pkg/openapi_client/model_service_accounts_create_service_account_response.go @@ -0,0 +1,161 @@ +/* +Superplane Organizations API + +API for managing organizations in the Superplane service + +API version: 1.0 +Contact: support@superplane.com +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi_client + +import ( + "encoding/json" +) + +// checks if the ServiceAccountsCreateServiceAccountResponse type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &ServiceAccountsCreateServiceAccountResponse{} + +// ServiceAccountsCreateServiceAccountResponse struct for ServiceAccountsCreateServiceAccountResponse +type ServiceAccountsCreateServiceAccountResponse struct { + ServiceAccount *ServiceAccountsServiceAccount `json:"serviceAccount,omitempty"` + Token *string `json:"token,omitempty"` +} + +// NewServiceAccountsCreateServiceAccountResponse instantiates a new ServiceAccountsCreateServiceAccountResponse object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewServiceAccountsCreateServiceAccountResponse() *ServiceAccountsCreateServiceAccountResponse { + this := ServiceAccountsCreateServiceAccountResponse{} + return &this +} + +// NewServiceAccountsCreateServiceAccountResponseWithDefaults 
instantiates a new ServiceAccountsCreateServiceAccountResponse object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewServiceAccountsCreateServiceAccountResponseWithDefaults() *ServiceAccountsCreateServiceAccountResponse { + this := ServiceAccountsCreateServiceAccountResponse{} + return &this +} + +// GetServiceAccount returns the ServiceAccount field value if set, zero value otherwise. +func (o *ServiceAccountsCreateServiceAccountResponse) GetServiceAccount() ServiceAccountsServiceAccount { + if o == nil || IsNil(o.ServiceAccount) { + var ret ServiceAccountsServiceAccount + return ret + } + return *o.ServiceAccount +} + +// GetServiceAccountOk returns a tuple with the ServiceAccount field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsCreateServiceAccountResponse) GetServiceAccountOk() (*ServiceAccountsServiceAccount, bool) { + if o == nil || IsNil(o.ServiceAccount) { + return nil, false + } + return o.ServiceAccount, true +} + +// HasServiceAccount returns a boolean if a field has been set. +func (o *ServiceAccountsCreateServiceAccountResponse) HasServiceAccount() bool { + if o != nil && !IsNil(o.ServiceAccount) { + return true + } + + return false +} + +// SetServiceAccount gets a reference to the given ServiceAccountsServiceAccount and assigns it to the ServiceAccount field. +func (o *ServiceAccountsCreateServiceAccountResponse) SetServiceAccount(v ServiceAccountsServiceAccount) { + o.ServiceAccount = &v +} + +// GetToken returns the Token field value if set, zero value otherwise. +func (o *ServiceAccountsCreateServiceAccountResponse) GetToken() string { + if o == nil || IsNil(o.Token) { + var ret string + return ret + } + return *o.Token +} + +// GetTokenOk returns a tuple with the Token field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *ServiceAccountsCreateServiceAccountResponse) GetTokenOk() (*string, bool) { + if o == nil || IsNil(o.Token) { + return nil, false + } + return o.Token, true +} + +// HasToken returns a boolean if a field has been set. +func (o *ServiceAccountsCreateServiceAccountResponse) HasToken() bool { + if o != nil && !IsNil(o.Token) { + return true + } + + return false +} + +// SetToken gets a reference to the given string and assigns it to the Token field. +func (o *ServiceAccountsCreateServiceAccountResponse) SetToken(v string) { + o.Token = &v +} + +func (o ServiceAccountsCreateServiceAccountResponse) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o ServiceAccountsCreateServiceAccountResponse) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.ServiceAccount) { + toSerialize["serviceAccount"] = o.ServiceAccount + } + if !IsNil(o.Token) { + toSerialize["token"] = o.Token + } + return toSerialize, nil +} + +type NullableServiceAccountsCreateServiceAccountResponse struct { + value *ServiceAccountsCreateServiceAccountResponse + isSet bool +} + +func (v NullableServiceAccountsCreateServiceAccountResponse) Get() *ServiceAccountsCreateServiceAccountResponse { + return v.value +} + +func (v *NullableServiceAccountsCreateServiceAccountResponse) Set(val *ServiceAccountsCreateServiceAccountResponse) { + v.value = val + v.isSet = true +} + +func (v NullableServiceAccountsCreateServiceAccountResponse) IsSet() bool { + return v.isSet +} + +func (v *NullableServiceAccountsCreateServiceAccountResponse) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableServiceAccountsCreateServiceAccountResponse(val *ServiceAccountsCreateServiceAccountResponse) *NullableServiceAccountsCreateServiceAccountResponse { + return &NullableServiceAccountsCreateServiceAccountResponse{value: val, isSet: true} +} + +func (v 
NullableServiceAccountsCreateServiceAccountResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableServiceAccountsCreateServiceAccountResponse) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/pkg/openapi_client/model_service_accounts_describe_service_account_response.go b/pkg/openapi_client/model_service_accounts_describe_service_account_response.go new file mode 100644 index 0000000000..30d8e6057e --- /dev/null +++ b/pkg/openapi_client/model_service_accounts_describe_service_account_response.go @@ -0,0 +1,125 @@ +/* +Superplane Organizations API + +API for managing organizations in the Superplane service + +API version: 1.0 +Contact: support@superplane.com +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi_client + +import ( + "encoding/json" +) + +// checks if the ServiceAccountsDescribeServiceAccountResponse type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &ServiceAccountsDescribeServiceAccountResponse{} + +// ServiceAccountsDescribeServiceAccountResponse struct for ServiceAccountsDescribeServiceAccountResponse +type ServiceAccountsDescribeServiceAccountResponse struct { + ServiceAccount *ServiceAccountsServiceAccount `json:"serviceAccount,omitempty"` +} + +// NewServiceAccountsDescribeServiceAccountResponse instantiates a new ServiceAccountsDescribeServiceAccountResponse object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewServiceAccountsDescribeServiceAccountResponse() *ServiceAccountsDescribeServiceAccountResponse { + this := ServiceAccountsDescribeServiceAccountResponse{} + return &this +} + +// NewServiceAccountsDescribeServiceAccountResponseWithDefaults instantiates a new 
ServiceAccountsDescribeServiceAccountResponse object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewServiceAccountsDescribeServiceAccountResponseWithDefaults() *ServiceAccountsDescribeServiceAccountResponse { + this := ServiceAccountsDescribeServiceAccountResponse{} + return &this +} + +// GetServiceAccount returns the ServiceAccount field value if set, zero value otherwise. +func (o *ServiceAccountsDescribeServiceAccountResponse) GetServiceAccount() ServiceAccountsServiceAccount { + if o == nil || IsNil(o.ServiceAccount) { + var ret ServiceAccountsServiceAccount + return ret + } + return *o.ServiceAccount +} + +// GetServiceAccountOk returns a tuple with the ServiceAccount field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsDescribeServiceAccountResponse) GetServiceAccountOk() (*ServiceAccountsServiceAccount, bool) { + if o == nil || IsNil(o.ServiceAccount) { + return nil, false + } + return o.ServiceAccount, true +} + +// HasServiceAccount returns a boolean if a field has been set. +func (o *ServiceAccountsDescribeServiceAccountResponse) HasServiceAccount() bool { + if o != nil && !IsNil(o.ServiceAccount) { + return true + } + + return false +} + +// SetServiceAccount gets a reference to the given ServiceAccountsServiceAccount and assigns it to the ServiceAccount field. 
+func (o *ServiceAccountsDescribeServiceAccountResponse) SetServiceAccount(v ServiceAccountsServiceAccount) { + o.ServiceAccount = &v +} + +func (o ServiceAccountsDescribeServiceAccountResponse) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o ServiceAccountsDescribeServiceAccountResponse) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.ServiceAccount) { + toSerialize["serviceAccount"] = o.ServiceAccount + } + return toSerialize, nil +} + +type NullableServiceAccountsDescribeServiceAccountResponse struct { + value *ServiceAccountsDescribeServiceAccountResponse + isSet bool +} + +func (v NullableServiceAccountsDescribeServiceAccountResponse) Get() *ServiceAccountsDescribeServiceAccountResponse { + return v.value +} + +func (v *NullableServiceAccountsDescribeServiceAccountResponse) Set(val *ServiceAccountsDescribeServiceAccountResponse) { + v.value = val + v.isSet = true +} + +func (v NullableServiceAccountsDescribeServiceAccountResponse) IsSet() bool { + return v.isSet +} + +func (v *NullableServiceAccountsDescribeServiceAccountResponse) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableServiceAccountsDescribeServiceAccountResponse(val *ServiceAccountsDescribeServiceAccountResponse) *NullableServiceAccountsDescribeServiceAccountResponse { + return &NullableServiceAccountsDescribeServiceAccountResponse{value: val, isSet: true} +} + +func (v NullableServiceAccountsDescribeServiceAccountResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableServiceAccountsDescribeServiceAccountResponse) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/pkg/openapi_client/model_service_accounts_list_service_accounts_response.go b/pkg/openapi_client/model_service_accounts_list_service_accounts_response.go new file mode 
100644 index 0000000000..b6d914a903 --- /dev/null +++ b/pkg/openapi_client/model_service_accounts_list_service_accounts_response.go @@ -0,0 +1,125 @@ +/* +Superplane Organizations API + +API for managing organizations in the Superplane service + +API version: 1.0 +Contact: support@superplane.com +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi_client + +import ( + "encoding/json" +) + +// checks if the ServiceAccountsListServiceAccountsResponse type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &ServiceAccountsListServiceAccountsResponse{} + +// ServiceAccountsListServiceAccountsResponse struct for ServiceAccountsListServiceAccountsResponse +type ServiceAccountsListServiceAccountsResponse struct { + ServiceAccounts []ServiceAccountsServiceAccount `json:"serviceAccounts,omitempty"` +} + +// NewServiceAccountsListServiceAccountsResponse instantiates a new ServiceAccountsListServiceAccountsResponse object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewServiceAccountsListServiceAccountsResponse() *ServiceAccountsListServiceAccountsResponse { + this := ServiceAccountsListServiceAccountsResponse{} + return &this +} + +// NewServiceAccountsListServiceAccountsResponseWithDefaults instantiates a new ServiceAccountsListServiceAccountsResponse object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewServiceAccountsListServiceAccountsResponseWithDefaults() *ServiceAccountsListServiceAccountsResponse { + this := ServiceAccountsListServiceAccountsResponse{} + return &this +} + +// GetServiceAccounts returns the ServiceAccounts field value if set, zero value otherwise. 
+func (o *ServiceAccountsListServiceAccountsResponse) GetServiceAccounts() []ServiceAccountsServiceAccount { + if o == nil || IsNil(o.ServiceAccounts) { + var ret []ServiceAccountsServiceAccount + return ret + } + return o.ServiceAccounts +} + +// GetServiceAccountsOk returns a tuple with the ServiceAccounts field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsListServiceAccountsResponse) GetServiceAccountsOk() ([]ServiceAccountsServiceAccount, bool) { + if o == nil || IsNil(o.ServiceAccounts) { + return nil, false + } + return o.ServiceAccounts, true +} + +// HasServiceAccounts returns a boolean if a field has been set. +func (o *ServiceAccountsListServiceAccountsResponse) HasServiceAccounts() bool { + if o != nil && !IsNil(o.ServiceAccounts) { + return true + } + + return false +} + +// SetServiceAccounts gets a reference to the given []ServiceAccountsServiceAccount and assigns it to the ServiceAccounts field. +func (o *ServiceAccountsListServiceAccountsResponse) SetServiceAccounts(v []ServiceAccountsServiceAccount) { + o.ServiceAccounts = v +} + +func (o ServiceAccountsListServiceAccountsResponse) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o ServiceAccountsListServiceAccountsResponse) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.ServiceAccounts) { + toSerialize["serviceAccounts"] = o.ServiceAccounts + } + return toSerialize, nil +} + +type NullableServiceAccountsListServiceAccountsResponse struct { + value *ServiceAccountsListServiceAccountsResponse + isSet bool +} + +func (v NullableServiceAccountsListServiceAccountsResponse) Get() *ServiceAccountsListServiceAccountsResponse { + return v.value +} + +func (v *NullableServiceAccountsListServiceAccountsResponse) Set(val *ServiceAccountsListServiceAccountsResponse) { + v.value = val + v.isSet 
= true +} + +func (v NullableServiceAccountsListServiceAccountsResponse) IsSet() bool { + return v.isSet +} + +func (v *NullableServiceAccountsListServiceAccountsResponse) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableServiceAccountsListServiceAccountsResponse(val *ServiceAccountsListServiceAccountsResponse) *NullableServiceAccountsListServiceAccountsResponse { + return &NullableServiceAccountsListServiceAccountsResponse{value: val, isSet: true} +} + +func (v NullableServiceAccountsListServiceAccountsResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableServiceAccountsListServiceAccountsResponse) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/pkg/openapi_client/model_service_accounts_regenerate_service_account_token_response.go b/pkg/openapi_client/model_service_accounts_regenerate_service_account_token_response.go new file mode 100644 index 0000000000..e417e9987c --- /dev/null +++ b/pkg/openapi_client/model_service_accounts_regenerate_service_account_token_response.go @@ -0,0 +1,125 @@ +/* +Superplane Organizations API + +API for managing organizations in the Superplane service + +API version: 1.0 +Contact: support@superplane.com +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
+ +package openapi_client + +import ( + "encoding/json" +) + +// checks if the ServiceAccountsRegenerateServiceAccountTokenResponse type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &ServiceAccountsRegenerateServiceAccountTokenResponse{} + +// ServiceAccountsRegenerateServiceAccountTokenResponse struct for ServiceAccountsRegenerateServiceAccountTokenResponse +type ServiceAccountsRegenerateServiceAccountTokenResponse struct { + Token *string `json:"token,omitempty"` +} + +// NewServiceAccountsRegenerateServiceAccountTokenResponse instantiates a new ServiceAccountsRegenerateServiceAccountTokenResponse object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewServiceAccountsRegenerateServiceAccountTokenResponse() *ServiceAccountsRegenerateServiceAccountTokenResponse { + this := ServiceAccountsRegenerateServiceAccountTokenResponse{} + return &this +} + +// NewServiceAccountsRegenerateServiceAccountTokenResponseWithDefaults instantiates a new ServiceAccountsRegenerateServiceAccountTokenResponse object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewServiceAccountsRegenerateServiceAccountTokenResponseWithDefaults() *ServiceAccountsRegenerateServiceAccountTokenResponse { + this := ServiceAccountsRegenerateServiceAccountTokenResponse{} + return &this +} + +// GetToken returns the Token field value if set, zero value otherwise. +func (o *ServiceAccountsRegenerateServiceAccountTokenResponse) GetToken() string { + if o == nil || IsNil(o.Token) { + var ret string + return ret + } + return *o.Token +} + +// GetTokenOk returns a tuple with the Token field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *ServiceAccountsRegenerateServiceAccountTokenResponse) GetTokenOk() (*string, bool) { + if o == nil || IsNil(o.Token) { + return nil, false + } + return o.Token, true +} + +// HasToken returns a boolean if a field has been set. +func (o *ServiceAccountsRegenerateServiceAccountTokenResponse) HasToken() bool { + if o != nil && !IsNil(o.Token) { + return true + } + + return false +} + +// SetToken gets a reference to the given string and assigns it to the Token field. +func (o *ServiceAccountsRegenerateServiceAccountTokenResponse) SetToken(v string) { + o.Token = &v +} + +func (o ServiceAccountsRegenerateServiceAccountTokenResponse) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o ServiceAccountsRegenerateServiceAccountTokenResponse) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Token) { + toSerialize["token"] = o.Token + } + return toSerialize, nil +} + +type NullableServiceAccountsRegenerateServiceAccountTokenResponse struct { + value *ServiceAccountsRegenerateServiceAccountTokenResponse + isSet bool +} + +func (v NullableServiceAccountsRegenerateServiceAccountTokenResponse) Get() *ServiceAccountsRegenerateServiceAccountTokenResponse { + return v.value +} + +func (v *NullableServiceAccountsRegenerateServiceAccountTokenResponse) Set(val *ServiceAccountsRegenerateServiceAccountTokenResponse) { + v.value = val + v.isSet = true +} + +func (v NullableServiceAccountsRegenerateServiceAccountTokenResponse) IsSet() bool { + return v.isSet +} + +func (v *NullableServiceAccountsRegenerateServiceAccountTokenResponse) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableServiceAccountsRegenerateServiceAccountTokenResponse(val *ServiceAccountsRegenerateServiceAccountTokenResponse) *NullableServiceAccountsRegenerateServiceAccountTokenResponse { + return 
&NullableServiceAccountsRegenerateServiceAccountTokenResponse{value: val, isSet: true} +} + +func (v NullableServiceAccountsRegenerateServiceAccountTokenResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableServiceAccountsRegenerateServiceAccountTokenResponse) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/pkg/openapi_client/model_service_accounts_service_account.go b/pkg/openapi_client/model_service_accounts_service_account.go new file mode 100644 index 0000000000..a87102210d --- /dev/null +++ b/pkg/openapi_client/model_service_accounts_service_account.go @@ -0,0 +1,378 @@ +/* +Superplane Organizations API + +API for managing organizations in the Superplane service + +API version: 1.0 +Contact: support@superplane.com +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi_client + +import ( + "encoding/json" + "time" +) + +// checks if the ServiceAccountsServiceAccount type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &ServiceAccountsServiceAccount{} + +// ServiceAccountsServiceAccount struct for ServiceAccountsServiceAccount +type ServiceAccountsServiceAccount struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + OrganizationId *string `json:"organizationId,omitempty"` + CreatedBy *string `json:"createdBy,omitempty"` + HasToken *bool `json:"hasToken,omitempty"` + CreatedAt *time.Time `json:"createdAt,omitempty"` + UpdatedAt *time.Time `json:"updatedAt,omitempty"` +} + +// NewServiceAccountsServiceAccount instantiates a new ServiceAccountsServiceAccount object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func 
NewServiceAccountsServiceAccount() *ServiceAccountsServiceAccount { + this := ServiceAccountsServiceAccount{} + return &this +} + +// NewServiceAccountsServiceAccountWithDefaults instantiates a new ServiceAccountsServiceAccount object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewServiceAccountsServiceAccountWithDefaults() *ServiceAccountsServiceAccount { + this := ServiceAccountsServiceAccount{} + return &this +} + +// GetId returns the Id field value if set, zero value otherwise. +func (o *ServiceAccountsServiceAccount) GetId() string { + if o == nil || IsNil(o.Id) { + var ret string + return ret + } + return *o.Id +} + +// GetIdOk returns a tuple with the Id field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsServiceAccount) GetIdOk() (*string, bool) { + if o == nil || IsNil(o.Id) { + return nil, false + } + return o.Id, true +} + +// HasId returns a boolean if a field has been set. +func (o *ServiceAccountsServiceAccount) HasId() bool { + if o != nil && !IsNil(o.Id) { + return true + } + + return false +} + +// SetId gets a reference to the given string and assigns it to the Id field. +func (o *ServiceAccountsServiceAccount) SetId(v string) { + o.Id = &v +} + +// GetName returns the Name field value if set, zero value otherwise. +func (o *ServiceAccountsServiceAccount) GetName() string { + if o == nil || IsNil(o.Name) { + var ret string + return ret + } + return *o.Name +} + +// GetNameOk returns a tuple with the Name field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsServiceAccount) GetNameOk() (*string, bool) { + if o == nil || IsNil(o.Name) { + return nil, false + } + return o.Name, true +} + +// HasName returns a boolean if a field has been set. 
+func (o *ServiceAccountsServiceAccount) HasName() bool { + if o != nil && !IsNil(o.Name) { + return true + } + + return false +} + +// SetName gets a reference to the given string and assigns it to the Name field. +func (o *ServiceAccountsServiceAccount) SetName(v string) { + o.Name = &v +} + +// GetDescription returns the Description field value if set, zero value otherwise. +func (o *ServiceAccountsServiceAccount) GetDescription() string { + if o == nil || IsNil(o.Description) { + var ret string + return ret + } + return *o.Description +} + +// GetDescriptionOk returns a tuple with the Description field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsServiceAccount) GetDescriptionOk() (*string, bool) { + if o == nil || IsNil(o.Description) { + return nil, false + } + return o.Description, true +} + +// HasDescription returns a boolean if a field has been set. +func (o *ServiceAccountsServiceAccount) HasDescription() bool { + if o != nil && !IsNil(o.Description) { + return true + } + + return false +} + +// SetDescription gets a reference to the given string and assigns it to the Description field. +func (o *ServiceAccountsServiceAccount) SetDescription(v string) { + o.Description = &v +} + +// GetOrganizationId returns the OrganizationId field value if set, zero value otherwise. +func (o *ServiceAccountsServiceAccount) GetOrganizationId() string { + if o == nil || IsNil(o.OrganizationId) { + var ret string + return ret + } + return *o.OrganizationId +} + +// GetOrganizationIdOk returns a tuple with the OrganizationId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsServiceAccount) GetOrganizationIdOk() (*string, bool) { + if o == nil || IsNil(o.OrganizationId) { + return nil, false + } + return o.OrganizationId, true +} + +// HasOrganizationId returns a boolean if a field has been set. 
+func (o *ServiceAccountsServiceAccount) HasOrganizationId() bool { + if o != nil && !IsNil(o.OrganizationId) { + return true + } + + return false +} + +// SetOrganizationId gets a reference to the given string and assigns it to the OrganizationId field. +func (o *ServiceAccountsServiceAccount) SetOrganizationId(v string) { + o.OrganizationId = &v +} + +// GetCreatedBy returns the CreatedBy field value if set, zero value otherwise. +func (o *ServiceAccountsServiceAccount) GetCreatedBy() string { + if o == nil || IsNil(o.CreatedBy) { + var ret string + return ret + } + return *o.CreatedBy +} + +// GetCreatedByOk returns a tuple with the CreatedBy field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsServiceAccount) GetCreatedByOk() (*string, bool) { + if o == nil || IsNil(o.CreatedBy) { + return nil, false + } + return o.CreatedBy, true +} + +// HasCreatedBy returns a boolean if a field has been set. +func (o *ServiceAccountsServiceAccount) HasCreatedBy() bool { + if o != nil && !IsNil(o.CreatedBy) { + return true + } + + return false +} + +// SetCreatedBy gets a reference to the given string and assigns it to the CreatedBy field. +func (o *ServiceAccountsServiceAccount) SetCreatedBy(v string) { + o.CreatedBy = &v +} + +// GetHasToken returns the HasToken field value if set, zero value otherwise. +func (o *ServiceAccountsServiceAccount) GetHasToken() bool { + if o == nil || IsNil(o.HasToken) { + var ret bool + return ret + } + return *o.HasToken +} + +// GetHasTokenOk returns a tuple with the HasToken field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsServiceAccount) GetHasTokenOk() (*bool, bool) { + if o == nil || IsNil(o.HasToken) { + return nil, false + } + return o.HasToken, true +} + +// HasHasToken returns a boolean if a field has been set. 
+func (o *ServiceAccountsServiceAccount) HasHasToken() bool { + if o != nil && !IsNil(o.HasToken) { + return true + } + + return false +} + +// SetHasToken gets a reference to the given bool and assigns it to the HasToken field. +func (o *ServiceAccountsServiceAccount) SetHasToken(v bool) { + o.HasToken = &v +} + +// GetCreatedAt returns the CreatedAt field value if set, zero value otherwise. +func (o *ServiceAccountsServiceAccount) GetCreatedAt() time.Time { + if o == nil || IsNil(o.CreatedAt) { + var ret time.Time + return ret + } + return *o.CreatedAt +} + +// GetCreatedAtOk returns a tuple with the CreatedAt field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsServiceAccount) GetCreatedAtOk() (*time.Time, bool) { + if o == nil || IsNil(o.CreatedAt) { + return nil, false + } + return o.CreatedAt, true +} + +// HasCreatedAt returns a boolean if a field has been set. +func (o *ServiceAccountsServiceAccount) HasCreatedAt() bool { + if o != nil && !IsNil(o.CreatedAt) { + return true + } + + return false +} + +// SetCreatedAt gets a reference to the given time.Time and assigns it to the CreatedAt field. +func (o *ServiceAccountsServiceAccount) SetCreatedAt(v time.Time) { + o.CreatedAt = &v +} + +// GetUpdatedAt returns the UpdatedAt field value if set, zero value otherwise. +func (o *ServiceAccountsServiceAccount) GetUpdatedAt() time.Time { + if o == nil || IsNil(o.UpdatedAt) { + var ret time.Time + return ret + } + return *o.UpdatedAt +} + +// GetUpdatedAtOk returns a tuple with the UpdatedAt field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsServiceAccount) GetUpdatedAtOk() (*time.Time, bool) { + if o == nil || IsNil(o.UpdatedAt) { + return nil, false + } + return o.UpdatedAt, true +} + +// HasUpdatedAt returns a boolean if a field has been set. 
+func (o *ServiceAccountsServiceAccount) HasUpdatedAt() bool { + if o != nil && !IsNil(o.UpdatedAt) { + return true + } + + return false +} + +// SetUpdatedAt gets a reference to the given time.Time and assigns it to the UpdatedAt field. +func (o *ServiceAccountsServiceAccount) SetUpdatedAt(v time.Time) { + o.UpdatedAt = &v +} + +func (o ServiceAccountsServiceAccount) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o ServiceAccountsServiceAccount) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Id) { + toSerialize["id"] = o.Id + } + if !IsNil(o.Name) { + toSerialize["name"] = o.Name + } + if !IsNil(o.Description) { + toSerialize["description"] = o.Description + } + if !IsNil(o.OrganizationId) { + toSerialize["organizationId"] = o.OrganizationId + } + if !IsNil(o.CreatedBy) { + toSerialize["createdBy"] = o.CreatedBy + } + if !IsNil(o.HasToken) { + toSerialize["hasToken"] = o.HasToken + } + if !IsNil(o.CreatedAt) { + toSerialize["createdAt"] = o.CreatedAt + } + if !IsNil(o.UpdatedAt) { + toSerialize["updatedAt"] = o.UpdatedAt + } + return toSerialize, nil +} + +type NullableServiceAccountsServiceAccount struct { + value *ServiceAccountsServiceAccount + isSet bool +} + +func (v NullableServiceAccountsServiceAccount) Get() *ServiceAccountsServiceAccount { + return v.value +} + +func (v *NullableServiceAccountsServiceAccount) Set(val *ServiceAccountsServiceAccount) { + v.value = val + v.isSet = true +} + +func (v NullableServiceAccountsServiceAccount) IsSet() bool { + return v.isSet +} + +func (v *NullableServiceAccountsServiceAccount) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableServiceAccountsServiceAccount(val *ServiceAccountsServiceAccount) *NullableServiceAccountsServiceAccount { + return &NullableServiceAccountsServiceAccount{value: val, isSet: true} +} + +func (v 
NullableServiceAccountsServiceAccount) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableServiceAccountsServiceAccount) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/pkg/openapi_client/model_service_accounts_update_service_account_body.go b/pkg/openapi_client/model_service_accounts_update_service_account_body.go new file mode 100644 index 0000000000..c3a1b41f3d --- /dev/null +++ b/pkg/openapi_client/model_service_accounts_update_service_account_body.go @@ -0,0 +1,161 @@ +/* +Superplane Organizations API + +API for managing organizations in the Superplane service + +API version: 1.0 +Contact: support@superplane.com +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi_client + +import ( + "encoding/json" +) + +// checks if the ServiceAccountsUpdateServiceAccountBody type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &ServiceAccountsUpdateServiceAccountBody{} + +// ServiceAccountsUpdateServiceAccountBody struct for ServiceAccountsUpdateServiceAccountBody +type ServiceAccountsUpdateServiceAccountBody struct { + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` +} + +// NewServiceAccountsUpdateServiceAccountBody instantiates a new ServiceAccountsUpdateServiceAccountBody object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewServiceAccountsUpdateServiceAccountBody() *ServiceAccountsUpdateServiceAccountBody { + this := ServiceAccountsUpdateServiceAccountBody{} + return &this +} + +// NewServiceAccountsUpdateServiceAccountBodyWithDefaults instantiates a new ServiceAccountsUpdateServiceAccountBody object +// This constructor will only assign default values to 
properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewServiceAccountsUpdateServiceAccountBodyWithDefaults() *ServiceAccountsUpdateServiceAccountBody { + this := ServiceAccountsUpdateServiceAccountBody{} + return &this +} + +// GetName returns the Name field value if set, zero value otherwise. +func (o *ServiceAccountsUpdateServiceAccountBody) GetName() string { + if o == nil || IsNil(o.Name) { + var ret string + return ret + } + return *o.Name +} + +// GetNameOk returns a tuple with the Name field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsUpdateServiceAccountBody) GetNameOk() (*string, bool) { + if o == nil || IsNil(o.Name) { + return nil, false + } + return o.Name, true +} + +// HasName returns a boolean if a field has been set. +func (o *ServiceAccountsUpdateServiceAccountBody) HasName() bool { + if o != nil && !IsNil(o.Name) { + return true + } + + return false +} + +// SetName gets a reference to the given string and assigns it to the Name field. +func (o *ServiceAccountsUpdateServiceAccountBody) SetName(v string) { + o.Name = &v +} + +// GetDescription returns the Description field value if set, zero value otherwise. +func (o *ServiceAccountsUpdateServiceAccountBody) GetDescription() string { + if o == nil || IsNil(o.Description) { + var ret string + return ret + } + return *o.Description +} + +// GetDescriptionOk returns a tuple with the Description field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsUpdateServiceAccountBody) GetDescriptionOk() (*string, bool) { + if o == nil || IsNil(o.Description) { + return nil, false + } + return o.Description, true +} + +// HasDescription returns a boolean if a field has been set. 
+func (o *ServiceAccountsUpdateServiceAccountBody) HasDescription() bool { + if o != nil && !IsNil(o.Description) { + return true + } + + return false +} + +// SetDescription gets a reference to the given string and assigns it to the Description field. +func (o *ServiceAccountsUpdateServiceAccountBody) SetDescription(v string) { + o.Description = &v +} + +func (o ServiceAccountsUpdateServiceAccountBody) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o ServiceAccountsUpdateServiceAccountBody) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Name) { + toSerialize["name"] = o.Name + } + if !IsNil(o.Description) { + toSerialize["description"] = o.Description + } + return toSerialize, nil +} + +type NullableServiceAccountsUpdateServiceAccountBody struct { + value *ServiceAccountsUpdateServiceAccountBody + isSet bool +} + +func (v NullableServiceAccountsUpdateServiceAccountBody) Get() *ServiceAccountsUpdateServiceAccountBody { + return v.value +} + +func (v *NullableServiceAccountsUpdateServiceAccountBody) Set(val *ServiceAccountsUpdateServiceAccountBody) { + v.value = val + v.isSet = true +} + +func (v NullableServiceAccountsUpdateServiceAccountBody) IsSet() bool { + return v.isSet +} + +func (v *NullableServiceAccountsUpdateServiceAccountBody) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableServiceAccountsUpdateServiceAccountBody(val *ServiceAccountsUpdateServiceAccountBody) *NullableServiceAccountsUpdateServiceAccountBody { + return &NullableServiceAccountsUpdateServiceAccountBody{value: val, isSet: true} +} + +func (v NullableServiceAccountsUpdateServiceAccountBody) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableServiceAccountsUpdateServiceAccountBody) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git 
a/pkg/openapi_client/model_service_accounts_update_service_account_response.go b/pkg/openapi_client/model_service_accounts_update_service_account_response.go new file mode 100644 index 0000000000..d34208eb68 --- /dev/null +++ b/pkg/openapi_client/model_service_accounts_update_service_account_response.go @@ -0,0 +1,125 @@ +/* +Superplane Organizations API + +API for managing organizations in the Superplane service + +API version: 1.0 +Contact: support@superplane.com +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi_client + +import ( + "encoding/json" +) + +// checks if the ServiceAccountsUpdateServiceAccountResponse type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &ServiceAccountsUpdateServiceAccountResponse{} + +// ServiceAccountsUpdateServiceAccountResponse struct for ServiceAccountsUpdateServiceAccountResponse +type ServiceAccountsUpdateServiceAccountResponse struct { + ServiceAccount *ServiceAccountsServiceAccount `json:"serviceAccount,omitempty"` +} + +// NewServiceAccountsUpdateServiceAccountResponse instantiates a new ServiceAccountsUpdateServiceAccountResponse object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewServiceAccountsUpdateServiceAccountResponse() *ServiceAccountsUpdateServiceAccountResponse { + this := ServiceAccountsUpdateServiceAccountResponse{} + return &this +} + +// NewServiceAccountsUpdateServiceAccountResponseWithDefaults instantiates a new ServiceAccountsUpdateServiceAccountResponse object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewServiceAccountsUpdateServiceAccountResponseWithDefaults() *ServiceAccountsUpdateServiceAccountResponse { 
+ this := ServiceAccountsUpdateServiceAccountResponse{} + return &this +} + +// GetServiceAccount returns the ServiceAccount field value if set, zero value otherwise. +func (o *ServiceAccountsUpdateServiceAccountResponse) GetServiceAccount() ServiceAccountsServiceAccount { + if o == nil || IsNil(o.ServiceAccount) { + var ret ServiceAccountsServiceAccount + return ret + } + return *o.ServiceAccount +} + +// GetServiceAccountOk returns a tuple with the ServiceAccount field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ServiceAccountsUpdateServiceAccountResponse) GetServiceAccountOk() (*ServiceAccountsServiceAccount, bool) { + if o == nil || IsNil(o.ServiceAccount) { + return nil, false + } + return o.ServiceAccount, true +} + +// HasServiceAccount returns a boolean if a field has been set. +func (o *ServiceAccountsUpdateServiceAccountResponse) HasServiceAccount() bool { + if o != nil && !IsNil(o.ServiceAccount) { + return true + } + + return false +} + +// SetServiceAccount gets a reference to the given ServiceAccountsServiceAccount and assigns it to the ServiceAccount field. 
+func (o *ServiceAccountsUpdateServiceAccountResponse) SetServiceAccount(v ServiceAccountsServiceAccount) { + o.ServiceAccount = &v +} + +func (o ServiceAccountsUpdateServiceAccountResponse) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o ServiceAccountsUpdateServiceAccountResponse) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.ServiceAccount) { + toSerialize["serviceAccount"] = o.ServiceAccount + } + return toSerialize, nil +} + +type NullableServiceAccountsUpdateServiceAccountResponse struct { + value *ServiceAccountsUpdateServiceAccountResponse + isSet bool +} + +func (v NullableServiceAccountsUpdateServiceAccountResponse) Get() *ServiceAccountsUpdateServiceAccountResponse { + return v.value +} + +func (v *NullableServiceAccountsUpdateServiceAccountResponse) Set(val *ServiceAccountsUpdateServiceAccountResponse) { + v.value = val + v.isSet = true +} + +func (v NullableServiceAccountsUpdateServiceAccountResponse) IsSet() bool { + return v.isSet +} + +func (v *NullableServiceAccountsUpdateServiceAccountResponse) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableServiceAccountsUpdateServiceAccountResponse(val *ServiceAccountsUpdateServiceAccountResponse) *NullableServiceAccountsUpdateServiceAccountResponse { + return &NullableServiceAccountsUpdateServiceAccountResponse{value: val, isSet: true} +} + +func (v NullableServiceAccountsUpdateServiceAccountResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableServiceAccountsUpdateServiceAccountResponse) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/pkg/protos/service_accounts/service_accounts.pb.go b/pkg/protos/service_accounts/service_accounts.pb.go new file mode 100644 index 0000000000..d5b388cd23 --- /dev/null +++ 
b/pkg/protos/service_accounts/service_accounts.pb.go @@ -0,0 +1,816 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.36.6 +// protoc v3.15.8 +// source: service_accounts.proto + +package service_accounts + +import ( + timestamp "github.com/golang/protobuf/ptypes/timestamp" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" + _ "google.golang.org/genproto/googleapis/api/annotations" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" + unsafe "unsafe" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type ServiceAccount struct { + state protoimpl.MessageState `protogen:"open.v1"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + OrganizationId string `protobuf:"bytes,4,opt,name=organization_id,json=organizationId,proto3" json:"organization_id,omitempty"` + CreatedBy string `protobuf:"bytes,5,opt,name=created_by,json=createdBy,proto3" json:"created_by,omitempty"` + HasToken bool `protobuf:"varint,6,opt,name=has_token,json=hasToken,proto3" json:"has_token,omitempty"` + CreatedAt *timestamp.Timestamp `protobuf:"bytes,7,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` + UpdatedAt *timestamp.Timestamp `protobuf:"bytes,8,opt,name=updated_at,json=updatedAt,proto3" json:"updated_at,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ServiceAccount) Reset() { + *x = ServiceAccount{} + mi := &file_service_accounts_proto_msgTypes[0] 
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ServiceAccount) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ServiceAccount) ProtoMessage() {} + +func (x *ServiceAccount) ProtoReflect() protoreflect.Message { + mi := &file_service_accounts_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ServiceAccount.ProtoReflect.Descriptor instead. +func (*ServiceAccount) Descriptor() ([]byte, []int) { + return file_service_accounts_proto_rawDescGZIP(), []int{0} +} + +func (x *ServiceAccount) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *ServiceAccount) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ServiceAccount) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *ServiceAccount) GetOrganizationId() string { + if x != nil { + return x.OrganizationId + } + return "" +} + +func (x *ServiceAccount) GetCreatedBy() string { + if x != nil { + return x.CreatedBy + } + return "" +} + +func (x *ServiceAccount) GetHasToken() bool { + if x != nil { + return x.HasToken + } + return false +} + +func (x *ServiceAccount) GetCreatedAt() *timestamp.Timestamp { + if x != nil { + return x.CreatedAt + } + return nil +} + +func (x *ServiceAccount) GetUpdatedAt() *timestamp.Timestamp { + if x != nil { + return x.UpdatedAt + } + return nil +} + +type CreateServiceAccountRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + Role string `protobuf:"bytes,3,opt,name=role,proto3" json:"role,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache 
protoimpl.SizeCache +} + +func (x *CreateServiceAccountRequest) Reset() { + *x = CreateServiceAccountRequest{} + mi := &file_service_accounts_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *CreateServiceAccountRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateServiceAccountRequest) ProtoMessage() {} + +func (x *CreateServiceAccountRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_accounts_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateServiceAccountRequest.ProtoReflect.Descriptor instead. +func (*CreateServiceAccountRequest) Descriptor() ([]byte, []int) { + return file_service_accounts_proto_rawDescGZIP(), []int{1} +} + +func (x *CreateServiceAccountRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *CreateServiceAccountRequest) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *CreateServiceAccountRequest) GetRole() string { + if x != nil { + return x.Role + } + return "" +} + +type CreateServiceAccountResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + ServiceAccount *ServiceAccount `protobuf:"bytes,1,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"` + Token string `protobuf:"bytes,2,opt,name=token,proto3" json:"token,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *CreateServiceAccountResponse) Reset() { + *x = CreateServiceAccountResponse{} + mi := &file_service_accounts_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *CreateServiceAccountResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + 
+func (*CreateServiceAccountResponse) ProtoMessage() {} + +func (x *CreateServiceAccountResponse) ProtoReflect() protoreflect.Message { + mi := &file_service_accounts_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateServiceAccountResponse.ProtoReflect.Descriptor instead. +func (*CreateServiceAccountResponse) Descriptor() ([]byte, []int) { + return file_service_accounts_proto_rawDescGZIP(), []int{2} +} + +func (x *CreateServiceAccountResponse) GetServiceAccount() *ServiceAccount { + if x != nil { + return x.ServiceAccount + } + return nil +} + +func (x *CreateServiceAccountResponse) GetToken() string { + if x != nil { + return x.Token + } + return "" +} + +type ListServiceAccountsRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListServiceAccountsRequest) Reset() { + *x = ListServiceAccountsRequest{} + mi := &file_service_accounts_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListServiceAccountsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListServiceAccountsRequest) ProtoMessage() {} + +func (x *ListServiceAccountsRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_accounts_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListServiceAccountsRequest.ProtoReflect.Descriptor instead. 
+func (*ListServiceAccountsRequest) Descriptor() ([]byte, []int) { + return file_service_accounts_proto_rawDescGZIP(), []int{3} +} + +type ListServiceAccountsResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + ServiceAccounts []*ServiceAccount `protobuf:"bytes,1,rep,name=service_accounts,json=serviceAccounts,proto3" json:"service_accounts,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListServiceAccountsResponse) Reset() { + *x = ListServiceAccountsResponse{} + mi := &file_service_accounts_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListServiceAccountsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListServiceAccountsResponse) ProtoMessage() {} + +func (x *ListServiceAccountsResponse) ProtoReflect() protoreflect.Message { + mi := &file_service_accounts_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListServiceAccountsResponse.ProtoReflect.Descriptor instead. 
+func (*ListServiceAccountsResponse) Descriptor() ([]byte, []int) { + return file_service_accounts_proto_rawDescGZIP(), []int{4} +} + +func (x *ListServiceAccountsResponse) GetServiceAccounts() []*ServiceAccount { + if x != nil { + return x.ServiceAccounts + } + return nil +} + +type DescribeServiceAccountRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *DescribeServiceAccountRequest) Reset() { + *x = DescribeServiceAccountRequest{} + mi := &file_service_accounts_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DescribeServiceAccountRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DescribeServiceAccountRequest) ProtoMessage() {} + +func (x *DescribeServiceAccountRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_accounts_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DescribeServiceAccountRequest.ProtoReflect.Descriptor instead. 
+func (*DescribeServiceAccountRequest) Descriptor() ([]byte, []int) { + return file_service_accounts_proto_rawDescGZIP(), []int{5} +} + +func (x *DescribeServiceAccountRequest) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +type DescribeServiceAccountResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + ServiceAccount *ServiceAccount `protobuf:"bytes,1,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *DescribeServiceAccountResponse) Reset() { + *x = DescribeServiceAccountResponse{} + mi := &file_service_accounts_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DescribeServiceAccountResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DescribeServiceAccountResponse) ProtoMessage() {} + +func (x *DescribeServiceAccountResponse) ProtoReflect() protoreflect.Message { + mi := &file_service_accounts_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DescribeServiceAccountResponse.ProtoReflect.Descriptor instead. 
+func (*DescribeServiceAccountResponse) Descriptor() ([]byte, []int) { + return file_service_accounts_proto_rawDescGZIP(), []int{6} +} + +func (x *DescribeServiceAccountResponse) GetServiceAccount() *ServiceAccount { + if x != nil { + return x.ServiceAccount + } + return nil +} + +type UpdateServiceAccountRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *UpdateServiceAccountRequest) Reset() { + *x = UpdateServiceAccountRequest{} + mi := &file_service_accounts_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *UpdateServiceAccountRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateServiceAccountRequest) ProtoMessage() {} + +func (x *UpdateServiceAccountRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_accounts_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateServiceAccountRequest.ProtoReflect.Descriptor instead. 
+func (*UpdateServiceAccountRequest) Descriptor() ([]byte, []int) { + return file_service_accounts_proto_rawDescGZIP(), []int{7} +} + +func (x *UpdateServiceAccountRequest) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *UpdateServiceAccountRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *UpdateServiceAccountRequest) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +type UpdateServiceAccountResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + ServiceAccount *ServiceAccount `protobuf:"bytes,1,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *UpdateServiceAccountResponse) Reset() { + *x = UpdateServiceAccountResponse{} + mi := &file_service_accounts_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *UpdateServiceAccountResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateServiceAccountResponse) ProtoMessage() {} + +func (x *UpdateServiceAccountResponse) ProtoReflect() protoreflect.Message { + mi := &file_service_accounts_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateServiceAccountResponse.ProtoReflect.Descriptor instead. 
+func (*UpdateServiceAccountResponse) Descriptor() ([]byte, []int) { + return file_service_accounts_proto_rawDescGZIP(), []int{8} +} + +func (x *UpdateServiceAccountResponse) GetServiceAccount() *ServiceAccount { + if x != nil { + return x.ServiceAccount + } + return nil +} + +type DeleteServiceAccountRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *DeleteServiceAccountRequest) Reset() { + *x = DeleteServiceAccountRequest{} + mi := &file_service_accounts_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DeleteServiceAccountRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteServiceAccountRequest) ProtoMessage() {} + +func (x *DeleteServiceAccountRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_accounts_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteServiceAccountRequest.ProtoReflect.Descriptor instead. 
+func (*DeleteServiceAccountRequest) Descriptor() ([]byte, []int) { + return file_service_accounts_proto_rawDescGZIP(), []int{9} +} + +func (x *DeleteServiceAccountRequest) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +type DeleteServiceAccountResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *DeleteServiceAccountResponse) Reset() { + *x = DeleteServiceAccountResponse{} + mi := &file_service_accounts_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DeleteServiceAccountResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteServiceAccountResponse) ProtoMessage() {} + +func (x *DeleteServiceAccountResponse) ProtoReflect() protoreflect.Message { + mi := &file_service_accounts_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteServiceAccountResponse.ProtoReflect.Descriptor instead. 
+func (*DeleteServiceAccountResponse) Descriptor() ([]byte, []int) { + return file_service_accounts_proto_rawDescGZIP(), []int{10} +} + +type RegenerateServiceAccountTokenRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *RegenerateServiceAccountTokenRequest) Reset() { + *x = RegenerateServiceAccountTokenRequest{} + mi := &file_service_accounts_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *RegenerateServiceAccountTokenRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RegenerateServiceAccountTokenRequest) ProtoMessage() {} + +func (x *RegenerateServiceAccountTokenRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_accounts_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RegenerateServiceAccountTokenRequest.ProtoReflect.Descriptor instead. 
+func (*RegenerateServiceAccountTokenRequest) Descriptor() ([]byte, []int) { + return file_service_accounts_proto_rawDescGZIP(), []int{11} +} + +func (x *RegenerateServiceAccountTokenRequest) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +type RegenerateServiceAccountTokenResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + Token string `protobuf:"bytes,1,opt,name=token,proto3" json:"token,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *RegenerateServiceAccountTokenResponse) Reset() { + *x = RegenerateServiceAccountTokenResponse{} + mi := &file_service_accounts_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *RegenerateServiceAccountTokenResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RegenerateServiceAccountTokenResponse) ProtoMessage() {} + +func (x *RegenerateServiceAccountTokenResponse) ProtoReflect() protoreflect.Message { + mi := &file_service_accounts_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RegenerateServiceAccountTokenResponse.ProtoReflect.Descriptor instead. 
+func (*RegenerateServiceAccountTokenResponse) Descriptor() ([]byte, []int) { + return file_service_accounts_proto_rawDescGZIP(), []int{12} +} + +func (x *RegenerateServiceAccountTokenResponse) GetToken() string { + if x != nil { + return x.Token + } + return "" +} + +var File_service_accounts_proto protoreflect.FileDescriptor + +const file_service_accounts_proto_rawDesc = "" + + "\n" + + "\x16service_accounts.proto\x12\x1aSuperplane.ServiceAccounts\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"\xb1\x02\n" + + "\x0eServiceAccount\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n" + + "\x04name\x18\x02 \x01(\tR\x04name\x12 \n" + + "\vdescription\x18\x03 \x01(\tR\vdescription\x12'\n" + + "\x0forganization_id\x18\x04 \x01(\tR\x0eorganizationId\x12\x1d\n" + + "\n" + + "created_by\x18\x05 \x01(\tR\tcreatedBy\x12\x1b\n" + + "\thas_token\x18\x06 \x01(\bR\bhasToken\x129\n" + + "\n" + + "created_at\x18\a \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x129\n" + + "\n" + + "updated_at\x18\b \x01(\v2\x1a.google.protobuf.TimestampR\tupdatedAt\"g\n" + + "\x1bCreateServiceAccountRequest\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12 \n" + + "\vdescription\x18\x02 \x01(\tR\vdescription\x12\x12\n" + + "\x04role\x18\x03 \x01(\tR\x04role\"\x89\x01\n" + + "\x1cCreateServiceAccountResponse\x12S\n" + + "\x0fservice_account\x18\x01 \x01(\v2*.Superplane.ServiceAccounts.ServiceAccountR\x0eserviceAccount\x12\x14\n" + + "\x05token\x18\x02 \x01(\tR\x05token\"\x1c\n" + + "\x1aListServiceAccountsRequest\"t\n" + + "\x1bListServiceAccountsResponse\x12U\n" + + "\x10service_accounts\x18\x01 \x03(\v2*.Superplane.ServiceAccounts.ServiceAccountR\x0fserviceAccounts\"/\n" + + "\x1dDescribeServiceAccountRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"u\n" + + "\x1eDescribeServiceAccountResponse\x12S\n" + + "\x0fservice_account\x18\x01 
\x01(\v2*.Superplane.ServiceAccounts.ServiceAccountR\x0eserviceAccount\"c\n" + + "\x1bUpdateServiceAccountRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n" + + "\x04name\x18\x02 \x01(\tR\x04name\x12 \n" + + "\vdescription\x18\x03 \x01(\tR\vdescription\"s\n" + + "\x1cUpdateServiceAccountResponse\x12S\n" + + "\x0fservice_account\x18\x01 \x01(\v2*.Superplane.ServiceAccounts.ServiceAccountR\x0eserviceAccount\"-\n" + + "\x1bDeleteServiceAccountRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"\x1e\n" + + "\x1cDeleteServiceAccountResponse\"6\n" + + "$RegenerateServiceAccountTokenRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"=\n" + + "%RegenerateServiceAccountTokenResponse\x12\x14\n" + + "\x05token\x18\x01 \x01(\tR\x05token2\xba\r\n" + + "\x0fServiceAccounts\x12\x90\x02\n" + + "\x14CreateServiceAccount\x127.Superplane.ServiceAccounts.CreateServiceAccountRequest\x1a8.Superplane.ServiceAccounts.CreateServiceAccountResponse\"\x84\x01\x92A^\n" + + "\x0fServiceAccounts\x12\x18Create a service account\x1a1Creates a new service account in the organization\x82\xd3\xe4\x93\x02\x1d:\x01*\"\x18/api/v1/service-accounts\x12\x85\x02\n" + + "\x13ListServiceAccounts\x126.Superplane.ServiceAccounts.ListServiceAccountsRequest\x1a7.Superplane.ServiceAccounts.ListServiceAccountsResponse\"}\x92AZ\n" + + "\x0fServiceAccounts\x12\x15List service accounts\x1a0Returns all service accounts in the organization\x82\xd3\xe4\x93\x02\x1a\x12\x18/api/v1/service-accounts\x12\x96\x02\n" + + "\x16DescribeServiceAccount\x129.Superplane.ServiceAccounts.DescribeServiceAccountRequest\x1a:.Superplane.ServiceAccounts.DescribeServiceAccountResponse\"\x84\x01\x92A\\\n" + + "\x0fServiceAccounts\x12\x1aDescribe a service account\x1a-Returns details of a specific service account\x82\xd3\xe4\x93\x02\x1f\x12\x1d/api/v1/service-accounts/{id}\x12\x98\x02\n" + + 
"\x14UpdateServiceAccount\x127.Superplane.ServiceAccounts.UpdateServiceAccountRequest\x1a8.Superplane.ServiceAccounts.UpdateServiceAccountResponse\"\x8c\x01\x92Aa\n" + + "\x0fServiceAccounts\x12\x18Update a service account\x1a4Updates the name or description of a service account\x82\xd3\xe4\x93\x02\":\x01*2\x1d/api/v1/service-accounts/{id}\x12\x98\x02\n" + + "\x14DeleteServiceAccount\x127.Superplane.ServiceAccounts.DeleteServiceAccountRequest\x1a8.Superplane.ServiceAccounts.DeleteServiceAccountResponse\"\x8c\x01\x92Ad\n" + + "\x0fServiceAccounts\x12\x18Delete a service account\x1a7Deletes a service account and removes its RBAC policies\x82\xd3\xe4\x93\x02\x1f*\x1d/api/v1/service-accounts/{id}\x12\xbc\x02\n" + + "\x1dRegenerateServiceAccountToken\x12@.Superplane.ServiceAccounts.RegenerateServiceAccountTokenRequest\x1aA.Superplane.ServiceAccounts.RegenerateServiceAccountTokenResponse\"\x95\x01\x92Ad\n" + + "\x0fServiceAccounts\x12 Regenerate service account token\x1a/Regenerates the API token for a service account\x82\xd3\xe4\x93\x02(:\x01*\"#/api/v1/service-accounts/{id}/tokenB\xe0\x01\x92A\x9c\x01\x12r\n" + + "\x1fSuperplane Service Accounts API\x12#API for Superplane Service Accounts\"%\n" + + "\vAPI Support\x1a\x16support@superplane.com2\x031.0*\x02\x01\x022\x10application/json:\x10application/jsonZ>github.com/superplanehq/superplane/pkg/protos/service_accountsb\x06proto3" + +var ( + file_service_accounts_proto_rawDescOnce sync.Once + file_service_accounts_proto_rawDescData []byte +) + +func file_service_accounts_proto_rawDescGZIP() []byte { + file_service_accounts_proto_rawDescOnce.Do(func() { + file_service_accounts_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_service_accounts_proto_rawDesc), len(file_service_accounts_proto_rawDesc))) + }) + return file_service_accounts_proto_rawDescData +} + +var file_service_accounts_proto_msgTypes = make([]protoimpl.MessageInfo, 13) +var file_service_accounts_proto_goTypes = []any{ + 
(*ServiceAccount)(nil), // 0: Superplane.ServiceAccounts.ServiceAccount + (*CreateServiceAccountRequest)(nil), // 1: Superplane.ServiceAccounts.CreateServiceAccountRequest + (*CreateServiceAccountResponse)(nil), // 2: Superplane.ServiceAccounts.CreateServiceAccountResponse + (*ListServiceAccountsRequest)(nil), // 3: Superplane.ServiceAccounts.ListServiceAccountsRequest + (*ListServiceAccountsResponse)(nil), // 4: Superplane.ServiceAccounts.ListServiceAccountsResponse + (*DescribeServiceAccountRequest)(nil), // 5: Superplane.ServiceAccounts.DescribeServiceAccountRequest + (*DescribeServiceAccountResponse)(nil), // 6: Superplane.ServiceAccounts.DescribeServiceAccountResponse + (*UpdateServiceAccountRequest)(nil), // 7: Superplane.ServiceAccounts.UpdateServiceAccountRequest + (*UpdateServiceAccountResponse)(nil), // 8: Superplane.ServiceAccounts.UpdateServiceAccountResponse + (*DeleteServiceAccountRequest)(nil), // 9: Superplane.ServiceAccounts.DeleteServiceAccountRequest + (*DeleteServiceAccountResponse)(nil), // 10: Superplane.ServiceAccounts.DeleteServiceAccountResponse + (*RegenerateServiceAccountTokenRequest)(nil), // 11: Superplane.ServiceAccounts.RegenerateServiceAccountTokenRequest + (*RegenerateServiceAccountTokenResponse)(nil), // 12: Superplane.ServiceAccounts.RegenerateServiceAccountTokenResponse + (*timestamp.Timestamp)(nil), // 13: google.protobuf.Timestamp +} +var file_service_accounts_proto_depIdxs = []int32{ + 13, // 0: Superplane.ServiceAccounts.ServiceAccount.created_at:type_name -> google.protobuf.Timestamp + 13, // 1: Superplane.ServiceAccounts.ServiceAccount.updated_at:type_name -> google.protobuf.Timestamp + 0, // 2: Superplane.ServiceAccounts.CreateServiceAccountResponse.service_account:type_name -> Superplane.ServiceAccounts.ServiceAccount + 0, // 3: Superplane.ServiceAccounts.ListServiceAccountsResponse.service_accounts:type_name -> Superplane.ServiceAccounts.ServiceAccount + 0, // 4: 
Superplane.ServiceAccounts.DescribeServiceAccountResponse.service_account:type_name -> Superplane.ServiceAccounts.ServiceAccount + 0, // 5: Superplane.ServiceAccounts.UpdateServiceAccountResponse.service_account:type_name -> Superplane.ServiceAccounts.ServiceAccount + 1, // 6: Superplane.ServiceAccounts.ServiceAccounts.CreateServiceAccount:input_type -> Superplane.ServiceAccounts.CreateServiceAccountRequest + 3, // 7: Superplane.ServiceAccounts.ServiceAccounts.ListServiceAccounts:input_type -> Superplane.ServiceAccounts.ListServiceAccountsRequest + 5, // 8: Superplane.ServiceAccounts.ServiceAccounts.DescribeServiceAccount:input_type -> Superplane.ServiceAccounts.DescribeServiceAccountRequest + 7, // 9: Superplane.ServiceAccounts.ServiceAccounts.UpdateServiceAccount:input_type -> Superplane.ServiceAccounts.UpdateServiceAccountRequest + 9, // 10: Superplane.ServiceAccounts.ServiceAccounts.DeleteServiceAccount:input_type -> Superplane.ServiceAccounts.DeleteServiceAccountRequest + 11, // 11: Superplane.ServiceAccounts.ServiceAccounts.RegenerateServiceAccountToken:input_type -> Superplane.ServiceAccounts.RegenerateServiceAccountTokenRequest + 2, // 12: Superplane.ServiceAccounts.ServiceAccounts.CreateServiceAccount:output_type -> Superplane.ServiceAccounts.CreateServiceAccountResponse + 4, // 13: Superplane.ServiceAccounts.ServiceAccounts.ListServiceAccounts:output_type -> Superplane.ServiceAccounts.ListServiceAccountsResponse + 6, // 14: Superplane.ServiceAccounts.ServiceAccounts.DescribeServiceAccount:output_type -> Superplane.ServiceAccounts.DescribeServiceAccountResponse + 8, // 15: Superplane.ServiceAccounts.ServiceAccounts.UpdateServiceAccount:output_type -> Superplane.ServiceAccounts.UpdateServiceAccountResponse + 10, // 16: Superplane.ServiceAccounts.ServiceAccounts.DeleteServiceAccount:output_type -> Superplane.ServiceAccounts.DeleteServiceAccountResponse + 12, // 17: Superplane.ServiceAccounts.ServiceAccounts.RegenerateServiceAccountToken:output_type -> 
Superplane.ServiceAccounts.RegenerateServiceAccountTokenResponse + 12, // [12:18] is the sub-list for method output_type + 6, // [6:12] is the sub-list for method input_type + 6, // [6:6] is the sub-list for extension type_name + 6, // [6:6] is the sub-list for extension extendee + 0, // [0:6] is the sub-list for field type_name +} + +func init() { file_service_accounts_proto_init() } +func file_service_accounts_proto_init() { + if File_service_accounts_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_service_accounts_proto_rawDesc), len(file_service_accounts_proto_rawDesc)), + NumEnums: 0, + NumMessages: 13, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_service_accounts_proto_goTypes, + DependencyIndexes: file_service_accounts_proto_depIdxs, + MessageInfos: file_service_accounts_proto_msgTypes, + }.Build() + File_service_accounts_proto = out.File + file_service_accounts_proto_goTypes = nil + file_service_accounts_proto_depIdxs = nil +} diff --git a/pkg/protos/service_accounts/service_accounts.pb.gw.go b/pkg/protos/service_accounts/service_accounts.pb.gw.go new file mode 100644 index 0000000000..7f3b758f52 --- /dev/null +++ b/pkg/protos/service_accounts/service_accounts.pb.gw.go @@ -0,0 +1,526 @@ +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. +// source: service_accounts.proto + +/* +Package service_accounts is a reverse proxy. + +It translates gRPC into RESTful JSON APIs. 
+*/ +package service_accounts + +import ( + "context" + "errors" + "io" + "net/http" + + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" +) + +// Suppress "imported and not used" errors +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) + +func request_ServiceAccounts_CreateServiceAccount_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceAccountsClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq CreateServiceAccountRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := client.CreateServiceAccount(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ServiceAccounts_CreateServiceAccount_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceAccountsServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq CreateServiceAccountRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := server.CreateServiceAccount(ctx, &protoReq) + return msg, metadata, err +} + +func request_ServiceAccounts_ListServiceAccounts_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceAccountsClient, req *http.Request, 
pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq ListServiceAccountsRequest + metadata runtime.ServerMetadata + ) + io.Copy(io.Discard, req.Body) + msg, err := client.ListServiceAccounts(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ServiceAccounts_ListServiceAccounts_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceAccountsServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq ListServiceAccountsRequest + metadata runtime.ServerMetadata + ) + msg, err := server.ListServiceAccounts(ctx, &protoReq) + return msg, metadata, err +} + +func request_ServiceAccounts_DescribeServiceAccount_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceAccountsClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq DescribeServiceAccountRequest + metadata runtime.ServerMetadata + err error + ) + io.Copy(io.Discard, req.Body) + val, ok := pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + protoReq.Id, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + msg, err := client.DescribeServiceAccount(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ServiceAccounts_DescribeServiceAccount_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceAccountsServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq DescribeServiceAccountRequest + metadata runtime.ServerMetadata + err error + ) + val, ok := pathParams["id"] + if !ok { + return nil, 
metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + protoReq.Id, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + msg, err := server.DescribeServiceAccount(ctx, &protoReq) + return msg, metadata, err +} + +func request_ServiceAccounts_UpdateServiceAccount_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceAccountsClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq UpdateServiceAccountRequest + metadata runtime.ServerMetadata + err error + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + val, ok := pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + protoReq.Id, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + msg, err := client.UpdateServiceAccount(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ServiceAccounts_UpdateServiceAccount_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceAccountsServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq UpdateServiceAccountRequest + metadata runtime.ServerMetadata + err error + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + val, ok := pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + protoReq.Id, err = 
runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + msg, err := server.UpdateServiceAccount(ctx, &protoReq) + return msg, metadata, err +} + +func request_ServiceAccounts_DeleteServiceAccount_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceAccountsClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq DeleteServiceAccountRequest + metadata runtime.ServerMetadata + err error + ) + io.Copy(io.Discard, req.Body) + val, ok := pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + protoReq.Id, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + msg, err := client.DeleteServiceAccount(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ServiceAccounts_DeleteServiceAccount_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceAccountsServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq DeleteServiceAccountRequest + metadata runtime.ServerMetadata + err error + ) + val, ok := pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + protoReq.Id, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + msg, err := server.DeleteServiceAccount(ctx, &protoReq) + return msg, metadata, err +} + +func request_ServiceAccounts_RegenerateServiceAccountToken_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceAccountsClient, req *http.Request, pathParams 
map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq RegenerateServiceAccountTokenRequest + metadata runtime.ServerMetadata + err error + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + val, ok := pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + protoReq.Id, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + msg, err := client.RegenerateServiceAccountToken(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ServiceAccounts_RegenerateServiceAccountToken_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceAccountsServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq RegenerateServiceAccountTokenRequest + metadata runtime.ServerMetadata + err error + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + val, ok := pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + protoReq.Id, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + msg, err := server.RegenerateServiceAccountToken(ctx, &protoReq) + return msg, metadata, err +} + +// RegisterServiceAccountsHandlerServer registers the http handlers for service ServiceAccounts to "mux". +// UnaryRPC :call ServiceAccountsServer directly. 
+// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. +// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterServiceAccountsHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. +func RegisterServiceAccountsHandlerServer(ctx context.Context, mux *runtime.ServeMux, server ServiceAccountsServer) error { + mux.Handle(http.MethodPost, pattern_ServiceAccounts_CreateServiceAccount_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/Superplane.ServiceAccounts.ServiceAccounts/CreateServiceAccount", runtime.WithHTTPPathPattern("/api/v1/service-accounts")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ServiceAccounts_CreateServiceAccount_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ServiceAccounts_CreateServiceAccount_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodGet, pattern_ServiceAccounts_ListServiceAccounts_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/Superplane.ServiceAccounts.ServiceAccounts/ListServiceAccounts", runtime.WithHTTPPathPattern("/api/v1/service-accounts")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ServiceAccounts_ListServiceAccounts_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ServiceAccounts_ListServiceAccounts_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodGet, pattern_ServiceAccounts_DescribeServiceAccount_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/Superplane.ServiceAccounts.ServiceAccounts/DescribeServiceAccount", runtime.WithHTTPPathPattern("/api/v1/service-accounts/{id}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ServiceAccounts_DescribeServiceAccount_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ServiceAccounts_DescribeServiceAccount_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodPatch, pattern_ServiceAccounts_UpdateServiceAccount_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/Superplane.ServiceAccounts.ServiceAccounts/UpdateServiceAccount", runtime.WithHTTPPathPattern("/api/v1/service-accounts/{id}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ServiceAccounts_UpdateServiceAccount_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ServiceAccounts_UpdateServiceAccount_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodDelete, pattern_ServiceAccounts_DeleteServiceAccount_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/Superplane.ServiceAccounts.ServiceAccounts/DeleteServiceAccount", runtime.WithHTTPPathPattern("/api/v1/service-accounts/{id}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ServiceAccounts_DeleteServiceAccount_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ServiceAccounts_DeleteServiceAccount_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodPost, pattern_ServiceAccounts_RegenerateServiceAccountToken_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/Superplane.ServiceAccounts.ServiceAccounts/RegenerateServiceAccountToken", runtime.WithHTTPPathPattern("/api/v1/service-accounts/{id}/token")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ServiceAccounts_RegenerateServiceAccountToken_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ServiceAccounts_RegenerateServiceAccountToken_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + + return nil +} + +// RegisterServiceAccountsHandlerFromEndpoint is same as RegisterServiceAccountsHandler but +// automatically dials to "endpoint" and closes the connection when "ctx" gets done. +func RegisterServiceAccountsHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { + conn, err := grpc.NewClient(endpoint, opts...) 
+ if err != nil { + return err + } + defer func() { + if err != nil { + if cerr := conn.Close(); cerr != nil { + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) + } + return + } + go func() { + <-ctx.Done() + if cerr := conn.Close(); cerr != nil { + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) + } + }() + }() + return RegisterServiceAccountsHandler(ctx, mux, conn) +} + +// RegisterServiceAccountsHandler registers the http handlers for service ServiceAccounts to "mux". +// The handlers forward requests to the grpc endpoint over "conn". +func RegisterServiceAccountsHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterServiceAccountsHandlerClient(ctx, mux, NewServiceAccountsClient(conn)) +} + +// RegisterServiceAccountsHandlerClient registers the http handlers for service ServiceAccounts +// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "ServiceAccountsClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "ServiceAccountsClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "ServiceAccountsClient" to call the correct interceptors. This client ignores the HTTP middlewares. 
+func RegisterServiceAccountsHandlerClient(ctx context.Context, mux *runtime.ServeMux, client ServiceAccountsClient) error { + mux.Handle(http.MethodPost, pattern_ServiceAccounts_CreateServiceAccount_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/Superplane.ServiceAccounts.ServiceAccounts/CreateServiceAccount", runtime.WithHTTPPathPattern("/api/v1/service-accounts")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ServiceAccounts_CreateServiceAccount_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ServiceAccounts_CreateServiceAccount_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodGet, pattern_ServiceAccounts_ListServiceAccounts_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/Superplane.ServiceAccounts.ServiceAccounts/ListServiceAccounts", runtime.WithHTTPPathPattern("/api/v1/service-accounts")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ServiceAccounts_ListServiceAccounts_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ServiceAccounts_ListServiceAccounts_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodGet, pattern_ServiceAccounts_DescribeServiceAccount_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/Superplane.ServiceAccounts.ServiceAccounts/DescribeServiceAccount", runtime.WithHTTPPathPattern("/api/v1/service-accounts/{id}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ServiceAccounts_DescribeServiceAccount_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ServiceAccounts_DescribeServiceAccount_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodPatch, pattern_ServiceAccounts_UpdateServiceAccount_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/Superplane.ServiceAccounts.ServiceAccounts/UpdateServiceAccount", runtime.WithHTTPPathPattern("/api/v1/service-accounts/{id}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ServiceAccounts_UpdateServiceAccount_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ServiceAccounts_UpdateServiceAccount_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodDelete, pattern_ServiceAccounts_DeleteServiceAccount_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/Superplane.ServiceAccounts.ServiceAccounts/DeleteServiceAccount", runtime.WithHTTPPathPattern("/api/v1/service-accounts/{id}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ServiceAccounts_DeleteServiceAccount_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ServiceAccounts_DeleteServiceAccount_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodPost, pattern_ServiceAccounts_RegenerateServiceAccountToken_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/Superplane.ServiceAccounts.ServiceAccounts/RegenerateServiceAccountToken", runtime.WithHTTPPathPattern("/api/v1/service-accounts/{id}/token")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ServiceAccounts_RegenerateServiceAccountToken_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ServiceAccounts_RegenerateServiceAccountToken_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + return nil +} + +var ( + pattern_ServiceAccounts_CreateServiceAccount_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "service-accounts"}, "")) + pattern_ServiceAccounts_ListServiceAccounts_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "service-accounts"}, "")) + pattern_ServiceAccounts_DescribeServiceAccount_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"api", "v1", "service-accounts", "id"}, "")) + pattern_ServiceAccounts_UpdateServiceAccount_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"api", "v1", "service-accounts", "id"}, "")) + pattern_ServiceAccounts_DeleteServiceAccount_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"api", "v1", "service-accounts", "id"}, "")) + pattern_ServiceAccounts_RegenerateServiceAccountToken_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"api", "v1", "service-accounts", "id", "token"}, "")) +) + +var ( + forward_ServiceAccounts_CreateServiceAccount_0 = runtime.ForwardResponseMessage + forward_ServiceAccounts_ListServiceAccounts_0 = runtime.ForwardResponseMessage + forward_ServiceAccounts_DescribeServiceAccount_0 = runtime.ForwardResponseMessage + forward_ServiceAccounts_UpdateServiceAccount_0 = runtime.ForwardResponseMessage + forward_ServiceAccounts_DeleteServiceAccount_0 = runtime.ForwardResponseMessage + forward_ServiceAccounts_RegenerateServiceAccountToken_0 = runtime.ForwardResponseMessage +) diff --git a/pkg/protos/service_accounts/service_accounts_grpc.pb.go b/pkg/protos/service_accounts/service_accounts_grpc.pb.go new file mode 100644 index 0000000000..f4e3564ec5 --- /dev/null +++ b/pkg/protos/service_accounts/service_accounts_grpc.pb.go @@ -0,0 +1,309 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. 
+// versions: +// - protoc-gen-go-grpc v1.6.0 +// - protoc v3.15.8 +// source: service_accounts.proto + +package service_accounts + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + ServiceAccounts_CreateServiceAccount_FullMethodName = "/Superplane.ServiceAccounts.ServiceAccounts/CreateServiceAccount" + ServiceAccounts_ListServiceAccounts_FullMethodName = "/Superplane.ServiceAccounts.ServiceAccounts/ListServiceAccounts" + ServiceAccounts_DescribeServiceAccount_FullMethodName = "/Superplane.ServiceAccounts.ServiceAccounts/DescribeServiceAccount" + ServiceAccounts_UpdateServiceAccount_FullMethodName = "/Superplane.ServiceAccounts.ServiceAccounts/UpdateServiceAccount" + ServiceAccounts_DeleteServiceAccount_FullMethodName = "/Superplane.ServiceAccounts.ServiceAccounts/DeleteServiceAccount" + ServiceAccounts_RegenerateServiceAccountToken_FullMethodName = "/Superplane.ServiceAccounts.ServiceAccounts/RegenerateServiceAccountToken" +) + +// ServiceAccountsClient is the client API for ServiceAccounts service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
+type ServiceAccountsClient interface { + CreateServiceAccount(ctx context.Context, in *CreateServiceAccountRequest, opts ...grpc.CallOption) (*CreateServiceAccountResponse, error) + ListServiceAccounts(ctx context.Context, in *ListServiceAccountsRequest, opts ...grpc.CallOption) (*ListServiceAccountsResponse, error) + DescribeServiceAccount(ctx context.Context, in *DescribeServiceAccountRequest, opts ...grpc.CallOption) (*DescribeServiceAccountResponse, error) + UpdateServiceAccount(ctx context.Context, in *UpdateServiceAccountRequest, opts ...grpc.CallOption) (*UpdateServiceAccountResponse, error) + DeleteServiceAccount(ctx context.Context, in *DeleteServiceAccountRequest, opts ...grpc.CallOption) (*DeleteServiceAccountResponse, error) + RegenerateServiceAccountToken(ctx context.Context, in *RegenerateServiceAccountTokenRequest, opts ...grpc.CallOption) (*RegenerateServiceAccountTokenResponse, error) +} + +type serviceAccountsClient struct { + cc grpc.ClientConnInterface +} + +func NewServiceAccountsClient(cc grpc.ClientConnInterface) ServiceAccountsClient { + return &serviceAccountsClient{cc} +} + +func (c *serviceAccountsClient) CreateServiceAccount(ctx context.Context, in *CreateServiceAccountRequest, opts ...grpc.CallOption) (*CreateServiceAccountResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(CreateServiceAccountResponse) + err := c.cc.Invoke(ctx, ServiceAccounts_CreateServiceAccount_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceAccountsClient) ListServiceAccounts(ctx context.Context, in *ListServiceAccountsRequest, opts ...grpc.CallOption) (*ListServiceAccountsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListServiceAccountsResponse) + err := c.cc.Invoke(ctx, ServiceAccounts_ListServiceAccounts_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceAccountsClient) DescribeServiceAccount(ctx context.Context, in *DescribeServiceAccountRequest, opts ...grpc.CallOption) (*DescribeServiceAccountResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(DescribeServiceAccountResponse) + err := c.cc.Invoke(ctx, ServiceAccounts_DescribeServiceAccount_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceAccountsClient) UpdateServiceAccount(ctx context.Context, in *UpdateServiceAccountRequest, opts ...grpc.CallOption) (*UpdateServiceAccountResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(UpdateServiceAccountResponse) + err := c.cc.Invoke(ctx, ServiceAccounts_UpdateServiceAccount_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceAccountsClient) DeleteServiceAccount(ctx context.Context, in *DeleteServiceAccountRequest, opts ...grpc.CallOption) (*DeleteServiceAccountResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(DeleteServiceAccountResponse) + err := c.cc.Invoke(ctx, ServiceAccounts_DeleteServiceAccount_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceAccountsClient) RegenerateServiceAccountToken(ctx context.Context, in *RegenerateServiceAccountTokenRequest, opts ...grpc.CallOption) (*RegenerateServiceAccountTokenResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(RegenerateServiceAccountTokenResponse) + err := c.cc.Invoke(ctx, ServiceAccounts_RegenerateServiceAccountToken_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ServiceAccountsServer is the server API for ServiceAccounts service. 
+// All implementations should embed UnimplementedServiceAccountsServer +// for forward compatibility. +type ServiceAccountsServer interface { + CreateServiceAccount(context.Context, *CreateServiceAccountRequest) (*CreateServiceAccountResponse, error) + ListServiceAccounts(context.Context, *ListServiceAccountsRequest) (*ListServiceAccountsResponse, error) + DescribeServiceAccount(context.Context, *DescribeServiceAccountRequest) (*DescribeServiceAccountResponse, error) + UpdateServiceAccount(context.Context, *UpdateServiceAccountRequest) (*UpdateServiceAccountResponse, error) + DeleteServiceAccount(context.Context, *DeleteServiceAccountRequest) (*DeleteServiceAccountResponse, error) + RegenerateServiceAccountToken(context.Context, *RegenerateServiceAccountTokenRequest) (*RegenerateServiceAccountTokenResponse, error) +} + +// UnimplementedServiceAccountsServer should be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. 
+type UnimplementedServiceAccountsServer struct{} + +func (UnimplementedServiceAccountsServer) CreateServiceAccount(context.Context, *CreateServiceAccountRequest) (*CreateServiceAccountResponse, error) { + return nil, status.Error(codes.Unimplemented, "method CreateServiceAccount not implemented") +} +func (UnimplementedServiceAccountsServer) ListServiceAccounts(context.Context, *ListServiceAccountsRequest) (*ListServiceAccountsResponse, error) { + return nil, status.Error(codes.Unimplemented, "method ListServiceAccounts not implemented") +} +func (UnimplementedServiceAccountsServer) DescribeServiceAccount(context.Context, *DescribeServiceAccountRequest) (*DescribeServiceAccountResponse, error) { + return nil, status.Error(codes.Unimplemented, "method DescribeServiceAccount not implemented") +} +func (UnimplementedServiceAccountsServer) UpdateServiceAccount(context.Context, *UpdateServiceAccountRequest) (*UpdateServiceAccountResponse, error) { + return nil, status.Error(codes.Unimplemented, "method UpdateServiceAccount not implemented") +} +func (UnimplementedServiceAccountsServer) DeleteServiceAccount(context.Context, *DeleteServiceAccountRequest) (*DeleteServiceAccountResponse, error) { + return nil, status.Error(codes.Unimplemented, "method DeleteServiceAccount not implemented") +} +func (UnimplementedServiceAccountsServer) RegenerateServiceAccountToken(context.Context, *RegenerateServiceAccountTokenRequest) (*RegenerateServiceAccountTokenResponse, error) { + return nil, status.Error(codes.Unimplemented, "method RegenerateServiceAccountToken not implemented") +} +func (UnimplementedServiceAccountsServer) testEmbeddedByValue() {} + +// UnsafeServiceAccountsServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to ServiceAccountsServer will +// result in compilation errors. 
+type UnsafeServiceAccountsServer interface { + mustEmbedUnimplementedServiceAccountsServer() +} + +func RegisterServiceAccountsServer(s grpc.ServiceRegistrar, srv ServiceAccountsServer) { + // If the following call panics, it indicates UnimplementedServiceAccountsServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&ServiceAccounts_ServiceDesc, srv) +} + +func _ServiceAccounts_CreateServiceAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateServiceAccountRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceAccountsServer).CreateServiceAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ServiceAccounts_CreateServiceAccount_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceAccountsServer).CreateServiceAccount(ctx, req.(*CreateServiceAccountRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceAccounts_ListServiceAccounts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListServiceAccountsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceAccountsServer).ListServiceAccounts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ServiceAccounts_ListServiceAccounts_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceAccountsServer).ListServiceAccounts(ctx, 
req.(*ListServiceAccountsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceAccounts_DescribeServiceAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DescribeServiceAccountRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceAccountsServer).DescribeServiceAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ServiceAccounts_DescribeServiceAccount_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceAccountsServer).DescribeServiceAccount(ctx, req.(*DescribeServiceAccountRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceAccounts_UpdateServiceAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateServiceAccountRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceAccountsServer).UpdateServiceAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ServiceAccounts_UpdateServiceAccount_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceAccountsServer).UpdateServiceAccount(ctx, req.(*UpdateServiceAccountRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceAccounts_DeleteServiceAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteServiceAccountRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceAccountsServer).DeleteServiceAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
ServiceAccounts_DeleteServiceAccount_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceAccountsServer).DeleteServiceAccount(ctx, req.(*DeleteServiceAccountRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceAccounts_RegenerateServiceAccountToken_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RegenerateServiceAccountTokenRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceAccountsServer).RegenerateServiceAccountToken(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ServiceAccounts_RegenerateServiceAccountToken_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceAccountsServer).RegenerateServiceAccountToken(ctx, req.(*RegenerateServiceAccountTokenRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// ServiceAccounts_ServiceDesc is the grpc.ServiceDesc for ServiceAccounts service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var ServiceAccounts_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "Superplane.ServiceAccounts.ServiceAccounts", + HandlerType: (*ServiceAccountsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateServiceAccount", + Handler: _ServiceAccounts_CreateServiceAccount_Handler, + }, + { + MethodName: "ListServiceAccounts", + Handler: _ServiceAccounts_ListServiceAccounts_Handler, + }, + { + MethodName: "DescribeServiceAccount", + Handler: _ServiceAccounts_DescribeServiceAccount_Handler, + }, + { + MethodName: "UpdateServiceAccount", + Handler: _ServiceAccounts_UpdateServiceAccount_Handler, + }, + { + MethodName: "DeleteServiceAccount", + Handler: _ServiceAccounts_DeleteServiceAccount_Handler, + }, + { + MethodName: "RegenerateServiceAccountToken", + Handler: _ServiceAccounts_RegenerateServiceAccountToken_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "service_accounts.proto", +} diff --git a/pkg/protos/users/users.pb.go b/pkg/protos/users/users.pb.go index b88e1981f0..d10be824cf 100644 --- a/pkg/protos/users/users.pb.go +++ b/pkg/protos/users/users.pb.go @@ -283,11 +283,12 @@ func (x *ListUserRolesResponse) GetRoles() []*roles.Role { } type ListUsersRequest struct { - state protoimpl.MessageState `protogen:"open.v1"` - DomainType authorization.DomainType `protobuf:"varint,1,opt,name=domain_type,json=domainType,proto3,enum=Superplane.Authorization.DomainType" json:"domain_type,omitempty"` - DomainId string `protobuf:"bytes,2,opt,name=domain_id,json=domainId,proto3" json:"domain_id,omitempty"` - unknownFields protoimpl.UnknownFields - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + DomainType authorization.DomainType `protobuf:"varint,1,opt,name=domain_type,json=domainType,proto3,enum=Superplane.Authorization.DomainType" json:"domain_type,omitempty"` + DomainId string 
`protobuf:"bytes,2,opt,name=domain_id,json=domainId,proto3" json:"domain_id,omitempty"` + IncludeServiceAccounts bool `protobuf:"varint,3,opt,name=include_service_accounts,json=includeServiceAccounts,proto3" json:"include_service_accounts,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListUsersRequest) Reset() { @@ -334,6 +335,13 @@ func (x *ListUsersRequest) GetDomainId() string { return "" } +func (x *ListUsersRequest) GetIncludeServiceAccounts() bool { + if x != nil { + return x.IncludeServiceAccounts + } + return false +} + type ListUsersResponse struct { state protoimpl.MessageState `protogen:"open.v1"` Users []*User `protobuf:"bytes,1,rep,name=users,proto3" json:"users,omitempty"` @@ -804,11 +812,12 @@ const file_users_proto_rawDesc = "" + "\vdomain_type\x18\x02 \x01(\x0e2$.Superplane.Authorization.DomainTypeR\n" + "domainType\x12\x1b\n" + "\tdomain_id\x18\x03 \x01(\tR\bdomainId\x12,\n" + - "\x05roles\x18\x04 \x03(\v2\x16.Superplane.Roles.RoleR\x05roles\"v\n" + + "\x05roles\x18\x04 \x03(\v2\x16.Superplane.Roles.RoleR\x05roles\"\xb0\x01\n" + "\x10ListUsersRequest\x12E\n" + "\vdomain_type\x18\x01 \x01(\x0e2$.Superplane.Authorization.DomainTypeR\n" + "domainType\x12\x1b\n" + - "\tdomain_id\x18\x02 \x01(\tR\bdomainId\"A\n" + + "\tdomain_id\x18\x02 \x01(\tR\bdomainId\x128\n" + + "\x18include_service_accounts\x18\x03 \x01(\bR\x16includeServiceAccounts\"A\n" + "\x11ListUsersResponse\x12,\n" + "\x05users\x18\x01 \x03(\v2\x16.Superplane.Users.UserR\x05users\"\xaa\x04\n" + "\x04User\x12;\n" + diff --git a/pkg/public/server.go b/pkg/public/server.go index 663fbc1262..f1bf8a1c6c 100644 --- a/pkg/public/server.go +++ b/pkg/public/server.go @@ -43,6 +43,7 @@ import ( pbOrg "github.com/superplanehq/superplane/pkg/protos/organizations" pbRoles "github.com/superplanehq/superplane/pkg/protos/roles" pbSecret "github.com/superplanehq/superplane/pkg/protos/secrets" + pbServiceAccounts 
"github.com/superplanehq/superplane/pkg/protos/service_accounts" pbTriggers "github.com/superplanehq/superplane/pkg/protos/triggers" pbUsers "github.com/superplanehq/superplane/pkg/protos/users" pbWidgets "github.com/superplanehq/superplane/pkg/protos/widgets" @@ -233,6 +234,11 @@ func (s *Server) RegisterGRPCGateway(grpcServerAddr string) error { return err } + err = pbServiceAccounts.RegisterServiceAccountsHandlerFromEndpoint(ctx, grpcGatewayMux, grpcServerAddr, opts) + if err != nil { + return err + } + // Public health check s.Router.HandleFunc("/api/v1/canvases/is-alive", func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) @@ -258,6 +264,7 @@ func (s *Server) RegisterGRPCGateway(grpcServerAddr string) error { s.Router.PathPrefix("/api/v1/triggers").Handler(protectedGRPCHandler) s.Router.PathPrefix("/api/v1/widgets").Handler(protectedGRPCHandler) s.Router.PathPrefix("/api/v1/blueprints").Handler(protectedGRPCHandler) + s.Router.PathPrefix("/api/v1/service-accounts").Handler(protectedGRPCHandler) s.Router.PathPrefix("/api/v1/workflows").Handler(protectedGRPCHandler) return nil diff --git a/pkg/public/server_test.go b/pkg/public/server_test.go index ad974ae8b1..512c92e652 100644 --- a/pkg/public/server_test.go +++ b/pkg/public/server_test.go @@ -340,7 +340,7 @@ func Test__CreateOrganization(t *testing.T) { user, err := models.FindActiveUserByEmail(orgID, account.Email) require.NoError(t, err) - assert.Equal(t, account.Email, user.Email) + assert.Equal(t, account.Email, user.GetEmail()) roles, err := authService.GetUserRolesForOrg(user.ID.String(), orgID) require.NoError(t, err) diff --git a/pkg/workers/contexts/auth_context.go b/pkg/workers/contexts/auth_context.go index 513d12a54b..709dd6689b 100644 --- a/pkg/workers/contexts/auth_context.go +++ b/pkg/workers/contexts/auth_context.go @@ -35,7 +35,7 @@ func (c *AuthContext) AuthenticatedUser() *core.User { return &core.User{ ID: c.authenticatedUser.ID.String(), Name: 
c.authenticatedUser.Name, - Email: c.authenticatedUser.Email, + Email: c.authenticatedUser.GetEmail(), } } @@ -48,7 +48,7 @@ func (c *AuthContext) GetUser(id uuid.UUID) (*core.User, error) { return &core.User{ ID: user.ID.String(), Name: user.Name, - Email: user.Email, + Email: user.GetEmail(), }, nil } diff --git a/pkg/workers/invitation_email_consumer.go b/pkg/workers/invitation_email_consumer.go index 6caae20c50..4782d32cb8 100644 --- a/pkg/workers/invitation_email_consumer.go +++ b/pkg/workers/invitation_email_consumer.go @@ -121,7 +121,7 @@ func (c *InvitationEmailConsumer) Consume(delivery tackle.Delivery) error { invitation.Email, org.Name, c.BaseURL+"/login", - inviter.Email, + inviter.GetEmail(), ) if err != nil { diff --git a/pkg/workers/node_executor_test.go b/pkg/workers/node_executor_test.go index 25393d2a9c..594a3dc9dc 100644 --- a/pkg/workers/node_executor_test.go +++ b/pkg/workers/node_executor_test.go @@ -244,7 +244,7 @@ func Test__NodeExecutor_ComponentNodeWithoutStateChange(t *testing.T) { "user": map[string]any{ "id": r.User.String(), "name": r.UserModel.Name, - "email": r.UserModel.Email, + "email": r.UserModel.GetEmail(), }, }, }, diff --git a/pkg/workers/notification_email_consumer.go b/pkg/workers/notification_email_consumer.go index e73c48b1cb..bd14eac73e 100644 --- a/pkg/workers/notification_email_consumer.go +++ b/pkg/workers/notification_email_consumer.go @@ -158,7 +158,7 @@ func addUsersToRecipientSet(orgID uuid.UUID, userIDs []string, recipients map[st } for _, user := range users { - normalized := utils.NormalizeEmail(user.Email) + normalized := utils.NormalizeEmail(user.GetEmail()) if normalized == "" { continue } diff --git a/pkg/workers/notification_email_consumer_test.go b/pkg/workers/notification_email_consumer_test.go index 71c739c7c0..5944a56cb1 100644 --- a/pkg/workers/notification_email_consumer_test.go +++ b/pkg/workers/notification_email_consumer_test.go @@ -64,7 +64,7 @@ func Test__NotificationEmailConsumer(t *testing.T) { 
"Please review the pending approval.", "https://app.superplane.com/approvals/123", "Review approval", - []string{groupUser.Email, "external@example.com"}, + []string{groupUser.GetEmail(), "external@example.com"}, []string{groupName}, []string{models.RoleOrgAdmin}, ) @@ -82,7 +82,7 @@ func Test__NotificationEmailConsumer(t *testing.T) { bcc := sentEmails[0].Bcc sort.Strings(bcc) - expected := []string{groupUser.Email, roleUser.Email, "external@example.com"} + expected := []string{groupUser.GetEmail(), roleUser.GetEmail(), "external@example.com"} sort.Strings(expected) assert.Equal(t, expected, bcc) diff --git a/protos/service_accounts.proto b/protos/service_accounts.proto new file mode 100644 index 0000000000..ab20ca7c0d --- /dev/null +++ b/protos/service_accounts.proto @@ -0,0 +1,156 @@ +syntax = "proto3"; + +package Superplane.ServiceAccounts; + +import "google/api/annotations.proto"; +import "google/protobuf/timestamp.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; + +option go_package = "github.com/superplanehq/superplane/pkg/protos/service_accounts"; + +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + info: { + title: "Superplane Service Accounts API"; + version: "1.0"; + description: "API for Superplane Service Accounts"; + contact: { + name: "API Support"; + email: "support@superplane.com"; + }; + }; + schemes: HTTP; + schemes: HTTPS; + consumes: "application/json"; + produces: "application/json"; +}; + +service ServiceAccounts { + rpc CreateServiceAccount(CreateServiceAccountRequest) returns (CreateServiceAccountResponse) { + option (google.api.http) = { + post: "/api/v1/service-accounts" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "Create a service account"; + description: "Creates a new service account in the organization"; + tags: "ServiceAccounts"; + }; + } + + rpc ListServiceAccounts(ListServiceAccountsRequest) returns (ListServiceAccountsResponse) { 
+ option (google.api.http) = { + get: "/api/v1/service-accounts" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "List service accounts"; + description: "Returns all service accounts in the organization"; + tags: "ServiceAccounts"; + }; + } + + rpc DescribeServiceAccount(DescribeServiceAccountRequest) returns (DescribeServiceAccountResponse) { + option (google.api.http) = { + get: "/api/v1/service-accounts/{id}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "Describe a service account"; + description: "Returns details of a specific service account"; + tags: "ServiceAccounts"; + }; + } + + rpc UpdateServiceAccount(UpdateServiceAccountRequest) returns (UpdateServiceAccountResponse) { + option (google.api.http) = { + patch: "/api/v1/service-accounts/{id}" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "Update a service account"; + description: "Updates the name or description of a service account"; + tags: "ServiceAccounts"; + }; + } + + rpc DeleteServiceAccount(DeleteServiceAccountRequest) returns (DeleteServiceAccountResponse) { + option (google.api.http) = { + delete: "/api/v1/service-accounts/{id}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "Delete a service account"; + description: "Deletes a service account and removes its RBAC policies"; + tags: "ServiceAccounts"; + }; + } + + rpc RegenerateServiceAccountToken(RegenerateServiceAccountTokenRequest) returns (RegenerateServiceAccountTokenResponse) { + option (google.api.http) = { + post: "/api/v1/service-accounts/{id}/token" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "Regenerate service account token"; + description: "Regenerates the API token for a service account"; + tags: "ServiceAccounts"; + }; + } +} + +message ServiceAccount { + string id = 1; + 
string name = 2; + string description = 3; + string organization_id = 4; + string created_by = 5; + bool has_token = 6; + google.protobuf.Timestamp created_at = 7; + google.protobuf.Timestamp updated_at = 8; +} + +message CreateServiceAccountRequest { + string name = 1; + string description = 2; + string role = 3; +} + +message CreateServiceAccountResponse { + ServiceAccount service_account = 1; + string token = 2; +} + +message ListServiceAccountsRequest {} + +message ListServiceAccountsResponse { + repeated ServiceAccount service_accounts = 1; +} + +message DescribeServiceAccountRequest { + string id = 1; +} + +message DescribeServiceAccountResponse { + ServiceAccount service_account = 1; +} + +message UpdateServiceAccountRequest { + string id = 1; + string name = 2; + string description = 3; +} + +message UpdateServiceAccountResponse { + ServiceAccount service_account = 1; +} + +message DeleteServiceAccountRequest { + string id = 1; +} + +message DeleteServiceAccountResponse {} + +message RegenerateServiceAccountTokenRequest { + string id = 1; +} + +message RegenerateServiceAccountTokenResponse { + string token = 1; +} diff --git a/protos/users.proto b/protos/users.proto index d386e60d51..87adee3471 100644 --- a/protos/users.proto +++ b/protos/users.proto @@ -102,6 +102,7 @@ message ListUserRolesResponse { message ListUsersRequest { Authorization.DomainType domain_type = 1; string domain_id = 2; + bool include_service_accounts = 3; } message ListUsersResponse { diff --git a/rbac/rbac_org_policy.csv b/rbac/rbac_org_policy.csv index 405bd98547..5b6a3fdb21 100644 --- a/rbac/rbac_org_policy.csv +++ b/rbac/rbac_org_policy.csv @@ -6,6 +6,7 @@ p,/roles/org_viewer,/org/*,groups,read p,/roles/org_viewer,/org/*,members,read p,/roles/org_viewer,/org/*,canvases,read p,/roles/org_viewer,/org/*,blueprints,read +p,/roles/org_viewer,/org/*,service_accounts,read p,/roles/org_admin,/org/*,canvases,create p,/roles/org_admin,/org/*,canvases,update 
p,/roles/org_admin,/org/*,canvases,delete @@ -29,6 +30,9 @@ p,/roles/org_admin,/org/*,roles,delete p,/roles/org_admin,/org/*,blueprints,create p,/roles/org_admin,/org/*,blueprints,update p,/roles/org_admin,/org/*,blueprints,delete +p,/roles/org_admin,/org/*,service_accounts,create +p,/roles/org_admin,/org/*,service_accounts,update +p,/roles/org_admin,/org/*,service_accounts,delete p,/roles/org_owner,/org/*,integrations,delete p,/roles/org_owner,/org/*,org,update p,/roles/org_owner,/org/*,org,delete diff --git a/test/e2e/service_accounts_test.go b/test/e2e/service_accounts_test.go new file mode 100644 index 0000000000..d29c9bce68 --- /dev/null +++ b/test/e2e/service_accounts_test.go @@ -0,0 +1,343 @@ +package e2e + +import ( + "testing" + + pw "github.com/playwright-community/playwright-go" + "github.com/stretchr/testify/require" + "github.com/superplanehq/superplane/pkg/authorization" + "github.com/superplanehq/superplane/pkg/database" + "github.com/superplanehq/superplane/pkg/models" + q "github.com/superplanehq/superplane/test/e2e/queries" + "github.com/superplanehq/superplane/test/e2e/session" + "github.com/superplanehq/superplane/test/support" +) + +func TestServiceAccounts(t *testing.T) { + steps := &serviceAccountSteps{t: t} + + t.Run("creating a service account with viewer role", func(t *testing.T) { + steps.start() + steps.visitServiceAccountsPage() + steps.clickCreateServiceAccount() + steps.fillName("ci-deploy-bot") + steps.fillDescription("Deploys from CI") + steps.selectRole("Viewer") + steps.submitCreate() + steps.assertTokenDisplayed() + steps.dismissTokenModal() + steps.assertServiceAccountSavedInDB("ci-deploy-bot", "Deploys from CI", models.RoleOrgViewer) + }) + + t.Run("creating a service account with admin role", func(t *testing.T) { + steps.start() + steps.visitServiceAccountsPage() + steps.clickCreateServiceAccount() + steps.fillName("admin-bot") + steps.fillDescription("Admin automation") + steps.selectRole("Admin") + steps.submitCreate() + 
steps.assertTokenDisplayed() + steps.dismissTokenModal() + steps.assertServiceAccountSavedInDB("admin-bot", "Admin automation", models.RoleOrgAdmin) + }) + + t.Run("viewing service accounts in the list", func(t *testing.T) { + steps.start() + steps.givenServiceAccountExists("list-test-bot", "For listing test") + steps.visitServiceAccountsPage() + steps.assertServiceAccountVisibleInList("list-test-bot") + }) + + t.Run("navigating to service account detail", func(t *testing.T) { + steps.start() + steps.givenServiceAccountExists("detail-test-bot", "For detail test") + steps.visitServiceAccountsPage() + steps.clickServiceAccountLink("detail-test-bot") + steps.assertOnDetailPage("detail-test-bot") + }) + + t.Run("editing a service account", func(t *testing.T) { + steps.start() + steps.givenServiceAccountExists("edit-test-bot", "Original description") + steps.visitServiceAccountsPage() + steps.clickServiceAccountLink("edit-test-bot") + steps.clickEditButton() + steps.clearAndFillEditName("edited-bot") + steps.clearAndFillEditDescription("Updated description") + steps.submitEdit() + steps.assertServiceAccountNameInDB("edited-bot") + }) + + t.Run("deleting a service account", func(t *testing.T) { + steps.start() + steps.givenServiceAccountExists("delete-test-bot", "Will be deleted") + steps.visitServiceAccountsPage() + steps.assertServiceAccountVisibleInList("delete-test-bot") + steps.clickServiceAccountLink("delete-test-bot") + steps.clickDeleteOnDetail() + steps.assertServiceAccountDeletedFromDB("delete-test-bot") + }) + + t.Run("regenerating a service account token", func(t *testing.T) { + steps.start() + steps.givenServiceAccountExists("regen-test-bot", "Token regen test") + steps.visitServiceAccountsPage() + steps.clickServiceAccountLink("regen-test-bot") + steps.clickRegenerateToken() + steps.assertTokenDisplayed() + }) + + t.Run("viewer cannot create or manage service accounts", func(t *testing.T) { + steps.start() + 
steps.givenServiceAccountExists("viewer-test-bot", "Viewer RBAC test") + steps.loginAsViewer() + steps.visitServiceAccountsPage() + steps.assertCreateButtonDisabled() + steps.clickServiceAccountLink("viewer-test-bot") + steps.assertEditButtonDisabled() + steps.assertDeleteButtonDisabled() + }) +} + +type serviceAccountSteps struct { + t *testing.T + session *session.TestSession +} + +func (s *serviceAccountSteps) start() { + s.session = ctx.NewSession(s.t) + s.session.Start() + s.session.Login() +} + +func (s *serviceAccountSteps) visitServiceAccountsPage() { + s.session.Visit("/" + s.session.OrgID.String() + "/settings/service-accounts") + s.session.Sleep(500) +} + +func (s *serviceAccountSteps) clickCreateServiceAccount() { + page := s.session.Page() + createBtn := page.GetByTestId("sa-create-btn") + err := createBtn.First().Click() + require.NoError(s.t, err) + s.session.Sleep(500) +} + +func (s *serviceAccountSteps) fillName(name string) { + page := s.session.Page() + err := page.GetByTestId("sa-create-name").Fill(name) + require.NoError(s.t, err) + s.session.Sleep(200) +} + +func (s *serviceAccountSteps) fillDescription(description string) { + page := s.session.Page() + err := page.GetByTestId("sa-create-description").Fill(description) + require.NoError(s.t, err) + s.session.Sleep(200) +} + +func (s *serviceAccountSteps) selectRole(roleLabel string) { + page := s.session.Page() + + trigger := page.GetByTestId("sa-create-role") + err := trigger.Click() + require.NoError(s.t, err) + s.session.Sleep(300) + + option := page.GetByRole("option", pw.PageGetByRoleOptions{Name: roleLabel, Exact: pw.Bool(true)}) + err = option.Click() + require.NoError(s.t, err) + s.session.Sleep(300) +} + +func (s *serviceAccountSteps) submitCreate() { + page := s.session.Page() + err := page.GetByTestId("sa-create-submit").Click() + require.NoError(s.t, err) + s.session.Sleep(1000) +} + +func (s *serviceAccountSteps) assertTokenDisplayed() { + page := s.session.Page() + tokenInput := 
page.GetByTestId("sa-token-display") + err := tokenInput.WaitFor(pw.LocatorWaitForOptions{State: pw.WaitForSelectorStateVisible, Timeout: pw.Float(5000)}) + require.NoError(s.t, err) + + value, err := tokenInput.InputValue() + require.NoError(s.t, err) + require.NotEmpty(s.t, value, "token should not be empty") +} + +func (s *serviceAccountSteps) dismissTokenModal() { + page := s.session.Page() + err := page.GetByTestId("sa-token-done").Click() + require.NoError(s.t, err) + s.session.Sleep(500) +} + +func (s *serviceAccountSteps) assertServiceAccountSavedInDB(name, description, expectedRole string) { + orgID := s.session.OrgID.String() + serviceAccounts, err := models.FindServiceAccountsByOrganization(orgID) + require.NoError(s.t, err) + + var found *models.User + for i := range serviceAccounts { + if serviceAccounts[i].Name == name { + found = &serviceAccounts[i] + break + } + } + require.NotNil(s.t, found, "service account %q should exist in DB", name) + require.Equal(s.t, models.UserTypeServiceAccount, found.Type) + require.NotNil(s.t, found.Description) + require.Equal(s.t, description, *found.Description) + require.NotEmpty(s.t, found.TokenHash, "token hash should be set") + + // Verify the role was assigned correctly via casbin + var casbinRule struct { + V0 string + V1 string + } + err = database.Conn(). + Table("casbin_rule"). + Select("v0, v1"). + Where("ptype = 'g' AND v0 = ? AND v2 LIKE ?", "/users/"+found.ID.String(), "/org/%"). 
+ First(&casbinRule).Error + require.NoError(s.t, err) + require.Equal(s.t, "/roles/"+expectedRole, casbinRule.V1) +} + +func (s *serviceAccountSteps) assertServiceAccountVisibleInList(name string) { + s.session.AssertText(name) +} + +func (s *serviceAccountSteps) clickServiceAccountLink(name string) { + page := s.session.Page() + link := page.GetByTestId("sa-link").GetByText(name, pw.LocatorGetByTextOptions{Exact: pw.Bool(true)}) + err := link.Click() + require.NoError(s.t, err) + s.session.Sleep(500) +} + +func (s *serviceAccountSteps) assertOnDetailPage(name string) { + s.session.AssertText(name) + s.session.AssertText("API Token") +} + +func (s *serviceAccountSteps) clickEditButton() { + page := s.session.Page() + err := page.GetByTestId("sa-detail-edit").Click() + require.NoError(s.t, err) + s.session.Sleep(300) +} + +func (s *serviceAccountSteps) clearAndFillEditName(name string) { + page := s.session.Page() + input := page.GetByTestId("sa-detail-edit-name") + err := input.Fill(name) + require.NoError(s.t, err) + s.session.Sleep(200) +} + +func (s *serviceAccountSteps) clearAndFillEditDescription(description string) { + page := s.session.Page() + input := page.GetByTestId("sa-detail-edit-description") + err := input.Fill(description) + require.NoError(s.t, err) + s.session.Sleep(200) +} + +func (s *serviceAccountSteps) submitEdit() { + page := s.session.Page() + saveBtn := page.Locator("button:has-text('Save')").First() + err := saveBtn.Click() + require.NoError(s.t, err) + s.session.Sleep(1000) +} + +func (s *serviceAccountSteps) assertServiceAccountNameInDB(name string) { + serviceAccounts, err := models.FindServiceAccountsByOrganization(s.session.OrgID.String()) + require.NoError(s.t, err) + + for _, sa := range serviceAccounts { + if sa.Name == name { + return + } + } + require.Fail(s.t, "service account %q not found in DB", name) +} + +func (s *serviceAccountSteps) clickDeleteOnDetail() { + page := s.session.Page() + err := 
page.GetByTestId("sa-detail-delete").Click() + require.NoError(s.t, err) + s.session.Sleep(1000) +} + +func (s *serviceAccountSteps) assertServiceAccountDeletedFromDB(name string) { + serviceAccounts, err := models.FindServiceAccountsByOrganization(s.session.OrgID.String()) + require.NoError(s.t, err) + + for _, sa := range serviceAccounts { + if sa.Name == name { + require.Fail(s.t, "service account %q should have been deleted", name) + } + } +} + +func (s *serviceAccountSteps) clickRegenerateToken() { + page := s.session.Page() + err := page.GetByTestId("sa-detail-regenerate-token").Click() + require.NoError(s.t, err) + s.session.Sleep(1000) +} + +func (s *serviceAccountSteps) loginAsViewer() { + viewerEmail := support.RandomName("viewer") + "@superplane.local" + viewerAccount, err := models.CreateAccount("Viewer User", viewerEmail) + require.NoError(s.t, err) + + viewerUser, err := models.CreateUser(s.session.OrgID, viewerAccount.ID, viewerEmail, "Viewer User") + require.NoError(s.t, err) + + authService, err := authorization.NewAuthService() + require.NoError(s.t, err) + + err = authService.AssignRole(viewerUser.ID.String(), models.RoleOrgViewer, s.session.OrgID.String(), models.DomainTypeOrganization) + require.NoError(s.t, err) + + s.session.Account = viewerAccount + s.session.Login() +} + +func (s *serviceAccountSteps) assertCreateButtonDisabled() { + s.session.AssertDisabled(q.TestID("sa-create-btn")) +} + +func (s *serviceAccountSteps) assertEditButtonDisabled() { + s.session.AssertDisabled(q.TestID("sa-detail-edit")) +} + +func (s *serviceAccountSteps) assertDeleteButtonDisabled() { + s.session.AssertDisabled(q.TestID("sa-detail-delete")) +} + +// givenServiceAccountExists creates a service account directly in the DB for test setup. 
+func (s *serviceAccountSteps) givenServiceAccountExists(name, description string) { + // Look up the human user to use as created_by (the FK references users.id, not accounts.id) + user, err := models.FindMaybeDeletedUserByEmail(s.session.OrgID.String(), "e2e@superplane.local") + require.NoError(s.t, err) + + desc := description + sa, err := models.CreateServiceAccount( + database.Conn(), + s.session.OrgID, + name, + &desc, + user.ID, + ) + require.NoError(s.t, err) + require.NotNil(s.t, sa) +} diff --git a/web_src/src/api-client/index.ts b/web_src/src/api-client/index.ts index 839884e0e4..cab5129b97 100644 --- a/web_src/src/api-client/index.ts +++ b/web_src/src/api-client/index.ts @@ -70,6 +70,12 @@ export { secretsSetSecretKey, secretsUpdateSecret, secretsUpdateSecretName, + serviceAccountsCreateServiceAccount, + serviceAccountsDeleteServiceAccount, + serviceAccountsDescribeServiceAccount, + serviceAccountsListServiceAccounts, + serviceAccountsRegenerateServiceAccountToken, + serviceAccountsUpdateServiceAccount, triggersDescribeTrigger, triggersListTriggers, usersListUserPermissions, @@ -585,6 +591,46 @@ export type { SecretsUpdateSecretResponse, SecretsUpdateSecretResponse2, SecretsUpdateSecretResponses, + ServiceAccountsCreateServiceAccountData, + ServiceAccountsCreateServiceAccountError, + ServiceAccountsCreateServiceAccountErrors, + ServiceAccountsCreateServiceAccountRequest, + ServiceAccountsCreateServiceAccountResponse, + ServiceAccountsCreateServiceAccountResponse2, + ServiceAccountsCreateServiceAccountResponses, + ServiceAccountsDeleteServiceAccountData, + ServiceAccountsDeleteServiceAccountError, + ServiceAccountsDeleteServiceAccountErrors, + ServiceAccountsDeleteServiceAccountResponse, + ServiceAccountsDeleteServiceAccountResponse2, + ServiceAccountsDeleteServiceAccountResponses, + ServiceAccountsDescribeServiceAccountData, + ServiceAccountsDescribeServiceAccountError, + ServiceAccountsDescribeServiceAccountErrors, + 
ServiceAccountsDescribeServiceAccountResponse, + ServiceAccountsDescribeServiceAccountResponse2, + ServiceAccountsDescribeServiceAccountResponses, + ServiceAccountsListServiceAccountsData, + ServiceAccountsListServiceAccountsError, + ServiceAccountsListServiceAccountsErrors, + ServiceAccountsListServiceAccountsResponse, + ServiceAccountsListServiceAccountsResponse2, + ServiceAccountsListServiceAccountsResponses, + ServiceAccountsRegenerateServiceAccountTokenBody, + ServiceAccountsRegenerateServiceAccountTokenData, + ServiceAccountsRegenerateServiceAccountTokenError, + ServiceAccountsRegenerateServiceAccountTokenErrors, + ServiceAccountsRegenerateServiceAccountTokenResponse, + ServiceAccountsRegenerateServiceAccountTokenResponse2, + ServiceAccountsRegenerateServiceAccountTokenResponses, + ServiceAccountsServiceAccount, + ServiceAccountsUpdateServiceAccountBody, + ServiceAccountsUpdateServiceAccountData, + ServiceAccountsUpdateServiceAccountError, + ServiceAccountsUpdateServiceAccountErrors, + ServiceAccountsUpdateServiceAccountResponse, + ServiceAccountsUpdateServiceAccountResponse2, + ServiceAccountsUpdateServiceAccountResponses, SuperplaneBlueprintsOutputChannel, SuperplaneBlueprintsUserRef, SuperplaneCanvasesUserRef, diff --git a/web_src/src/api-client/sdk.gen.ts b/web_src/src/api-client/sdk.gen.ts index f6a014a166..152b8da701 100644 --- a/web_src/src/api-client/sdk.gen.ts +++ b/web_src/src/api-client/sdk.gen.ts @@ -207,6 +207,24 @@ import type { SecretsUpdateSecretNameErrors, SecretsUpdateSecretNameResponses, SecretsUpdateSecretResponses, + ServiceAccountsCreateServiceAccountData, + ServiceAccountsCreateServiceAccountErrors, + ServiceAccountsCreateServiceAccountResponses, + ServiceAccountsDeleteServiceAccountData, + ServiceAccountsDeleteServiceAccountErrors, + ServiceAccountsDeleteServiceAccountResponses, + ServiceAccountsDescribeServiceAccountData, + ServiceAccountsDescribeServiceAccountErrors, + ServiceAccountsDescribeServiceAccountResponses, + 
ServiceAccountsListServiceAccountsData, + ServiceAccountsListServiceAccountsErrors, + ServiceAccountsListServiceAccountsResponses, + ServiceAccountsRegenerateServiceAccountTokenData, + ServiceAccountsRegenerateServiceAccountTokenErrors, + ServiceAccountsRegenerateServiceAccountTokenResponses, + ServiceAccountsUpdateServiceAccountData, + ServiceAccountsUpdateServiceAccountErrors, + ServiceAccountsUpdateServiceAccountResponses, TriggersDescribeTriggerData, TriggersDescribeTriggerErrors, TriggersDescribeTriggerResponses, @@ -1280,6 +1298,111 @@ export const secretsUpdateSecretName = ( }, }); +/** + * List service accounts + * + * Returns all service accounts in the organization + */ +export const serviceAccountsListServiceAccounts = ( + options?: Options, +) => + (options?.client ?? client).get< + ServiceAccountsListServiceAccountsResponses, + ServiceAccountsListServiceAccountsErrors, + ThrowOnError + >({ url: "/api/v1/service-accounts", ...options }); + +/** + * Create a service account + * + * Creates a new service account in the organization + */ +export const serviceAccountsCreateServiceAccount = ( + options: Options, +) => + (options.client ?? client).post< + ServiceAccountsCreateServiceAccountResponses, + ServiceAccountsCreateServiceAccountErrors, + ThrowOnError + >({ + url: "/api/v1/service-accounts", + ...options, + headers: { + "Content-Type": "application/json", + ...options.headers, + }, + }); + +/** + * Delete a service account + * + * Deletes a service account and removes its RBAC policies + */ +export const serviceAccountsDeleteServiceAccount = ( + options: Options, +) => + (options.client ?? 
client).delete< + ServiceAccountsDeleteServiceAccountResponses, + ServiceAccountsDeleteServiceAccountErrors, + ThrowOnError + >({ url: "/api/v1/service-accounts/{id}", ...options }); + +/** + * Describe a service account + * + * Returns details of a specific service account + */ +export const serviceAccountsDescribeServiceAccount = ( + options: Options, +) => + (options.client ?? client).get< + ServiceAccountsDescribeServiceAccountResponses, + ServiceAccountsDescribeServiceAccountErrors, + ThrowOnError + >({ url: "/api/v1/service-accounts/{id}", ...options }); + +/** + * Update a service account + * + * Updates the name or description of a service account + */ +export const serviceAccountsUpdateServiceAccount = ( + options: Options, +) => + (options.client ?? client).patch< + ServiceAccountsUpdateServiceAccountResponses, + ServiceAccountsUpdateServiceAccountErrors, + ThrowOnError + >({ + url: "/api/v1/service-accounts/{id}", + ...options, + headers: { + "Content-Type": "application/json", + ...options.headers, + }, + }); + +/** + * Regenerate service account token + * + * Regenerates the API token for a service account + */ +export const serviceAccountsRegenerateServiceAccountToken = ( + options: Options, +) => + (options.client ?? 
client).post< + ServiceAccountsRegenerateServiceAccountTokenResponses, + ServiceAccountsRegenerateServiceAccountTokenErrors, + ThrowOnError + >({ + url: "/api/v1/service-accounts/{id}/token", + ...options, + headers: { + "Content-Type": "application/json", + ...options.headers, + }, + }); + /** * List triggers * diff --git a/web_src/src/api-client/types.gen.ts b/web_src/src/api-client/types.gen.ts index 8a782d424f..b2fcb7039d 100644 --- a/web_src/src/api-client/types.gen.ts +++ b/web_src/src/api-client/types.gen.ts @@ -923,6 +923,57 @@ export type SecretsUpdateSecretResponse = { secret?: SecretsSecret; }; +export type ServiceAccountsCreateServiceAccountRequest = { + name?: string; + description?: string; + role?: string; +}; + +export type ServiceAccountsCreateServiceAccountResponse = { + serviceAccount?: ServiceAccountsServiceAccount; + token?: string; +}; + +export type ServiceAccountsDeleteServiceAccountResponse = { + [key: string]: unknown; +}; + +export type ServiceAccountsDescribeServiceAccountResponse = { + serviceAccount?: ServiceAccountsServiceAccount; +}; + +export type ServiceAccountsListServiceAccountsResponse = { + serviceAccounts?: Array; +}; + +export type ServiceAccountsRegenerateServiceAccountTokenBody = { + [key: string]: unknown; +}; + +export type ServiceAccountsRegenerateServiceAccountTokenResponse = { + token?: string; +}; + +export type ServiceAccountsServiceAccount = { + id?: string; + name?: string; + description?: string; + organizationId?: string; + createdBy?: string; + hasToken?: boolean; + createdAt?: string; + updatedAt?: string; +}; + +export type ServiceAccountsUpdateServiceAccountBody = { + name?: string; + description?: string; +}; + +export type ServiceAccountsUpdateServiceAccountResponse = { + serviceAccount?: ServiceAccountsServiceAccount; +}; + export type SuperplaneBlueprintsOutputChannel = { name?: string; nodeId?: string; @@ -3027,6 +3078,176 @@ export type SecretsUpdateSecretNameResponses = { export type 
SecretsUpdateSecretNameResponse2 = SecretsUpdateSecretNameResponses[keyof SecretsUpdateSecretNameResponses]; +export type ServiceAccountsListServiceAccountsData = { + body?: never; + path?: never; + query?: never; + url: "/api/v1/service-accounts"; +}; + +export type ServiceAccountsListServiceAccountsErrors = { + /** + * An unexpected error response. + */ + default: GooglerpcStatus; +}; + +export type ServiceAccountsListServiceAccountsError = + ServiceAccountsListServiceAccountsErrors[keyof ServiceAccountsListServiceAccountsErrors]; + +export type ServiceAccountsListServiceAccountsResponses = { + /** + * A successful response. + */ + 200: ServiceAccountsListServiceAccountsResponse; +}; + +export type ServiceAccountsListServiceAccountsResponse2 = + ServiceAccountsListServiceAccountsResponses[keyof ServiceAccountsListServiceAccountsResponses]; + +export type ServiceAccountsCreateServiceAccountData = { + body: ServiceAccountsCreateServiceAccountRequest; + path?: never; + query?: never; + url: "/api/v1/service-accounts"; +}; + +export type ServiceAccountsCreateServiceAccountErrors = { + /** + * An unexpected error response. + */ + default: GooglerpcStatus; +}; + +export type ServiceAccountsCreateServiceAccountError = + ServiceAccountsCreateServiceAccountErrors[keyof ServiceAccountsCreateServiceAccountErrors]; + +export type ServiceAccountsCreateServiceAccountResponses = { + /** + * A successful response. + */ + 200: ServiceAccountsCreateServiceAccountResponse; +}; + +export type ServiceAccountsCreateServiceAccountResponse2 = + ServiceAccountsCreateServiceAccountResponses[keyof ServiceAccountsCreateServiceAccountResponses]; + +export type ServiceAccountsDeleteServiceAccountData = { + body?: never; + path: { + id: string; + }; + query?: never; + url: "/api/v1/service-accounts/{id}"; +}; + +export type ServiceAccountsDeleteServiceAccountErrors = { + /** + * An unexpected error response. 
+ */ + default: GooglerpcStatus; +}; + +export type ServiceAccountsDeleteServiceAccountError = + ServiceAccountsDeleteServiceAccountErrors[keyof ServiceAccountsDeleteServiceAccountErrors]; + +export type ServiceAccountsDeleteServiceAccountResponses = { + /** + * A successful response. + */ + 200: ServiceAccountsDeleteServiceAccountResponse; +}; + +export type ServiceAccountsDeleteServiceAccountResponse2 = + ServiceAccountsDeleteServiceAccountResponses[keyof ServiceAccountsDeleteServiceAccountResponses]; + +export type ServiceAccountsDescribeServiceAccountData = { + body?: never; + path: { + id: string; + }; + query?: never; + url: "/api/v1/service-accounts/{id}"; +}; + +export type ServiceAccountsDescribeServiceAccountErrors = { + /** + * An unexpected error response. + */ + default: GooglerpcStatus; +}; + +export type ServiceAccountsDescribeServiceAccountError = + ServiceAccountsDescribeServiceAccountErrors[keyof ServiceAccountsDescribeServiceAccountErrors]; + +export type ServiceAccountsDescribeServiceAccountResponses = { + /** + * A successful response. + */ + 200: ServiceAccountsDescribeServiceAccountResponse; +}; + +export type ServiceAccountsDescribeServiceAccountResponse2 = + ServiceAccountsDescribeServiceAccountResponses[keyof ServiceAccountsDescribeServiceAccountResponses]; + +export type ServiceAccountsUpdateServiceAccountData = { + body: ServiceAccountsUpdateServiceAccountBody; + path: { + id: string; + }; + query?: never; + url: "/api/v1/service-accounts/{id}"; +}; + +export type ServiceAccountsUpdateServiceAccountErrors = { + /** + * An unexpected error response. + */ + default: GooglerpcStatus; +}; + +export type ServiceAccountsUpdateServiceAccountError = + ServiceAccountsUpdateServiceAccountErrors[keyof ServiceAccountsUpdateServiceAccountErrors]; + +export type ServiceAccountsUpdateServiceAccountResponses = { + /** + * A successful response. 
+ */ + 200: ServiceAccountsUpdateServiceAccountResponse; +}; + +export type ServiceAccountsUpdateServiceAccountResponse2 = + ServiceAccountsUpdateServiceAccountResponses[keyof ServiceAccountsUpdateServiceAccountResponses]; + +export type ServiceAccountsRegenerateServiceAccountTokenData = { + body: ServiceAccountsRegenerateServiceAccountTokenBody; + path: { + id: string; + }; + query?: never; + url: "/api/v1/service-accounts/{id}/token"; +}; + +export type ServiceAccountsRegenerateServiceAccountTokenErrors = { + /** + * An unexpected error response. + */ + default: GooglerpcStatus; +}; + +export type ServiceAccountsRegenerateServiceAccountTokenError = + ServiceAccountsRegenerateServiceAccountTokenErrors[keyof ServiceAccountsRegenerateServiceAccountTokenErrors]; + +export type ServiceAccountsRegenerateServiceAccountTokenResponses = { + /** + * A successful response. + */ + 200: ServiceAccountsRegenerateServiceAccountTokenResponse; +}; + +export type ServiceAccountsRegenerateServiceAccountTokenResponse2 = + ServiceAccountsRegenerateServiceAccountTokenResponses[keyof ServiceAccountsRegenerateServiceAccountTokenResponses]; + export type TriggersListTriggersData = { body?: never; path?: never; @@ -3085,6 +3306,7 @@ export type UsersListUsersData = { query?: { domainType?: "DOMAIN_TYPE_UNSPECIFIED" | "DOMAIN_TYPE_ORGANIZATION"; domainId?: string; + includeServiceAccounts?: boolean; }; url: "/api/v1/users"; }; diff --git a/web_src/src/components/OrganizationMenuButton.tsx b/web_src/src/components/OrganizationMenuButton.tsx index 1698006393..d8e10f0798 100644 --- a/web_src/src/components/OrganizationMenuButton.tsx +++ b/web_src/src/components/OrganizationMenuButton.tsx @@ -4,6 +4,7 @@ import { useOrganization } from "@/hooks/useOrganizationData"; import { cn } from "@/lib/utils"; import { ArrowRightLeft, + Bot, ChevronDown, CircleUser, Key, @@ -97,6 +98,12 @@ export function OrganizationMenuButton({ organizationId, onLogoClick, className Icon: UserIcon, permission: { 
resource: "members", action: "read" }, }, + { + label: "Service Accounts", + href: organizationId ? `/${organizationId}/settings/service-accounts` : "#", + Icon: Bot, + permission: { resource: "service_accounts", action: "read" }, + }, { label: "Groups", href: organizationId ? `/${organizationId}/settings/groups` : "#", diff --git a/web_src/src/hooks/useOrganizationData.ts b/web_src/src/hooks/useOrganizationData.ts index 2dd916c581..8e15641958 100644 --- a/web_src/src/hooks/useOrganizationData.ts +++ b/web_src/src/hooks/useOrganizationData.ts @@ -58,13 +58,19 @@ export const useOrganization = (organizationId: string) => { }); }; -export const useOrganizationUsers = (organizationId: string) => { +export const useOrganizationUsers = (organizationId: string, includeServiceAccounts = false) => { return useQuery({ - queryKey: organizationKeys.users(organizationId), + queryKey: includeServiceAccounts + ? [...organizationKeys.users(organizationId), includeServiceAccounts] + : organizationKeys.users(organizationId), queryFn: async () => { const response = await usersListUsers( withOrganizationHeader({ - query: { domainType: "DOMAIN_TYPE_ORGANIZATION", domainId: organizationId }, + query: { + domainType: "DOMAIN_TYPE_ORGANIZATION", + domainId: organizationId, + includeServiceAccounts, + }, }), ); return response.data?.users || []; diff --git a/web_src/src/hooks/useServiceAccounts.ts b/web_src/src/hooks/useServiceAccounts.ts new file mode 100644 index 0000000000..51555e0543 --- /dev/null +++ b/web_src/src/hooks/useServiceAccounts.ts @@ -0,0 +1,124 @@ +import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query"; +import { + serviceAccountsListServiceAccounts, + serviceAccountsCreateServiceAccount, + serviceAccountsDescribeServiceAccount, + serviceAccountsUpdateServiceAccount, + serviceAccountsDeleteServiceAccount, + serviceAccountsRegenerateServiceAccountToken, +} from "@/api-client/sdk.gen"; +import { withOrganizationHeader } from 
"@/utils/withOrganizationHeader"; + +export const serviceAccountKeys = { + all: ["serviceAccounts"] as const, + list: (orgId: string) => [...serviceAccountKeys.all, "list", orgId] as const, + detail: (orgId: string, id: string) => [...serviceAccountKeys.all, "detail", orgId, id] as const, +}; + +export const useServiceAccounts = (organizationId: string) => { + return useQuery({ + queryKey: serviceAccountKeys.list(organizationId), + queryFn: async () => { + const response = await serviceAccountsListServiceAccounts(withOrganizationHeader({})); + return response.data?.serviceAccounts || []; + }, + staleTime: 2 * 60 * 1000, + gcTime: 5 * 60 * 1000, + enabled: !!organizationId, + }); +}; + +export const useServiceAccount = (organizationId: string, id: string) => { + return useQuery({ + queryKey: serviceAccountKeys.detail(organizationId, id), + queryFn: async () => { + const response = await serviceAccountsDescribeServiceAccount( + withOrganizationHeader({ + path: { id }, + }), + ); + return response.data?.serviceAccount || null; + }, + staleTime: 2 * 60 * 1000, + gcTime: 5 * 60 * 1000, + enabled: !!organizationId && !!id, + }); +}; + +export const useCreateServiceAccount = (organizationId: string) => { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: async (params: { name: string; description: string; role: string }) => { + return serviceAccountsCreateServiceAccount( + withOrganizationHeader({ + body: { + name: params.name, + description: params.description, + role: params.role, + }, + }), + ); + }, + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: serviceAccountKeys.list(organizationId) }); + }, + }); +}; + +export const useUpdateServiceAccount = (organizationId: string) => { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: async (params: { id: string; name: string; description: string }) => { + return serviceAccountsUpdateServiceAccount( + withOrganizationHeader({ + path: { id: params.id }, + 
body: { + name: params.name, + description: params.description, + }, + }), + ); + }, + onSuccess: (_data, variables) => { + queryClient.invalidateQueries({ queryKey: serviceAccountKeys.list(organizationId) }); + queryClient.invalidateQueries({ queryKey: serviceAccountKeys.detail(organizationId, variables.id) }); + }, + }); +}; + +export const useDeleteServiceAccount = (organizationId: string) => { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: async (id: string) => { + return serviceAccountsDeleteServiceAccount( + withOrganizationHeader({ + path: { id }, + }), + ); + }, + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: serviceAccountKeys.list(organizationId) }); + }, + }); +}; + +export const useRegenerateServiceAccountToken = (organizationId: string) => { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: async (id: string) => { + return serviceAccountsRegenerateServiceAccountToken( + withOrganizationHeader({ + path: { id }, + body: {}, + }), + ); + }, + onSuccess: (_data, id) => { + queryClient.invalidateQueries({ queryKey: serviceAccountKeys.detail(organizationId, id) }); + }, + }); +}; diff --git a/web_src/src/pages/organization/settings/AddMembersSection.tsx b/web_src/src/pages/organization/settings/AddMembersSection.tsx index 7e56b18730..fd6758b2f8 100644 --- a/web_src/src/pages/organization/settings/AddMembersSection.tsx +++ b/web_src/src/pages/organization/settings/AddMembersSection.tsx @@ -29,7 +29,7 @@ const AddMembersSectionComponent = forwardRef
    - {member.metadata?.email || `${member.metadata!.id!}@email.placeholder`} + {member.metadata?.email || "Service Account"}
    diff --git a/web_src/src/pages/organization/settings/GroupMembersPage.tsx b/web_src/src/pages/organization/settings/GroupMembersPage.tsx index 15eb920cc7..aeee5d100a 100644 --- a/web_src/src/pages/organization/settings/GroupMembersPage.tsx +++ b/web_src/src/pages/organization/settings/GroupMembersPage.tsx @@ -256,7 +256,7 @@ export function GroupMembersPage() {
    - {member.metadata?.email} + {member.metadata?.email || "Service Account"}
    (); + const { canAct, isLoading: permissionsLoading } = usePermissions(); + const canUpdate = canAct("service_accounts", "update"); + const canDelete = canAct("service_accounts", "delete"); + + const { data: serviceAccount, isLoading } = useServiceAccount(organizationId, id || ""); + const updateMutation = useUpdateServiceAccount(organizationId); + const deleteMutation = useDeleteServiceAccount(organizationId); + const regenerateTokenMutation = useRegenerateServiceAccountToken(organizationId); + + const [isEditing, setIsEditing] = useState(false); + const [editName, setEditName] = useState(""); + const [editDescription, setEditDescription] = useState(""); + const [newToken, setNewToken] = useState(null); + + const handleEditStart = () => { + setEditName(serviceAccount?.name || ""); + setEditDescription(serviceAccount?.description || ""); + setIsEditing(true); + }; + + const handleEditCancel = () => { + setIsEditing(false); + }; + + const handleEditSave = async () => { + if (!canUpdate || !id) return; + if (!editName?.trim()) { + showErrorToast("Name is required"); + return; + } + try { + await updateMutation.mutateAsync({ + id, + name: editName.trim(), + description: editDescription.trim(), + }); + showSuccessToast("Service account updated"); + setIsEditing(false); + } catch (error) { + showErrorToast(`Failed to update: ${getApiErrorMessage(error)}`); + } + }; + + const handleDelete = async () => { + if (!canDelete || !id) return; + if (!confirm(`Are you sure you want to delete "${serviceAccount?.name}"? This cannot be undone.`)) return; + try { + await deleteMutation.mutateAsync(id); + showSuccessToast("Service account deleted"); + navigate(`/${organizationId}/settings/service-accounts`); + } catch (error) { + showErrorToast(`Failed to delete: ${getApiErrorMessage(error)}`); + } + }; + + const handleRegenerateToken = async () => { + if (!canUpdate || !id) return; + if (!confirm("Are you sure? 
The current token will stop working immediately.")) return; + try { + const result = await regenerateTokenMutation.mutateAsync(id); + const token = result.data?.token; + if (token) { + setNewToken(token); + } + } catch (error) { + showErrorToast(`Failed to regenerate token: ${getApiErrorMessage(error)}`); + } + }; + + const handleCopyToken = async () => { + if (!newToken) return; + try { + await navigator.clipboard.writeText(newToken); + showSuccessToast("Token copied to clipboard"); + } catch { + showErrorToast("Failed to copy token"); + } + }; + + if (isLoading) { + return ( +
    +
    +
    +

    Loading...

    +
    +
    +
    + ); + } + + if (!serviceAccount) { + return ( +
    +
    +
    +

    Service account not found

    +
    +
    +
    + ); + } + + const createdAt = serviceAccount.createdAt ? new Date(serviceAccount.createdAt).toLocaleDateString() : "—"; + + return ( +
    + {/* Back button */} + + + {/* Details */} +
    +
    +
    +
    + +

    {serviceAccount.name}

    +
    +
    + {!isEditing && ( + + + + )} + + + +
    +
    + + {isEditing ? ( +
    { + e.preventDefault(); + handleEditSave(); + }} + > +
    + + setEditName(e.target.value)} + required + data-testid="sa-detail-edit-name" + /> +
    +
    + +