diff --git a/docs/components/Dash0.mdx b/docs/components/Dash0.mdx
index faacc53df3..deea44937f 100644
--- a/docs/components/Dash0.mdx
+++ b/docs/components/Dash0.mdx
@@ -9,10 +9,100 @@ import { CardGrid, LinkCard } from "@astrojs/starlight/components";
## Actions
+
+
+
+
+
+## Create Synthetic Check
+
+The Create Synthetic Check component creates a Dash0 synthetic check using the configuration API.
+
+### Use Cases
+
+- **Service onboarding**: Create synthetic checks when new services are deployed
+- **Environment bootstrap**: Provision baseline uptime checks in new environments
+- **Automation workflows**: Create checks from CI/CD or incident workflows
+
+### Configuration
+
+- **Origin or ID (Optional)**: Custom synthetic check identifier. If omitted, SuperPlane generates one.
+- **Name**: Human-readable synthetic check name
+- **Enabled**: Whether the synthetic check is enabled
+- **Plugin Kind**: Synthetic check plugin type (currently HTTP)
+- **Method**: HTTP method for request checks
+- **URL**: Target URL for the synthetic check
+- **Headers (Optional)**: Request header key/value pairs
+- **Request Body (Optional)**: HTTP request body (useful for POST/PUT/PATCH)
+
+### Output
+
+Emits:
+- **originOrId**: Synthetic check identifier used for the API request
+- **response**: Raw Dash0 API response
+
+### Example Output
+
+```json
+{
+ "data": {
+ "originOrId": "superplane-synthetic-ab12cd34",
+ "response": {
+ "id": "superplane-synthetic-ab12cd34",
+ "status": "updated"
+ }
+ },
+ "timestamp": "2026-02-09T12:00:00Z",
+ "type": "dash0.synthetic.check.created"
+}
+```
+
+
+
+## Get Check Details
+
+The Get Check Details component fetches full context for a Dash0 check by ID.
+
+### Use Cases
+
+- **Alert enrichment**: Expand webhook payloads with full check context before notifying responders
+- **Workflow branching**: Use check attributes (severity, thresholds, services) in downstream conditions
+- **Incident automation**: Add rich check details to incident tickets or chat messages
+
+### Configuration
+
+- **Check ID**: The Dash0 check identifier to retrieve
+- **Include History**: Include additional history data when supported by the Dash0 API
+
+### Output
+
+Emits a payload containing:
+- **checkId**: Check identifier used in the request
+- **details**: Raw details response from Dash0
+
+### Example Output
+
+```json
+{
+ "data": {
+ "checkId": "check-123",
+ "details": {
+ "currentValue": 0.92,
+ "id": "check-123",
+ "name": "Checkout API latency",
+ "query": "histogram_quantile(0.95, sum(rate(http_request_duration_seconds_bucket{service=\"checkout\"}[5m])) by (le))",
+ "severity": "critical"
+ }
+ },
+ "timestamp": "2026-02-09T12:00:00Z",
+ "type": "dash0.check.details.retrieved"
+}
+```
+
## List Issues
@@ -150,3 +240,46 @@ Returns the Prometheus query response including:
}
```
+
+
+## Send Log Event
+
+The Send Log Event component sends workflow log records to Dash0 using OTLP HTTP ingestion.
+
+### Use Cases
+
+- **Audit trails**: Record workflow milestones in Dash0 logs
+- **Change tracking**: Emit deployment, approval, and remediation events
+- **Observability correlation**: Correlate workflow activity with traces and metrics
+
+### Configuration
+
+- **Service Name**: Service name attached to emitted records
+- **Records**: One or more log records containing:
+ - message
+ - severity
+  - timestamp (optional, RFC3339 or Unix epoch)
+ - attributes (optional key/value map)
+
+### Output
+
+Emits:
+- **serviceName** and **sentCount**: Service name used for ingestion and the number of records sent
+- **response**: Raw Dash0 ingestion response
+
+### Example Output
+
+```json
+{
+ "data": {
+ "response": {
+ "status": "ok"
+ },
+ "sentCount": 2,
+ "serviceName": "superplane.workflow"
+ },
+ "timestamp": "2026-02-09T12:00:00Z",
+ "type": "dash0.log.event.sent"
+}
+```
+
diff --git a/pkg/integrations/dash0/client.go b/pkg/integrations/dash0/client.go
index 7a16eb1baa..81473eeba3 100644
--- a/pkg/integrations/dash0/client.go
+++ b/pkg/integrations/dash0/client.go
@@ -1,6 +1,7 @@
package dash0
import (
+ "bytes"
"encoding/json"
"fmt"
"io"
@@ -18,9 +19,11 @@ const (
)
type Client struct {
- Token string
- BaseURL string
- http core.HTTPContext
+ Token string
+ BaseURL string
+ LogsIngestURL string
+ Dataset string
+ http core.HTTPContext
}
func NewClient(http core.HTTPContext, ctx core.IntegrationContext) (*Client, error) {
@@ -42,13 +45,66 @@ func NewClient(http core.HTTPContext, ctx core.IntegrationContext) (*Client, err
// Strip /api/prometheus if user included it in the base URL
baseURL = strings.TrimSuffix(baseURL, "/api/prometheus")
+ dataset := "default"
+ datasetConfig, err := ctx.GetConfig("dataset")
+ if err == nil && datasetConfig != nil && len(datasetConfig) > 0 {
+ trimmedDataset := strings.TrimSpace(string(datasetConfig))
+ if trimmedDataset != "" {
+ dataset = trimmedDataset
+ }
+ }
+
+ logsIngestURL := deriveLogsIngestURL(baseURL)
+
return &Client{
- Token: string(apiToken),
- BaseURL: baseURL,
- http: http,
+ Token: string(apiToken),
+ BaseURL: baseURL,
+ LogsIngestURL: logsIngestURL,
+ Dataset: dataset,
+ http: http,
}, nil
}
+// deriveLogsIngestURL derives the OTLP logs ingress host from the configured API base URL.
+func deriveLogsIngestURL(baseURL string) string {
+ parsedURL, err := url.Parse(baseURL)
+ if err != nil || parsedURL.Scheme == "" || parsedURL.Host == "" {
+ return strings.TrimSuffix(baseURL, "/")
+ }
+
+ hostname := parsedURL.Hostname()
+ if strings.HasPrefix(hostname, "api.") {
+ hostname = "ingress." + strings.TrimPrefix(hostname, "api.")
+ }
+
+ if port := parsedURL.Port(); port != "" {
+ parsedURL.Host = fmt.Sprintf("%s:%s", hostname, port)
+ } else {
+ parsedURL.Host = hostname
+ }
+
+ parsedURL.Path = ""
+ parsedURL.RawPath = ""
+ parsedURL.RawQuery = ""
+ parsedURL.Fragment = ""
+
+ return strings.TrimSuffix(parsedURL.String(), "/")
+}
+
+// withDatasetQuery appends the configured dataset query parameter to a request URL.
+func (c *Client) withDatasetQuery(requestURL string) (string, error) {
+ parsedURL, err := url.Parse(requestURL)
+ if err != nil {
+ return "", fmt.Errorf("error parsing request URL: %v", err)
+ }
+
+ query := parsedURL.Query()
+ query.Set("dataset", c.Dataset)
+ parsedURL.RawQuery = query.Encode()
+
+ return parsedURL.String(), nil
+}
+
func (c *Client) execRequest(method, url string, body io.Reader, contentType string) ([]byte, error) {
req, err := http.NewRequest(method, url, body)
if err != nil {
@@ -168,8 +224,12 @@ type CheckRule struct {
func (c *Client) ListCheckRules() ([]CheckRule, error) {
apiURL := fmt.Sprintf("%s/api/alerting/check-rules", c.BaseURL)
+ requestURL, err := c.withDatasetQuery(apiURL)
+ if err != nil {
+ return nil, err
+ }
- responseBody, err := c.execRequest(http.MethodGet, apiURL, nil, "")
+ responseBody, err := c.execRequest(http.MethodGet, requestURL, nil, "")
if err != nil {
return nil, err
}
@@ -207,3 +267,120 @@ func (c *Client) ListCheckRules() ([]CheckRule, error) {
return checkRules, nil
}
+
+// SendLogEvents sends OTLP log batches to Dash0 ingestion endpoint.
+func (c *Client) SendLogEvents(request OTLPLogsRequest) (map[string]any, error) {
+ requestURL := fmt.Sprintf("%s/v1/logs", c.LogsIngestURL)
+
+ body, err := json.Marshal(request)
+ if err != nil {
+ return nil, fmt.Errorf("error marshaling logs request: %v", err)
+ }
+
+ responseBody, err := c.execRequest(http.MethodPost, requestURL, bytes.NewReader(body), "application/json")
+ if err != nil {
+ return nil, err
+ }
+
+ parsed, err := parseJSONResponse(responseBody)
+ if err != nil {
+ return nil, fmt.Errorf("error parsing send log event response: %v", err)
+ }
+
+ return parsed, nil
+}
+
+// GetCheckDetails fetches check context by failed-check ID with check-rules fallback.
+func (c *Client) GetCheckDetails(checkID string, includeHistory bool) (map[string]any, error) {
+ trimmedCheckID := strings.TrimSpace(checkID)
+ if trimmedCheckID == "" {
+ return nil, fmt.Errorf("check id is required")
+ }
+
+ querySuffix := ""
+ if includeHistory {
+ querySuffix = "?include_history=true"
+ }
+
+ escapedID := url.PathEscape(trimmedCheckID)
+ requestURL := fmt.Sprintf("%s/api/alerting/failed-checks/%s%s", c.BaseURL, escapedID, querySuffix)
+
+ responseBody, err := c.execRequest(http.MethodGet, requestURL, nil, "")
+ if err != nil {
+ if strings.Contains(err.Error(), "request got 404 code") {
+ fallbackURL := fmt.Sprintf("%s/api/alerting/check-rules/%s%s", c.BaseURL, escapedID, querySuffix)
+ responseBody, err = c.execRequest(http.MethodGet, fallbackURL, nil, "")
+ if err != nil {
+ return nil, fmt.Errorf("fallback check-rules lookup failed: %v", err)
+ }
+ } else {
+ return nil, err
+ }
+ }
+
+ parsed, err := parseJSONResponse(responseBody)
+ if err != nil {
+ return nil, fmt.Errorf("error parsing check details response: %v", err)
+ }
+
+ if _, ok := parsed["checkId"]; !ok {
+ parsed["checkId"] = trimmedCheckID
+ }
+
+ return parsed, nil
+}
+
+// UpsertSyntheticCheck creates or updates a synthetic check by origin/id.
+func (c *Client) UpsertSyntheticCheck(originOrID string, specification map[string]any) (map[string]any, error) {
+ trimmedOriginOrID := strings.TrimSpace(originOrID)
+ if trimmedOriginOrID == "" {
+ return nil, fmt.Errorf("origin/id is required")
+ }
+
+ requestURL := fmt.Sprintf("%s/api/synthetic-checks/%s", c.BaseURL, url.PathEscape(trimmedOriginOrID))
+ requestURL, err := c.withDatasetQuery(requestURL)
+ if err != nil {
+ return nil, err
+ }
+
+ body, err := json.Marshal(specification)
+ if err != nil {
+ return nil, fmt.Errorf("error marshaling request: %v", err)
+ }
+
+ responseBody, err := c.execRequest(http.MethodPut, requestURL, bytes.NewReader(body), "application/json")
+ if err != nil {
+ return nil, err
+ }
+
+ parsed, err := parseJSONResponse(responseBody)
+ if err != nil {
+ return nil, fmt.Errorf("error parsing upsert synthetic check response: %v", err)
+ }
+
+ if _, ok := parsed["originOrId"]; !ok {
+ parsed["originOrId"] = trimmedOriginOrID
+ }
+
+ return parsed, nil
+}
+
+// parseJSONResponse normalizes object or array JSON responses into a map.
+func parseJSONResponse(responseBody []byte) (map[string]any, error) {
+ trimmedBody := strings.TrimSpace(string(responseBody))
+ if trimmedBody == "" {
+ return map[string]any{}, nil
+ }
+
+ var parsed map[string]any
+ if err := json.Unmarshal(responseBody, &parsed); err == nil {
+ return parsed, nil
+ }
+
+ var parsedArray []any
+ if err := json.Unmarshal(responseBody, &parsedArray); err == nil {
+ return map[string]any{"items": parsedArray}, nil
+ }
+
+ return nil, fmt.Errorf("unexpected response payload shape")
+}
diff --git a/pkg/integrations/dash0/create_synthetic_check.go b/pkg/integrations/dash0/create_synthetic_check.go
new file mode 100644
index 0000000000..3beb579f02
--- /dev/null
+++ b/pkg/integrations/dash0/create_synthetic_check.go
@@ -0,0 +1,272 @@
+package dash0
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+
+ "github.com/google/uuid"
+ "github.com/mitchellh/mapstructure"
+ "github.com/superplanehq/superplane/pkg/configuration"
+ "github.com/superplanehq/superplane/pkg/core"
+)
+
+const CreateSyntheticCheckPayloadType = "dash0.synthetic.check.created"
+
+// CreateSyntheticCheck creates Dash0 synthetic checks via configuration API.
+type CreateSyntheticCheck struct{}
+
+// Name returns the stable component identifier.
+func (c *CreateSyntheticCheck) Name() string {
+ return "dash0.createSyntheticCheck"
+}
+
+// Label returns the display name used in the workflow builder.
+func (c *CreateSyntheticCheck) Label() string {
+ return "Create Synthetic Check"
+}
+
+// Description returns a short summary of component behavior.
+func (c *CreateSyntheticCheck) Description() string {
+ return "Create a synthetic check in Dash0 configuration API"
+}
+
+// Documentation returns markdown help shown in the component docs panel.
+func (c *CreateSyntheticCheck) Documentation() string {
+ return `The Create Synthetic Check component creates a Dash0 synthetic check using the configuration API.
+
+## Use Cases
+
+- **Service onboarding**: Create synthetic checks when new services are deployed
+- **Environment bootstrap**: Provision baseline uptime checks in new environments
+- **Automation workflows**: Create checks from CI/CD or incident workflows
+
+## Configuration
+
+- **Origin or ID (Optional)**: Custom synthetic check identifier. If omitted, SuperPlane generates one.
+- **Name**: Human-readable synthetic check name
+- **Enabled**: Whether the synthetic check is enabled
+- **Plugin Kind**: Synthetic check plugin type (currently HTTP)
+- **Method**: HTTP method for request checks
+- **URL**: Target URL for the synthetic check
+- **Headers (Optional)**: Request header key/value pairs
+- **Request Body (Optional)**: HTTP request body (useful for POST/PUT/PATCH)
+
+## Output
+
+Emits:
+- **originOrId**: Synthetic check identifier used for the API request
+- **response**: Raw Dash0 API response`
+}
+
+// Icon returns the Lucide icon name for this component.
+func (c *CreateSyntheticCheck) Icon() string {
+ return "plus-circle"
+}
+
+// Color returns the node color used in the UI.
+func (c *CreateSyntheticCheck) Color() string {
+ return "blue"
+}
+
+// OutputChannels declares the channel emitted by this action.
+func (c *CreateSyntheticCheck) OutputChannels(configuration any) []core.OutputChannel {
+ return []core.OutputChannel{core.DefaultOutputChannel}
+}
+
+// Configuration defines fields required to create synthetic checks.
+func (c *CreateSyntheticCheck) Configuration() []configuration.Field {
+ return []configuration.Field{
+ {
+ Name: "originOrId",
+ Label: "Origin or ID",
+ Type: configuration.FieldTypeString,
+ Required: false,
+ Description: "Optional synthetic check origin/ID. Leave empty to auto-generate.",
+ Placeholder: "superplane.synthetic.check",
+ },
+ {
+ Name: "name",
+ Label: "Name",
+ Type: configuration.FieldTypeString,
+ Required: true,
+ Description: "Human-readable synthetic check name",
+ Placeholder: "checkout-health",
+ },
+ {
+ Name: "enabled",
+ Label: "Enabled",
+ Type: configuration.FieldTypeBool,
+ Required: true,
+ Default: true,
+ Description: "Enable or disable the synthetic check",
+ },
+ {
+ Name: "pluginKind",
+ Label: "Plugin Kind",
+ Type: configuration.FieldTypeSelect,
+ Required: true,
+ Default: "http",
+ TypeOptions: &configuration.TypeOptions{
+ Select: &configuration.SelectTypeOptions{
+ Options: []configuration.FieldOption{
+ {Label: "HTTP", Value: "http"},
+ },
+ },
+ },
+ Description: "Synthetic check plugin kind",
+ },
+ {
+ Name: "method",
+ Label: "Method",
+ Type: configuration.FieldTypeSelect,
+ Required: true,
+ Default: "get",
+ TypeOptions: &configuration.TypeOptions{
+ Select: &configuration.SelectTypeOptions{
+ Options: []configuration.FieldOption{
+ {Label: "GET", Value: "get"},
+ {Label: "POST", Value: "post"},
+ {Label: "PUT", Value: "put"},
+ {Label: "PATCH", Value: "patch"},
+ {Label: "DELETE", Value: "delete"},
+ {Label: "HEAD", Value: "head"},
+ {Label: "OPTIONS", Value: "options"},
+ },
+ },
+ },
+ Description: "HTTP method used for the synthetic check request",
+ },
+ {
+ Name: "url",
+ Label: "URL",
+ Type: configuration.FieldTypeString,
+ Required: true,
+ Description: "Target URL for the synthetic check request",
+ Placeholder: "https://www.example.com/health",
+ },
+ {
+ Name: "headers",
+ Label: "Headers",
+ Type: configuration.FieldTypeList,
+ Required: false,
+ Togglable: true,
+ Description: "Optional request header key/value pairs",
+ TypeOptions: &configuration.TypeOptions{
+ List: &configuration.ListTypeOptions{
+ ItemLabel: "Header",
+ ItemDefinition: &configuration.ListItemDefinition{
+ Type: configuration.FieldTypeObject,
+ Schema: []configuration.Field{
+ {
+ Name: "key",
+ Label: "Key",
+ Type: configuration.FieldTypeString,
+ Required: true,
+ DisallowExpression: true,
+ },
+ {
+ Name: "value",
+ Label: "Value",
+ Type: configuration.FieldTypeString,
+ Required: true,
+ },
+ },
+ },
+ },
+ },
+ },
+ {
+ Name: "requestBody",
+ Label: "Request Body",
+ Type: configuration.FieldTypeText,
+ Required: false,
+ Togglable: true,
+ Description: "Optional HTTP request body",
+ VisibilityConditions: []configuration.VisibilityCondition{
+ {Field: "method", Values: []string{"post", "put", "patch"}},
+ },
+ },
+ }
+}
+
+// Setup validates component configuration during save/setup.
+func (c *CreateSyntheticCheck) Setup(ctx core.SetupContext) error {
+ scope := "dash0.createSyntheticCheck setup"
+ config := UpsertSyntheticCheckConfiguration{}
+ if err := mapstructure.Decode(ctx.Configuration, &config); err != nil {
+ return fmt.Errorf("%s: decode configuration: %w", scope, err)
+ }
+
+ if _, err := buildSyntheticCheckSpecificationFromConfiguration(config, scope); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// ProcessQueueItem delegates queue processing to default behavior.
+func (c *CreateSyntheticCheck) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) {
+ return ctx.DefaultProcessing()
+}
+
+// Execute creates a synthetic check and emits API response payload.
+func (c *CreateSyntheticCheck) Execute(ctx core.ExecutionContext) error {
+ scope := "dash0.createSyntheticCheck execute"
+ config := UpsertSyntheticCheckConfiguration{}
+ if err := mapstructure.Decode(ctx.Configuration, &config); err != nil {
+ return fmt.Errorf("%s: decode configuration: %w", scope, err)
+ }
+
+ specification, err := buildSyntheticCheckSpecificationFromConfiguration(config, scope)
+ if err != nil {
+ return err
+ }
+
+ originOrID := strings.TrimSpace(config.OriginOrID)
+ if originOrID == "" {
+ originOrID = fmt.Sprintf("superplane-synthetic-%s", uuid.NewString()[:8])
+ }
+
+ client, err := NewClient(ctx.HTTP, ctx.Integration)
+ if err != nil {
+ return fmt.Errorf("%s: create client: %w", scope, err)
+ }
+
+ response, err := client.UpsertSyntheticCheck(originOrID, specification)
+ if err != nil {
+ return fmt.Errorf("%s: create synthetic check %q: %w", scope, originOrID, err)
+ }
+
+ payload := map[string]any{
+ "originOrId": originOrID,
+ "response": response,
+ }
+
+ return ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, CreateSyntheticCheckPayloadType, []any{payload})
+}
+
+// Actions returns no manual actions for this component.
+func (c *CreateSyntheticCheck) Actions() []core.Action {
+ return []core.Action{}
+}
+
+// HandleAction is unused because this component has no actions.
+func (c *CreateSyntheticCheck) HandleAction(ctx core.ActionContext) error {
+ return nil
+}
+
+// HandleWebhook is unused because this component does not receive webhooks.
+func (c *CreateSyntheticCheck) HandleWebhook(ctx core.WebhookRequestContext) (int, error) {
+ return http.StatusOK, nil
+}
+
+// Cancel is a no-op because execution is synchronous and short-lived.
+func (c *CreateSyntheticCheck) Cancel(ctx core.ExecutionContext) error {
+ return nil
+}
+
+// Cleanup is a no-op because no external resources are provisioned.
+func (c *CreateSyntheticCheck) Cleanup(ctx core.SetupContext) error {
+ return nil
+}
diff --git a/pkg/integrations/dash0/create_synthetic_check_test.go b/pkg/integrations/dash0/create_synthetic_check_test.go
new file mode 100644
index 0000000000..794b9c6795
--- /dev/null
+++ b/pkg/integrations/dash0/create_synthetic_check_test.go
@@ -0,0 +1,87 @@
+package dash0
+
+import (
+ "io"
+ "net/http"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/superplanehq/superplane/pkg/core"
+ "github.com/superplanehq/superplane/test/support/contexts"
+)
+
+func Test__CreateSyntheticCheck__Setup(t *testing.T) {
+ component := CreateSyntheticCheck{}
+
+ t.Run("name is required", func(t *testing.T) {
+ err := component.Setup(core.SetupContext{
+ Configuration: map[string]any{
+ "method": "get",
+ "url": "https://example.com/health",
+ },
+ })
+
+ require.ErrorContains(t, err, "name is required")
+ })
+
+ t.Run("legacy spec remains supported", func(t *testing.T) {
+ err := component.Setup(core.SetupContext{
+ Configuration: map[string]any{
+ "spec": `[{"kind":"Dash0SyntheticCheck","metadata":{"name":"checkout-health"},"spec":{"enabled":true,"plugin":{"kind":"http","spec":{"request":{"method":"get","url":"https://example.com"}}}}}]`,
+ },
+ })
+
+ require.NoError(t, err)
+ })
+}
+
+func Test__CreateSyntheticCheck__Execute(t *testing.T) {
+ component := CreateSyntheticCheck{}
+
+ httpContext := &contexts.HTTPContext{
+ Responses: []*http.Response{
+ {
+ StatusCode: http.StatusOK,
+ Body: io.NopCloser(strings.NewReader(`{"status":"updated"}`)),
+ },
+ },
+ }
+
+ execCtx := &contexts.ExecutionStateContext{}
+ err := component.Execute(core.ExecutionContext{
+ Configuration: map[string]any{
+ "name": "checkout-health",
+ "enabled": true,
+ "pluginKind": "http",
+ "method": "get",
+ "url": "https://example.com/health",
+ "headers": []map[string]any{
+ {"key": "x-test", "value": "superplane"},
+ },
+ },
+ HTTP: httpContext,
+ Integration: &contexts.IntegrationContext{
+ Configuration: map[string]any{
+ "apiToken": "token123",
+ "baseURL": "https://api.us-west-2.aws.dash0.com",
+ },
+ },
+ ExecutionState: execCtx,
+ })
+
+ require.NoError(t, err)
+ assert.Equal(t, CreateSyntheticCheckPayloadType, execCtx.Type)
+ require.Len(t, httpContext.Requests, 1)
+ assert.Equal(t, http.MethodPut, httpContext.Requests[0].Method)
+ assert.Contains(t, httpContext.Requests[0].URL.String(), "/api/synthetic-checks/superplane-synthetic-")
+ assert.Equal(t, "default", httpContext.Requests[0].URL.Query().Get("dataset"))
+
+ requestBody, readErr := io.ReadAll(httpContext.Requests[0].Body)
+ require.NoError(t, readErr)
+ assert.Contains(t, string(requestBody), `"kind":"Dash0SyntheticCheck"`)
+ assert.Contains(t, string(requestBody), `"name":"checkout-health"`)
+ assert.Contains(t, string(requestBody), `"method":"get"`)
+ assert.Contains(t, string(requestBody), `"url":"https://example.com/health"`)
+}
diff --git a/pkg/integrations/dash0/dash0.go b/pkg/integrations/dash0/dash0.go
index a5f96699e3..1a3913eaf1 100644
--- a/pkg/integrations/dash0/dash0.go
+++ b/pkg/integrations/dash0/dash0.go
@@ -18,6 +18,7 @@ type Dash0 struct{}
type Configuration struct {
APIToken string `json:"apiToken"`
BaseURL string `json:"baseURL"`
+ Dataset string `json:"dataset"`
}
type Metadata struct {
@@ -62,6 +63,15 @@ func (d *Dash0) Configuration() []configuration.Field {
Description: "Your Dash0 Prometheus API base URL. Find this in Dash0 dashboard: Organization Settings > Endpoints > Prometheus API. You can use either the full endpoint URL (https://api.us-west-2.aws.dash0.com/api/prometheus) or just the base URL (https://api.us-west-2.aws.dash0.com)",
Placeholder: "https://api.us-west-2.aws.dash0.com",
},
+ {
+ Name: "dataset",
+ Label: "Dataset",
+ Type: configuration.FieldTypeString,
+ Required: false,
+ Default: "default",
+ Description: "Dash0 dataset used by config API operations (check rules and synthetic checks).",
+ Placeholder: "default",
+ },
}
}
@@ -69,6 +79,9 @@ func (d *Dash0) Components() []core.Component {
return []core.Component{
&QueryPrometheus{},
&ListIssues{},
+ &SendLogEvent{},
+ &GetCheckDetails{},
+ &CreateSyntheticCheck{},
}
}
diff --git a/pkg/integrations/dash0/example.go b/pkg/integrations/dash0/example.go
index a2737e8e53..885545dc14 100644
--- a/pkg/integrations/dash0/example.go
+++ b/pkg/integrations/dash0/example.go
@@ -19,6 +19,24 @@ var exampleOutputListIssuesBytes []byte
var exampleOutputListIssuesOnce sync.Once
var exampleOutputListIssues map[string]any
+//go:embed example_output_send_log_event.json
+var exampleOutputSendLogEventBytes []byte
+
+var exampleOutputSendLogEventOnce sync.Once
+var exampleOutputSendLogEvent map[string]any
+
+//go:embed example_output_get_check_details.json
+var exampleOutputGetCheckDetailsBytes []byte
+
+var exampleOutputGetCheckDetailsOnce sync.Once
+var exampleOutputGetCheckDetails map[string]any
+
+//go:embed example_output_create_synthetic_check.json
+var exampleOutputCreateSyntheticCheckBytes []byte
+
+var exampleOutputCreateSyntheticCheckOnce sync.Once
+var exampleOutputCreateSyntheticCheck map[string]any
+
func (c *QueryPrometheus) ExampleOutput() map[string]any {
return utils.UnmarshalEmbeddedJSON(&exampleOutputQueryPrometheusOnce, exampleOutputQueryPrometheusBytes, &exampleOutputQueryPrometheus)
}
@@ -26,3 +44,18 @@ func (c *QueryPrometheus) ExampleOutput() map[string]any {
func (c *ListIssues) ExampleOutput() map[string]any {
return utils.UnmarshalEmbeddedJSON(&exampleOutputListIssuesOnce, exampleOutputListIssuesBytes, &exampleOutputListIssues)
}
+
+// ExampleOutput returns sample output data for Send Log Event.
+func (c *SendLogEvent) ExampleOutput() map[string]any {
+ return utils.UnmarshalEmbeddedJSON(&exampleOutputSendLogEventOnce, exampleOutputSendLogEventBytes, &exampleOutputSendLogEvent)
+}
+
+// ExampleOutput returns sample output data for Get Check Details.
+func (c *GetCheckDetails) ExampleOutput() map[string]any {
+ return utils.UnmarshalEmbeddedJSON(&exampleOutputGetCheckDetailsOnce, exampleOutputGetCheckDetailsBytes, &exampleOutputGetCheckDetails)
+}
+
+// ExampleOutput returns sample output data for Create Synthetic Check.
+func (c *CreateSyntheticCheck) ExampleOutput() map[string]any {
+ return utils.UnmarshalEmbeddedJSON(&exampleOutputCreateSyntheticCheckOnce, exampleOutputCreateSyntheticCheckBytes, &exampleOutputCreateSyntheticCheck)
+}
diff --git a/pkg/integrations/dash0/example_output_create_synthetic_check.json b/pkg/integrations/dash0/example_output_create_synthetic_check.json
new file mode 100644
index 0000000000..0aebfeb944
--- /dev/null
+++ b/pkg/integrations/dash0/example_output_create_synthetic_check.json
@@ -0,0 +1,11 @@
+{
+ "data": {
+ "originOrId": "superplane-synthetic-ab12cd34",
+ "response": {
+ "id": "superplane-synthetic-ab12cd34",
+ "status": "updated"
+ }
+ },
+ "timestamp": "2026-02-09T12:00:00Z",
+ "type": "dash0.synthetic.check.created"
+}
diff --git a/pkg/integrations/dash0/example_output_get_check_details.json b/pkg/integrations/dash0/example_output_get_check_details.json
new file mode 100644
index 0000000000..47e4c46390
--- /dev/null
+++ b/pkg/integrations/dash0/example_output_get_check_details.json
@@ -0,0 +1,14 @@
+{
+ "data": {
+ "checkId": "check-123",
+ "details": {
+ "id": "check-123",
+ "name": "Checkout API latency",
+ "severity": "critical",
+ "query": "histogram_quantile(0.95, sum(rate(http_request_duration_seconds_bucket{service=\"checkout\"}[5m])) by (le))",
+ "currentValue": 0.92
+ }
+ },
+ "timestamp": "2026-02-09T12:00:00Z",
+ "type": "dash0.check.details.retrieved"
+}
diff --git a/pkg/integrations/dash0/example_output_send_log_event.json b/pkg/integrations/dash0/example_output_send_log_event.json
new file mode 100644
index 0000000000..78f2c8731f
--- /dev/null
+++ b/pkg/integrations/dash0/example_output_send_log_event.json
@@ -0,0 +1,11 @@
+{
+ "data": {
+ "serviceName": "superplane.workflow",
+ "sentCount": 2,
+ "response": {
+ "status": "ok"
+ }
+ },
+ "timestamp": "2026-02-09T12:00:00Z",
+ "type": "dash0.log.event.sent"
+}
diff --git a/pkg/integrations/dash0/get_check_details.go b/pkg/integrations/dash0/get_check_details.go
new file mode 100644
index 0000000000..bf5490d17b
--- /dev/null
+++ b/pkg/integrations/dash0/get_check_details.go
@@ -0,0 +1,167 @@
+package dash0
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+
+ "github.com/google/uuid"
+ "github.com/mitchellh/mapstructure"
+ "github.com/superplanehq/superplane/pkg/configuration"
+ "github.com/superplanehq/superplane/pkg/core"
+)
+
+const GetCheckDetailsPayloadType = "dash0.check.details.retrieved"
+
+// GetCheckDetails fetches detailed check information from Dash0 alerting APIs.
+type GetCheckDetails struct{}
+
+// Name returns the stable component identifier.
+func (c *GetCheckDetails) Name() string {
+ return "dash0.getCheckDetails"
+}
+
+// Label returns the display name used in the workflow builder.
+func (c *GetCheckDetails) Label() string {
+ return "Get Check Details"
+}
+
+// Description returns a short summary of component behavior.
+func (c *GetCheckDetails) Description() string {
+ return "Get detailed information for a Dash0 check by ID"
+}
+
+// Documentation returns markdown help shown in the component docs panel.
+func (c *GetCheckDetails) Documentation() string {
+ return `The Get Check Details component fetches full context for a Dash0 check by ID.
+
+## Use Cases
+
+- **Alert enrichment**: Expand webhook payloads with full check context before notifying responders
+- **Workflow branching**: Use check attributes (severity, thresholds, services) in downstream conditions
+- **Incident automation**: Add rich check details to incident tickets or chat messages
+
+## Configuration
+
+- **Check ID**: The Dash0 check identifier to retrieve
+- **Include History**: Include additional history data when supported by the Dash0 API
+
+## Output
+
+Emits a payload containing:
+- **checkId**: Check identifier used in the request
+- **details**: Raw details response from Dash0`
+}
+
+// Icon returns the Lucide icon name for this component.
+func (c *GetCheckDetails) Icon() string {
+ return "search"
+}
+
+// Color returns the node color used in the UI.
+func (c *GetCheckDetails) Color() string {
+ return "blue"
+}
+
+// OutputChannels declares the channel emitted by this action.
+func (c *GetCheckDetails) OutputChannels(configuration any) []core.OutputChannel {
+ return []core.OutputChannel{core.DefaultOutputChannel}
+}
+
+// Configuration defines fields required to fetch check details.
+func (c *GetCheckDetails) Configuration() []configuration.Field {
+ return []configuration.Field{
+ {
+ Name: "checkId",
+ Label: "Check ID",
+ Type: configuration.FieldTypeString,
+ Required: true,
+ Description: "Dash0 check identifier (for example from an On Alert Event trigger)",
+ Placeholder: "{{ event.data.checkId }}",
+ },
+ {
+ Name: "includeHistory",
+ Label: "Include History",
+ Type: configuration.FieldTypeBool,
+ Required: false,
+ Default: false,
+ Description: "Request additional history data when supported by Dash0",
+ },
+ }
+}
+
+// Setup validates component configuration during save/setup.
+func (c *GetCheckDetails) Setup(ctx core.SetupContext) error {
+ scope := "dash0.getCheckDetails setup"
+ config := GetCheckDetailsConfiguration{}
+ if err := mapstructure.Decode(ctx.Configuration, &config); err != nil {
+ return fmt.Errorf("%s: decode configuration: %w", scope, err)
+ }
+
+ if strings.TrimSpace(config.CheckID) == "" {
+ return fmt.Errorf("%s: checkId is required", scope)
+ }
+
+ return nil
+}
+
+// ProcessQueueItem delegates queue processing to default behavior.
+func (c *GetCheckDetails) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) {
+ return ctx.DefaultProcessing()
+}
+
+// Execute fetches check details and emits normalized output payload.
+func (c *GetCheckDetails) Execute(ctx core.ExecutionContext) error {
+ scope := "dash0.getCheckDetails execute"
+ config := GetCheckDetailsConfiguration{}
+ if err := mapstructure.Decode(ctx.Configuration, &config); err != nil {
+ return fmt.Errorf("%s: decode configuration: %w", scope, err)
+ }
+
+ checkID := strings.TrimSpace(config.CheckID)
+ if checkID == "" {
+ return fmt.Errorf("%s: checkId is required", scope)
+ }
+
+ client, err := NewClient(ctx.HTTP, ctx.Integration)
+ if err != nil {
+ return fmt.Errorf("%s: create client: %w", scope, err)
+ }
+
+ details, err := client.GetCheckDetails(checkID, config.IncludeHistory)
+ if err != nil {
+ return fmt.Errorf("%s: get check details for %q: %w", scope, checkID, err)
+ }
+
+ payload := map[string]any{
+ "checkId": checkID,
+ "details": details,
+ }
+
+ return ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, GetCheckDetailsPayloadType, []any{payload})
+}
+
+// Actions returns no manual actions for this component.
+func (c *GetCheckDetails) Actions() []core.Action {
+ return []core.Action{}
+}
+
+// HandleAction is unused because this component has no actions.
+func (c *GetCheckDetails) HandleAction(ctx core.ActionContext) error {
+ return nil
+}
+
+// HandleWebhook is unused because this component does not receive webhooks.
+func (c *GetCheckDetails) HandleWebhook(ctx core.WebhookRequestContext) (int, error) {
+ return http.StatusOK, nil
+}
+
+// Cancel is a no-op because execution is synchronous and short-lived.
+func (c *GetCheckDetails) Cancel(ctx core.ExecutionContext) error {
+ return nil
+}
+
+// Cleanup is a no-op because no external resources are provisioned.
+func (c *GetCheckDetails) Cleanup(ctx core.SetupContext) error {
+ return nil
+}
diff --git a/pkg/integrations/dash0/get_check_details_test.go b/pkg/integrations/dash0/get_check_details_test.go
new file mode 100644
index 0000000000..d8f9f69836
--- /dev/null
+++ b/pkg/integrations/dash0/get_check_details_test.go
@@ -0,0 +1,107 @@
+package dash0
+
+import (
+ "io"
+ "net/http"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/superplanehq/superplane/pkg/core"
+ "github.com/superplanehq/superplane/test/support/contexts"
+)
+
+// Test__GetCheckDetails__Setup covers configuration validation at save time:
+// a blank checkId is rejected, a non-empty one is accepted.
+func Test__GetCheckDetails__Setup(t *testing.T) {
+	component := GetCheckDetails{}
+
+	t.Run("check id is required", func(t *testing.T) {
+		err := component.Setup(core.SetupContext{
+			Configuration: map[string]any{
+				"checkId": "",
+			},
+		})
+
+		require.ErrorContains(t, err, "checkId is required")
+	})
+
+	t.Run("valid configuration", func(t *testing.T) {
+		err := component.Setup(core.SetupContext{
+			Configuration: map[string]any{
+				"checkId": "check-123",
+			},
+		})
+
+		require.NoError(t, err)
+	})
+}
+
+// Test__GetCheckDetails__Execute exercises the happy path against the
+// failed-checks endpoint and the 404 fallback to the check-rules endpoint.
+func Test__GetCheckDetails__Execute(t *testing.T) {
+	component := GetCheckDetails{}
+
+	t.Run("fetches details from failed-checks endpoint", func(t *testing.T) {
+		// One canned 200 response: the client should issue exactly one request.
+		httpContext := &contexts.HTTPContext{
+			Responses: []*http.Response{
+				{
+					StatusCode: http.StatusOK,
+					Body:       io.NopCloser(strings.NewReader(`{"id":"check-123","name":"Checkout latency"}`)),
+				},
+			},
+		}
+
+		execCtx := &contexts.ExecutionStateContext{}
+		err := component.Execute(core.ExecutionContext{
+			Configuration: map[string]any{
+				"checkId": "check-123",
+			},
+			HTTP: httpContext,
+			Integration: &contexts.IntegrationContext{
+				Configuration: map[string]any{
+					"apiToken": "token123",
+					"baseURL":  "https://api.us-west-2.aws.dash0.com",
+				},
+			},
+			ExecutionState: execCtx,
+		})
+
+		require.NoError(t, err)
+		assert.Equal(t, GetCheckDetailsPayloadType, execCtx.Type)
+		require.Len(t, httpContext.Requests, 1)
+		assert.Contains(t, httpContext.Requests[0].URL.String(), "/api/alerting/failed-checks/check-123")
+	})
+
+	t.Run("falls back to check-rules endpoint on 404", func(t *testing.T) {
+		// First response is a 404 from failed-checks; second is the
+		// successful check-rules lookup the client should fall back to.
+		httpContext := &contexts.HTTPContext{
+			Responses: []*http.Response{
+				{
+					StatusCode: http.StatusNotFound,
+					Body:       io.NopCloser(strings.NewReader(`{"error":"not found"}`)),
+				},
+				{
+					StatusCode: http.StatusOK,
+					Body:       io.NopCloser(strings.NewReader(`{"id":"check-123","name":"Checkout latency rule"}`)),
+				},
+			},
+		}
+
+		execCtx := &contexts.ExecutionStateContext{}
+		err := component.Execute(core.ExecutionContext{
+			Configuration: map[string]any{
+				"checkId": "check-123",
+			},
+			HTTP: httpContext,
+			Integration: &contexts.IntegrationContext{
+				Configuration: map[string]any{
+					"apiToken": "token123",
+					"baseURL":  "https://api.us-west-2.aws.dash0.com",
+				},
+			},
+			ExecutionState: execCtx,
+		})
+
+		require.NoError(t, err)
+		require.Len(t, httpContext.Requests, 2)
+		assert.Contains(t, httpContext.Requests[0].URL.String(), "/api/alerting/failed-checks/check-123")
+		assert.Contains(t, httpContext.Requests[1].URL.String(), "/api/alerting/check-rules/check-123")
+	})
+}
diff --git a/pkg/integrations/dash0/send_log_event.go b/pkg/integrations/dash0/send_log_event.go
new file mode 100644
index 0000000000..e152504d8d
--- /dev/null
+++ b/pkg/integrations/dash0/send_log_event.go
@@ -0,0 +1,474 @@
+package dash0
+
+import (
+ "fmt"
+ "net/http"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/google/uuid"
+ "github.com/mitchellh/mapstructure"
+ "github.com/superplanehq/superplane/pkg/configuration"
+ "github.com/superplanehq/superplane/pkg/core"
+)
+
+const (
+	// SendLogEventPayloadType is the payload type emitted on the default
+	// output channel after a successful ingestion request.
+	SendLogEventPayloadType = "dash0.log.event.sent"
+	// maxLogEventRecords caps the number of records per execution.
+	maxLogEventRecords = 50
+)
+
+// logSeverityOptions are the severity choices offered in the builder UI.
+var logSeverityOptions = []configuration.FieldOption{
+	{Label: "Trace", Value: "TRACE"},
+	{Label: "Debug", Value: "DEBUG"},
+	{Label: "Info", Value: "INFO"},
+	{Label: "Warn", Value: "WARN"},
+	{Label: "Error", Value: "ERROR"},
+	{Label: "Fatal", Value: "FATAL"},
+}
+
+// severityNumberByText maps severity text to the numeric level sent in
+// the OTLP severityNumber field.
+var severityNumberByText = map[string]int{
+	"TRACE": 1,
+	"DEBUG": 5,
+	"INFO":  9,
+	"WARN":  13,
+	"ERROR": 17,
+	"FATAL": 21,
+}
+
+// SendLogEvent publishes workflow log records to Dash0 OTLP HTTP ingestion.
+type SendLogEvent struct{}
+
+// Name returns the stable component identifier.
+func (c *SendLogEvent) Name() string {
+	return "dash0.sendLogEvent"
+}
+
+// Label returns the display name used in the workflow builder.
+func (c *SendLogEvent) Label() string {
+	return "Send Log Event"
+}
+
+// Description returns a short summary of component behavior.
+func (c *SendLogEvent) Description() string {
+	return "Send one or more workflow log records to Dash0 OTLP HTTP ingestion"
+}
+
+// Documentation returns markdown help shown in the component docs panel.
+func (c *SendLogEvent) Documentation() string {
+	return `The Send Log Event component sends workflow log records to Dash0 using OTLP HTTP ingestion.
+
+## Use Cases
+
+- **Audit trails**: Record workflow milestones in Dash0 logs
+- **Change tracking**: Emit deployment, approval, and remediation events
+- **Observability correlation**: Correlate workflow activity with traces and metrics
+
+## Configuration
+
+- **Service Name**: Service name attached to emitted records
+- **Records**: One or more log records containing:
+  - message
+  - severity
+  - timestamp (optional, RFC3339 or unix)
+  - attributes (optional key/value map)
+
+## Output
+
+Emits:
+- **serviceName**: Service name used for ingestion
+- **sentCount**: Number of records sent in this request`
+}
+
+// Icon returns the Lucide icon name for this component.
+func (c *SendLogEvent) Icon() string {
+	return "file-text"
+}
+
+// Color returns the node color used in the UI.
+func (c *SendLogEvent) Color() string {
+	return "blue"
+}
+
+// OutputChannels declares the channel emitted by this action. The
+// configuration argument is unused; the channel set is static.
+func (c *SendLogEvent) OutputChannels(configuration any) []core.OutputChannel {
+	return []core.OutputChannel{core.DefaultOutputChannel}
+}
+
+// Configuration defines fields required to send OTLP log records.
+// Field values entered here are re-validated at execute time by
+// validateSendLogEventConfiguration.
+func (c *SendLogEvent) Configuration() []configuration.Field {
+	return []configuration.Field{
+		{
+			Name:        "serviceName",
+			Label:       "Service Name",
+			Type:        configuration.FieldTypeString,
+			Required:    false,
+			Default:     "superplane.workflow",
+			Description: "Service name attached to ingested log records",
+			Placeholder: "superplane.workflow",
+		},
+		{
+			Name:        "records",
+			Label:       "Records",
+			Type:        configuration.FieldTypeList,
+			Required:    true,
+			Description: "List of log records to send (max 50 per execution)",
+			TypeOptions: &configuration.TypeOptions{
+				List: &configuration.ListTypeOptions{
+					ItemLabel: "Record",
+					ItemDefinition: &configuration.ListItemDefinition{
+						Type: configuration.FieldTypeObject,
+						Schema: []configuration.Field{
+							{
+								Name:        "message",
+								Label:       "Message",
+								Type:        configuration.FieldTypeString,
+								Required:    true,
+								Description: "Log message text",
+							},
+							{
+								Name:        "severity",
+								Label:       "Severity",
+								Type:        configuration.FieldTypeSelect,
+								Required:    false,
+								Default:     "INFO",
+								Description: "Log severity",
+								TypeOptions: &configuration.TypeOptions{
+									Select: &configuration.SelectTypeOptions{
+										Options: logSeverityOptions,
+									},
+								},
+							},
+							{
+								Name:        "timestamp",
+								Label:       "Timestamp",
+								Type:        configuration.FieldTypeString,
+								Required:    false,
+								Description: "Optional timestamp (RFC3339, unix seconds, unix milliseconds, or unix nanoseconds)",
+								Placeholder: "2026-02-09T12:00:00Z",
+							},
+							{
+								Name:        "attributes",
+								Label:       "Attributes",
+								Type:        configuration.FieldTypeObject,
+								Required:    false,
+								Togglable:   true,
+								Description: "Optional key/value attributes for this record",
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+}
+
+// Setup validates component configuration during save/setup. It decodes
+// the raw map and applies the same validation used at execute time.
+func (c *SendLogEvent) Setup(ctx core.SetupContext) error {
+	scope := "dash0.sendLogEvent setup"
+	config := SendLogEventConfiguration{}
+	if err := mapstructure.Decode(ctx.Configuration, &config); err != nil {
+		return fmt.Errorf("%s: decode configuration: %w", scope, err)
+	}
+
+	return validateSendLogEventConfiguration(config, scope)
+}
+
+// ProcessQueueItem delegates queue processing to default behavior.
+func (c *SendLogEvent) ProcessQueueItem(ctx core.ProcessQueueContext) (*uuid.UUID, error) {
+	return ctx.DefaultProcessing()
+}
+
+// Execute transforms configured records into OTLP and sends them to Dash0.
+func (c *SendLogEvent) Execute(ctx core.ExecutionContext) error {
+ scope := "dash0.sendLogEvent execute"
+ config := SendLogEventConfiguration{}
+ if err := mapstructure.Decode(ctx.Configuration, &config); err != nil {
+ return fmt.Errorf("%s: decode configuration: %w", scope, err)
+ }
+
+ if err := validateSendLogEventConfiguration(config, scope); err != nil {
+ return err
+ }
+
+ request, serviceName, err := buildOTLPLogRequest(config, scope)
+ if err != nil {
+ return err
+ }
+
+ client, err := NewClient(ctx.HTTP, ctx.Integration)
+ if err != nil {
+ return fmt.Errorf("%s: create client: %w", scope, err)
+ }
+
+ response, err := client.SendLogEvents(request)
+ if err != nil {
+ return fmt.Errorf("%s: send log events: %w", scope, err)
+ }
+
+ payload := map[string]any{
+ "serviceName": serviceName,
+ "sentCount": len(config.Records),
+ "response": response,
+ }
+
+ return ctx.ExecutionState.Emit(core.DefaultOutputChannel.Name, SendLogEventPayloadType, []any{payload})
+}
+
+// Actions returns no manual actions for this component.
+func (c *SendLogEvent) Actions() []core.Action {
+	return []core.Action{}
+}
+
+// HandleAction is unused because this component has no actions.
+func (c *SendLogEvent) HandleAction(ctx core.ActionContext) error {
+	return nil
+}
+
+// HandleWebhook is unused because this component does not receive webhooks.
+// It answers 200 OK without processing the request body.
+func (c *SendLogEvent) HandleWebhook(ctx core.WebhookRequestContext) (int, error) {
+	return http.StatusOK, nil
+}
+
+// Cancel is a no-op because execution is synchronous and short-lived.
+func (c *SendLogEvent) Cancel(ctx core.ExecutionContext) error {
+	return nil
+}
+
+// Cleanup is a no-op because no external resources are provisioned.
+func (c *SendLogEvent) Cleanup(ctx core.SetupContext) error {
+	return nil
+}
+
+// validateSendLogEventConfiguration enforces required fields and record
+// constraints: at least one record, at most maxLogEventRecords, a non-blank
+// message per record, and a parseable timestamp when one is supplied.
+func validateSendLogEventConfiguration(config SendLogEventConfiguration, scope string) error {
+	recordCount := len(config.Records)
+	switch {
+	case recordCount == 0:
+		return fmt.Errorf("%s: records is required", scope)
+	case recordCount > maxLogEventRecords:
+		return fmt.Errorf("%s: records cannot exceed %d", scope, maxLogEventRecords)
+	}
+
+	for index, record := range config.Records {
+		recordScope := fmt.Sprintf("%s: record[%d]", scope, index)
+
+		if strings.TrimSpace(record.Message) == "" {
+			return fmt.Errorf("%s: message is required", recordScope)
+		}
+
+		// Timestamps are optional; blank values are filled in at build time.
+		if strings.TrimSpace(record.Timestamp) == "" {
+			continue
+		}
+		if _, err := parseRecordTimestamp(record.Timestamp); err != nil {
+			return fmt.Errorf("%s: invalid timestamp: %w", recordScope, err)
+		}
+	}
+
+	return nil
+}
+
+// buildOTLPLogRequest converts component configuration into an OTLP logs
+// request. It returns the request, the effective service name (after
+// defaulting), and an error if any record timestamp fails to parse.
+func buildOTLPLogRequest(config SendLogEventConfiguration, scope string) (OTLPLogsRequest, string, error) {
+	// Default the resource-level service.name when none is configured.
+	serviceName := strings.TrimSpace(config.ServiceName)
+	if serviceName == "" {
+		serviceName = "superplane.workflow"
+	}
+
+	resourceAttributes := []OTLPKeyValue{
+		{
+			Key:   "service.name",
+			Value: otlpStringValue(serviceName),
+		},
+	}
+
+	logRecords := make([]OTLPLogRecord, 0, len(config.Records))
+	for index, record := range config.Records {
+		recordScope := fmt.Sprintf("%s: record[%d]", scope, index)
+		// Blank timestamps resolve to time.Now() inside parseRecordTimestamp.
+		recordTime, err := parseRecordTimestamp(record.Timestamp)
+		if err != nil {
+			return OTLPLogsRequest{}, "", fmt.Errorf("%s: parse timestamp: %w", recordScope, err)
+		}
+
+		severityText, severityNumber := normalizeSeverity(record.Severity)
+		// Convert record attributes, silently dropping blank keys.
+		attributes := make([]OTLPKeyValue, 0, len(record.Attributes))
+		for key, value := range record.Attributes {
+			trimmedKey := strings.TrimSpace(key)
+			if trimmedKey == "" {
+				continue
+			}
+			attributes = append(attributes, OTLPKeyValue{
+				Key:   trimmedKey,
+				Value: otlpAnyValue(value),
+			})
+		}
+
+		logRecords = append(logRecords, OTLPLogRecord{
+			// OTLP JSON carries timeUnixNano as a decimal string.
+			TimeUnixNano:   strconv.FormatInt(recordTime.UnixNano(), 10),
+			SeverityText:   severityText,
+			SeverityNumber: severityNumber,
+			Body:           otlpStringValue(record.Message),
+			Attributes:     attributes,
+		})
+	}
+
+	// All records share one resource and one instrumentation scope.
+	request := OTLPLogsRequest{
+		ResourceLogs: []OTLPResourceLogs{
+			{
+				Resource: OTLPResource{
+					Attributes: resourceAttributes,
+				},
+				ScopeLogs: []OTLPScopeLogs{
+					{
+						Scope: OTLPScope{
+							Name: "superplane.workflow",
+						},
+						LogRecords: logRecords,
+					},
+				},
+			},
+		},
+	}
+
+	return request, serviceName, nil
+}
+
+// parseRecordTimestamp parses RFC3339 or unix-like timestamps into UTC time.
+//
+// Accepted forms: RFC3339 / RFC3339Nano, "2006-01-02 15:04:05", and unix
+// epochs given as integers or floats in seconds, milliseconds, or
+// nanoseconds. Empty input and the literal string "nil" (any case) resolve
+// to the current time.
+func parseRecordTimestamp(value string) (time.Time, error) {
+	trimmed := strings.TrimSpace(value)
+	// Blank or "nil"-like values mean "use now". (The extra
+	// EqualFold(trimmed, "") comparison was dead code: it can only be true
+	// when trimmed == "" already is.)
+	if trimmed == "" || strings.EqualFold(trimmed, "nil") {
+		return time.Now().UTC(), nil
+	}
+
+	// Try textual layouts first, most specific to least.
+	layouts := []string{
+		time.RFC3339Nano,
+		time.RFC3339,
+		"2006-01-02 15:04:05",
+	}
+
+	for _, layout := range layouts {
+		parsed, err := time.Parse(layout, trimmed)
+		if err == nil {
+			return parsed.UTC(), nil
+		}
+	}
+
+	// Fall back to numeric epochs; float values are truncated toward zero.
+	intValue, intErr := strconv.ParseInt(trimmed, 10, 64)
+	if intErr != nil {
+		floatValue, floatErr := strconv.ParseFloat(trimmed, 64)
+		if floatErr != nil {
+			return time.Time{}, fmt.Errorf("unsupported timestamp format %q", trimmed)
+		}
+		intValue = int64(floatValue)
+	}
+
+	// Disambiguate the epoch unit by magnitude:
+	// >= 1e18 is nanoseconds, >= 1e12 is milliseconds, otherwise seconds.
+	switch {
+	case intValue >= 1_000_000_000_000_000_000:
+		return time.Unix(0, intValue).UTC(), nil
+	case intValue >= 1_000_000_000_000:
+		return time.UnixMilli(intValue).UTC(), nil
+	default:
+		return time.Unix(intValue, 0).UTC(), nil
+	}
+}
+
+// normalizeSeverity maps input severity values to OTLP text and numeric
+// levels. "WARNING" is an alias for WARN; blank or unknown values fall
+// back to INFO.
+func normalizeSeverity(value string) (string, int) {
+	text := strings.ToUpper(strings.TrimSpace(value))
+	switch text {
+	case "WARNING":
+		text = "WARN"
+	case "":
+		text = "INFO"
+	}
+
+	if number, known := severityNumberByText[text]; known {
+		return text, number
+	}
+
+	return "INFO", severityNumberByText["INFO"]
+}
+
+// otlpStringValue wraps a string into OTLP AnyValue format. Taking the
+// address of the parameter is safe: each call gets its own copy.
+func otlpStringValue(value string) OTLPAnyValue {
+	return OTLPAnyValue{StringValue: &value}
+}
+
+// otlpAnyValue converts generic Go values into OTLP AnyValue representations.
+// Integers map to IntValue, which is a *string in OTLPAnyValue, so every
+// integer width is formatted as a decimal string. Arrays and maps recurse;
+// anything unrecognized is stringified with %v.
+func otlpAnyValue(value any) OTLPAnyValue {
+	switch typed := value.(type) {
+	case nil:
+		// nil becomes an empty string value rather than an absent field.
+		return otlpStringValue("")
+	case string:
+		return otlpStringValue(typed)
+	case bool:
+		boolean := typed
+		return OTLPAnyValue{BoolValue: &boolean}
+	case int:
+		integer := strconv.Itoa(typed)
+		return OTLPAnyValue{IntValue: &integer}
+	case int8:
+		integer := strconv.FormatInt(int64(typed), 10)
+		return OTLPAnyValue{IntValue: &integer}
+	case int16:
+		integer := strconv.FormatInt(int64(typed), 10)
+		return OTLPAnyValue{IntValue: &integer}
+	case int32:
+		integer := strconv.FormatInt(int64(typed), 10)
+		return OTLPAnyValue{IntValue: &integer}
+	case int64:
+		integer := strconv.FormatInt(typed, 10)
+		return OTLPAnyValue{IntValue: &integer}
+	case uint:
+		integer := strconv.FormatUint(uint64(typed), 10)
+		return OTLPAnyValue{IntValue: &integer}
+	case uint8:
+		integer := strconv.FormatUint(uint64(typed), 10)
+		return OTLPAnyValue{IntValue: &integer}
+	case uint16:
+		integer := strconv.FormatUint(uint64(typed), 10)
+		return OTLPAnyValue{IntValue: &integer}
+	case uint32:
+		integer := strconv.FormatUint(uint64(typed), 10)
+		return OTLPAnyValue{IntValue: &integer}
+	case uint64:
+		integer := strconv.FormatUint(typed, 10)
+		return OTLPAnyValue{IntValue: &integer}
+	case float32:
+		float := float64(typed)
+		return OTLPAnyValue{DoubleValue: &float}
+	case float64:
+		float := typed
+		return OTLPAnyValue{DoubleValue: &float}
+	case []any:
+		// Recurse element-wise into an OTLP array value.
+		values := make([]OTLPAnyValue, 0, len(typed))
+		for _, entry := range typed {
+			values = append(values, otlpAnyValue(entry))
+		}
+		return OTLPAnyValue{
+			ArrayValue: &OTLPArray{
+				Values: values,
+			},
+		}
+	case map[string]any:
+		// Recurse into a kvlist, dropping entries with blank keys.
+		values := make([]OTLPKeyValue, 0, len(typed))
+		for key, entry := range typed {
+			if strings.TrimSpace(key) == "" {
+				continue
+			}
+			values = append(values, OTLPKeyValue{
+				Key:   key,
+				Value: otlpAnyValue(entry),
+			})
+		}
+		return OTLPAnyValue{
+			KvlistValue: &OTLPKVList{
+				Values: values,
+			},
+		}
+	default:
+		// Last resort: stringify with the default %v formatting.
+		return otlpStringValue(fmt.Sprintf("%v", typed))
+	}
+}
diff --git a/pkg/integrations/dash0/send_log_event_test.go b/pkg/integrations/dash0/send_log_event_test.go
new file mode 100644
index 0000000000..8b9206fb0b
--- /dev/null
+++ b/pkg/integrations/dash0/send_log_event_test.go
@@ -0,0 +1,149 @@
+package dash0
+
+import (
+ "io"
+ "net/http"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/superplanehq/superplane/pkg/core"
+ "github.com/superplanehq/superplane/test/support/contexts"
+)
+
+// Test__SendLogEvent__Setup covers configuration validation at save time:
+// an empty record list and a blank message are both rejected.
+func Test__SendLogEvent__Setup(t *testing.T) {
+	component := SendLogEvent{}
+
+	t.Run("requires at least one record", func(t *testing.T) {
+		err := component.Setup(core.SetupContext{
+			Configuration: map[string]any{
+				"records": []map[string]any{},
+			},
+		})
+
+		require.ErrorContains(t, err, "records is required")
+	})
+
+	t.Run("record message is required", func(t *testing.T) {
+		// Whitespace-only messages count as blank.
+		err := component.Setup(core.SetupContext{
+			Configuration: map[string]any{
+				"records": []map[string]any{
+					{
+						"message": " ",
+					},
+				},
+			},
+		})
+
+		require.ErrorContains(t, err, "message is required")
+	})
+}
+
+// Test__SendLogEvent__Execute covers the happy path (request shape,
+// ingestion host, auth header), timestamp validation failure, and the
+// blank-timestamp default.
+func Test__SendLogEvent__Execute(t *testing.T) {
+	component := SendLogEvent{}
+
+	t.Run("sends logs and emits output", func(t *testing.T) {
+		httpContext := &contexts.HTTPContext{
+			Responses: []*http.Response{
+				{
+					StatusCode: http.StatusOK,
+					Body:       io.NopCloser(strings.NewReader(`{"status":"ok"}`)),
+				},
+			},
+		}
+
+		execCtx := &contexts.ExecutionStateContext{}
+		err := component.Execute(core.ExecutionContext{
+			Configuration: map[string]any{
+				"serviceName": "superplane.workflow",
+				"records": []map[string]any{
+					{
+						"message":   "deployment completed",
+						"severity":  "INFO",
+						"timestamp": "2026-02-09T12:00:00Z",
+						"attributes": map[string]any{
+							"environment": "production",
+						},
+					},
+				},
+			},
+			HTTP: httpContext,
+			Integration: &contexts.IntegrationContext{
+				Configuration: map[string]any{
+					"apiToken": "token123",
+					"baseURL":  "https://api.us-west-2.aws.dash0.com",
+				},
+			},
+			ExecutionState: execCtx,
+		})
+
+		require.NoError(t, err)
+		assert.True(t, execCtx.Finished)
+		assert.True(t, execCtx.Passed)
+		assert.Equal(t, SendLogEventPayloadType, execCtx.Type)
+		require.Len(t, httpContext.Requests, 1)
+		assert.Equal(t, http.MethodPost, httpContext.Requests[0].Method)
+		assert.Contains(t, httpContext.Requests[0].URL.String(), "/v1/logs")
+		// Ingestion goes to the ingress host, not the API host.
+		assert.Contains(t, httpContext.Requests[0].URL.Host, "ingress.")
+		assert.Equal(t, "Bearer token123", httpContext.Requests[0].Header.Get("Authorization"))
+	})
+
+	t.Run("invalid timestamp returns error", func(t *testing.T) {
+		execCtx := &contexts.ExecutionStateContext{}
+		err := component.Execute(core.ExecutionContext{
+			Configuration: map[string]any{
+				"records": []map[string]any{
+					{
+						"message":   "deployment completed",
+						"timestamp": "invalid-timestamp",
+					},
+				},
+			},
+			HTTP: &contexts.HTTPContext{},
+			Integration: &contexts.IntegrationContext{
+				Configuration: map[string]any{
+					"apiToken": "token123",
+					"baseURL":  "https://api.us-west-2.aws.dash0.com",
+				},
+			},
+			ExecutionState: execCtx,
+		})
+
+		require.ErrorContains(t, err, "invalid timestamp")
+	})
+
+	t.Run("nil-like timestamp is treated as current time", func(t *testing.T) {
+		httpContext := &contexts.HTTPContext{
+			Responses: []*http.Response{
+				{
+					StatusCode: http.StatusOK,
+					Body:       io.NopCloser(strings.NewReader(`{"status":"ok"}`)),
+				},
+			},
+		}
+
+		execCtx := &contexts.ExecutionStateContext{}
+		err := component.Execute(core.ExecutionContext{
+			Configuration: map[string]any{
+				"records": []map[string]any{
+					{
+						"message":   "deployment completed",
+						"timestamp": "",
+					},
+				},
+			},
+			HTTP: httpContext,
+			Integration: &contexts.IntegrationContext{
+				Configuration: map[string]any{
+					"apiToken": "token123",
+					"baseURL":  "https://api.us-west-2.aws.dash0.com",
+				},
+			},
+			ExecutionState: execCtx,
+		})
+
+		require.NoError(t, err)
+		require.Len(t, httpContext.Requests, 1)
+	})
+}
diff --git a/pkg/integrations/dash0/specification.go b/pkg/integrations/dash0/specification.go
new file mode 100644
index 0000000000..cb50e43eac
--- /dev/null
+++ b/pkg/integrations/dash0/specification.go
@@ -0,0 +1,117 @@
+package dash0
+
+import (
+ "encoding/json"
+ "fmt"
+ "strings"
+)
+
+// parseSpecification validates and parses a JSON object field used by
+// upsert actions. It accepts either a plain JSON object or a single-item
+// JSON array wrapping one non-empty object.
+func parseSpecification(specification, fieldName, scope string) (map[string]any, error) {
+	raw := strings.TrimSpace(specification)
+	if raw == "" {
+		return nil, fmt.Errorf("%s: %s is required", scope, fieldName)
+	}
+
+	// First attempt: a plain JSON object.
+	var asObject map[string]any
+	objectErr := json.Unmarshal([]byte(raw), &asObject)
+	if objectErr == nil {
+		if len(asObject) == 0 {
+			return nil, fmt.Errorf("%s: %s cannot be an empty JSON object", scope, fieldName)
+		}
+		return asObject, nil
+	}
+
+	// Second attempt: an array holding exactly one non-empty object.
+	var asArray []map[string]any
+	if err := json.Unmarshal([]byte(raw), &asArray); err == nil {
+		switch {
+		case len(asArray) == 0:
+			return nil, fmt.Errorf("%s: %s cannot be an empty JSON array", scope, fieldName)
+		case len(asArray) > 1:
+			return nil, fmt.Errorf("%s: %s must be a JSON object or a single-item JSON array", scope, fieldName)
+		case len(asArray[0]) == 0:
+			return nil, fmt.Errorf("%s: %s cannot contain an empty JSON object", scope, fieldName)
+		}
+		return asArray[0], nil
+	}
+
+	// Neither shape parsed; surface the object error since that is the
+	// primary accepted form.
+	return nil, fmt.Errorf("%s: parse %s as JSON object: %w", scope, fieldName, objectErr)
+}
+
+// requireNonEmptyValue trims and validates required string configuration
+// values, returning the trimmed value or a scoped error.
+func requireNonEmptyValue(value, fieldName, scope string) (string, error) {
+	if trimmed := strings.TrimSpace(value); trimmed != "" {
+		return trimmed, nil
+	}
+
+	return "", fmt.Errorf("%s: %s is required", scope, fieldName)
+}
+
+// validateSyntheticCheckSpecification validates the minimum shape required
+// by Dash0 synthetic checks: kind == "Dash0SyntheticCheck" (any case),
+// spec as an object, and spec.plugin.kind as a non-blank string. It also
+// normalizes the map in place: kind is canonicalized and plugin.kind is
+// lowercased.
+func validateSyntheticCheckSpecification(specification map[string]any, fieldName, scope string) error {
+	kindValue, ok := specification["kind"]
+	if !ok {
+		return fmt.Errorf("%s: %s.kind is required (expected \"Dash0SyntheticCheck\")", scope, fieldName)
+	}
+
+	kind, ok := kindValue.(string)
+	if !ok {
+		return fmt.Errorf("%s: %s.kind must be a string", scope, fieldName)
+	}
+
+	trimmedKind := strings.TrimSpace(kind)
+	if trimmedKind == "" {
+		return fmt.Errorf("%s: %s.kind is required (expected \"Dash0SyntheticCheck\")", scope, fieldName)
+	}
+
+	// Case-insensitive match; canonical casing is written back below.
+	if !strings.EqualFold(trimmedKind, "Dash0SyntheticCheck") {
+		return fmt.Errorf("%s: %s.kind must be \"Dash0SyntheticCheck\"", scope, fieldName)
+	}
+
+	specification["kind"] = "Dash0SyntheticCheck"
+
+	specValue, ok := specification["spec"]
+	if !ok {
+		return fmt.Errorf("%s: %s must include object field spec", scope, fieldName)
+	}
+
+	specMap, ok := specValue.(map[string]any)
+	if !ok {
+		return fmt.Errorf("%s: %s.spec must be a JSON object", scope, fieldName)
+	}
+
+	pluginValue, ok := specMap["plugin"]
+	if !ok {
+		return fmt.Errorf("%s: %s.spec.plugin is required", scope, fieldName)
+	}
+
+	pluginMap, ok := pluginValue.(map[string]any)
+	if !ok {
+		return fmt.Errorf("%s: %s.spec.plugin must be a JSON object", scope, fieldName)
+	}
+
+	pluginKindValue, ok := pluginMap["kind"]
+	if !ok {
+		return fmt.Errorf("%s: %s.spec.plugin.kind is required (for example: \"http\")", scope, fieldName)
+	}
+
+	pluginKind, ok := pluginKindValue.(string)
+	if !ok {
+		return fmt.Errorf("%s: %s.spec.plugin.kind must be a string", scope, fieldName)
+	}
+
+	trimmedPluginKind := strings.TrimSpace(pluginKind)
+	if trimmedPluginKind == "" {
+		return fmt.Errorf("%s: %s.spec.plugin.kind is required (for example: \"http\")", scope, fieldName)
+	}
+
+	// Normalize the plugin kind to lowercase in place.
+	pluginMap["kind"] = strings.ToLower(trimmedPluginKind)
+
+	return nil
+}
diff --git a/pkg/integrations/dash0/synthetic_check_form.go b/pkg/integrations/dash0/synthetic_check_form.go
new file mode 100644
index 0000000000..6847aa8f53
--- /dev/null
+++ b/pkg/integrations/dash0/synthetic_check_form.go
@@ -0,0 +1,101 @@
+package dash0
+
+import (
+ "fmt"
+ "strings"
+)
+
+// buildSyntheticCheckSpecificationFromConfiguration validates and builds a
+// synthetic check specification map. If a raw spec JSON string is present
+// it takes precedence over the form fields; otherwise the specification is
+// assembled from name/method/url/plugin/header/body form inputs. Either
+// path runs validateSyntheticCheckSpecification before returning.
+func buildSyntheticCheckSpecificationFromConfiguration(config UpsertSyntheticCheckConfiguration, scope string) (map[string]any, error) {
+	// Raw spec path: parse, validate, and return as-is (normalized).
+	if strings.TrimSpace(config.Spec) != "" {
+		specification, err := parseSpecification(config.Spec, "spec", scope)
+		if err != nil {
+			return nil, err
+		}
+
+		if err := validateSyntheticCheckSpecification(specification, "spec", scope); err != nil {
+			return nil, err
+		}
+
+		return specification, nil
+	}
+
+	// Form path: name, method, and url are all mandatory.
+	name, err := requireNonEmptyValue(config.Name, "name", scope)
+	if err != nil {
+		return nil, err
+	}
+
+	method, err := requireNonEmptyValue(config.Method, "method", scope)
+	if err != nil {
+		return nil, err
+	}
+
+	requestURL, err := requireNonEmptyValue(config.URL, "url", scope)
+	if err != nil {
+		return nil, err
+	}
+
+	// Plugin kind defaults to "http" when not supplied.
+	pluginKind := strings.TrimSpace(config.PluginKind)
+	if pluginKind == "" {
+		pluginKind = "http"
+	}
+
+	request := map[string]any{
+		"method": strings.ToLower(method),
+		"url":    requestURL,
+	}
+
+	// Optional headers and body are only included when present.
+	headers, err := normalizeSyntheticCheckFields(config.Headers, "headers", scope)
+	if err != nil {
+		return nil, err
+	}
+	if len(headers) > 0 {
+		request["headers"] = headers
+	}
+
+	requestBody := strings.TrimSpace(config.RequestBody)
+	if requestBody != "" {
+		request["body"] = requestBody
+	}
+
+	specification := map[string]any{
+		"kind": "Dash0SyntheticCheck",
+		"metadata": map[string]any{
+			"name": name,
+		},
+		"spec": map[string]any{
+			"enabled": config.Enabled,
+			"plugin": map[string]any{
+				"kind": strings.ToLower(pluginKind),
+				"spec": map[string]any{
+					"request": request,
+				},
+			},
+		},
+	}
+
+	// Re-validate the assembled map so both paths share one contract.
+	if err := validateSyntheticCheckSpecification(specification, "spec", scope); err != nil {
+		return nil, err
+	}
+
+	return specification, nil
+}
+
+// normalizeSyntheticCheckFields converts list-based key/value entries into
+// a request map. It returns nil for an empty list and errors when any
+// entry has a blank key; values are trimmed.
+func normalizeSyntheticCheckFields(fields []SyntheticCheckField, fieldName, scope string) (map[string]string, error) {
+	if len(fields) == 0 {
+		return nil, nil
+	}
+
+	result := make(map[string]string, len(fields))
+	for position, entry := range fields {
+		trimmedKey := strings.TrimSpace(entry.Key)
+		if trimmedKey == "" {
+			return nil, fmt.Errorf("%s: %s[%d].key is required", scope, fieldName, position)
+		}
+
+		result[trimmedKey] = strings.TrimSpace(entry.Value)
+	}
+
+	return result, nil
+}
diff --git a/pkg/integrations/dash0/types.go b/pkg/integrations/dash0/types.go
new file mode 100644
index 0000000000..7cb8c31727
--- /dev/null
+++ b/pkg/integrations/dash0/types.go
@@ -0,0 +1,103 @@
+package dash0
+
+// SendLogEventConfiguration stores the batch payload sent to OTLP logs ingestion.
+type SendLogEventConfiguration struct {
+ ServiceName string `json:"serviceName" mapstructure:"serviceName"`
+ Records []SendLogEventRecord `json:"records" mapstructure:"records"`
+}
+
+// SendLogEventRecord represents one workflow log record in component configuration.
+type SendLogEventRecord struct {
+ Message string `json:"message" mapstructure:"message"`
+ Severity string `json:"severity,omitempty" mapstructure:"severity"`
+ Timestamp string `json:"timestamp,omitempty" mapstructure:"timestamp"`
+ Attributes map[string]any `json:"attributes,omitempty" mapstructure:"attributes"`
+}
+
+// OTLPLogsRequest is the OTLP HTTP JSON request root for log ingestion.
+type OTLPLogsRequest struct {
+ ResourceLogs []OTLPResourceLogs `json:"resourceLogs"`
+}
+
+// OTLPResourceLogs groups log records by resource attributes.
+type OTLPResourceLogs struct {
+ Resource OTLPResource `json:"resource"`
+ ScopeLogs []OTLPScopeLogs `json:"scopeLogs"`
+}
+
+// OTLPResource identifies the emitting service and resource metadata.
+type OTLPResource struct {
+ Attributes []OTLPKeyValue `json:"attributes,omitempty"`
+}
+
+// OTLPScopeLogs groups records by instrumentation scope.
+type OTLPScopeLogs struct {
+ Scope OTLPScope `json:"scope"`
+ LogRecords []OTLPLogRecord `json:"logRecords"`
+}
+
+// OTLPScope identifies the instrumentation scope for emitted records.
+type OTLPScope struct {
+ Name string `json:"name,omitempty"`
+}
+
+// OTLPLogRecord represents one OTLP log record entry.
+type OTLPLogRecord struct {
+ TimeUnixNano string `json:"timeUnixNano,omitempty"`
+ SeverityNumber int `json:"severityNumber,omitempty"`
+ SeverityText string `json:"severityText,omitempty"`
+ Body OTLPAnyValue `json:"body"`
+ Attributes []OTLPKeyValue `json:"attributes,omitempty"`
+}
+
+// OTLPKeyValue is a key/value pair used across OTLP resources and log attributes.
+type OTLPKeyValue struct {
+ Key string `json:"key"`
+ Value OTLPAnyValue `json:"value"`
+}
+
+// OTLPAnyValue models the OTLP "AnyValue" union structure.
+type OTLPAnyValue struct {
+ StringValue *string `json:"stringValue,omitempty"`
+ BoolValue *bool `json:"boolValue,omitempty"`
+ IntValue *string `json:"intValue,omitempty"`
+ DoubleValue *float64 `json:"doubleValue,omitempty"`
+ KvlistValue *OTLPKVList `json:"kvlistValue,omitempty"`
+ ArrayValue *OTLPArray `json:"arrayValue,omitempty"`
+ BytesValue *string `json:"bytesValue,omitempty"`
+}
+
+// OTLPKVList stores nested key/value objects inside OTLPAnyValue.
+type OTLPKVList struct {
+ Values []OTLPKeyValue `json:"values"`
+}
+
+// OTLPArray stores nested array values inside OTLPAnyValue.
+type OTLPArray struct {
+ Values []OTLPAnyValue `json:"values"`
+}
+
+// GetCheckDetailsConfiguration stores action settings for check detail retrieval.
+type GetCheckDetailsConfiguration struct {
+ CheckID string `json:"checkId" mapstructure:"checkId"`
+ IncludeHistory bool `json:"includeHistory" mapstructure:"includeHistory"`
+}
+
+// UpsertSyntheticCheckConfiguration stores synthetic check upsert input.
+type UpsertSyntheticCheckConfiguration struct {
+ OriginOrID string `json:"originOrId" mapstructure:"originOrId"`
+ Name string `json:"name" mapstructure:"name"`
+ Enabled bool `json:"enabled" mapstructure:"enabled"`
+ PluginKind string `json:"pluginKind" mapstructure:"pluginKind"`
+ Method string `json:"method" mapstructure:"method"`
+ URL string `json:"url" mapstructure:"url"`
+ Headers []SyntheticCheckField `json:"headers" mapstructure:"headers"`
+ RequestBody string `json:"requestBody" mapstructure:"requestBody"`
+ Spec string `json:"spec" mapstructure:"spec"`
+}
+
+// SyntheticCheckField stores one key/value pair for synthetic check request maps.
+type SyntheticCheckField struct {
+ Key string `json:"key" mapstructure:"key"`
+ Value string `json:"value" mapstructure:"value"`
+}
diff --git a/web_src/src/pages/workflowv2/mappers/dash0/base.ts b/web_src/src/pages/workflowv2/mappers/dash0/base.ts
new file mode 100644
index 0000000000..90be32e639
--- /dev/null
+++ b/web_src/src/pages/workflowv2/mappers/dash0/base.ts
@@ -0,0 +1,63 @@
+import { EventSection } from "@/ui/componentBase";
+import { formatTimeAgo } from "@/utils/date";
+import { getState, getTriggerRenderer } from "..";
+import { ExecutionInfo, NodeInfo, OutputPayload } from "../types";
+
+// Returns the first payload on the execution's "default" output channel,
+// or null when the channel is absent or empty.
+export function getFirstDefaultPayload(execution: ExecutionInfo): OutputPayload | null {
+  const outputs = execution.outputs as { default?: OutputPayload[] } | undefined;
+  const first = outputs?.default?.[0];
+  return first || null;
+}
+
+// Builds the key/value map rendered in the execution-details panel.
+// `label` names the payload section (e.g. "Response", "Check Details").
+// Restored the stripped `Record<string, string>` type arguments — all
+// values written below are strings.
+export function buildDash0ExecutionDetails(execution: ExecutionInfo, label: string): Record<string, string> {
+  const details: Record<string, string> = {};
+  const payload = getFirstDefaultPayload(execution);
+
+  if (payload?.timestamp) {
+    details["Received At"] = new Date(payload.timestamp).toLocaleString();
+  }
+
+  if (!payload?.data) {
+    details[label] = "No data returned";
+    return details;
+  }
+
+  try {
+    // Pretty-print; JSON.stringify can throw (e.g. circular payloads),
+    // in which case fall back to the default string conversion.
+    details[label] = JSON.stringify(payload.data, null, 2);
+  } catch {
+    details[label] = String(payload.data);
+  }
+
+  return details;
+}
+
+// Builds the single event section shown on a Dash0 action node card,
+// titled after the root trigger event that started the execution.
+// `subtitlePrefix`, when provided, is prepended to the relative time
+// (e.g. "3 sent · 5m ago"). Fixed a mojibake separator ("ยท" -> "·")
+// in the subtitle template string.
+export function buildDash0EventSections(
+  nodes: NodeInfo[],
+  execution: ExecutionInfo,
+  componentName: string,
+  subtitlePrefix?: string,
+): EventSection[] {
+  const rootTriggerNode = nodes.find((node) => node.id === execution.rootEvent?.nodeId);
+  // NOTE(review): the non-null assertion hides the case where the root
+  // trigger node is missing from `nodes`; getTriggerRenderer would then
+  // receive undefined — confirm it tolerates that.
+  const rootTriggerRenderer = getTriggerRenderer(rootTriggerNode?.componentName!);
+  const { title } = rootTriggerRenderer.getTitleAndSubtitle({ event: execution.rootEvent });
+  const timeAgo = formatTimeAgo(new Date(execution.createdAt!));
+
+  let subtitle = timeAgo;
+  if (subtitlePrefix) {
+    subtitle = `${subtitlePrefix} · ${timeAgo}`;
+  }
+
+  return [
+    {
+      receivedAt: new Date(execution.createdAt!),
+      eventTitle: title,
+      eventSubtitle: subtitle,
+      eventState: getState(componentName)(execution),
+      // NOTE(review): rootEvent!/id! assume a root event is always present
+      // on rendered executions — confirm upstream guarantees this.
+      eventId: execution.rootEvent!.id!,
+    },
+  ];
+}
+
diff --git a/web_src/src/pages/workflowv2/mappers/dash0/create_synthetic_check.ts b/web_src/src/pages/workflowv2/mappers/dash0/create_synthetic_check.ts
new file mode 100644
index 0000000000..762da3b181
--- /dev/null
+++ b/web_src/src/pages/workflowv2/mappers/dash0/create_synthetic_check.ts
@@ -0,0 +1,54 @@
+import { ComponentBaseProps } from "@/ui/componentBase";
+import { getStateMap } from "..";
+import {
+ ComponentBaseContext,
+ ComponentBaseMapper,
+ ExecutionDetailsContext,
+ NodeInfo,
+ SubtitleContext,
+} from "../types";
+import dash0Icon from "@/assets/icons/integrations/dash0.svg";
+import { MetadataItem } from "@/ui/metadataList";
+import { formatTimeAgo } from "@/utils/date";
+import { buildDash0EventSections, buildDash0ExecutionDetails } from "./base";
+import { UpsertSyntheticCheckConfiguration } from "./types";
+
+// Maps the Dash0 "Create Synthetic Check" component onto the shared
+// component-base UI: card props, execution details, and subtitle.
+export const createSyntheticCheckMapper: ComponentBaseMapper = {
+  props(context: ComponentBaseContext): ComponentBaseProps {
+    // First entry of lastExecutions drives the card (treated as latest).
+    const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null;
+    const componentName = context.componentDefinition.name || "unknown";
+
+    return {
+      iconSrc: dash0Icon,
+      collapsedBackground: "bg-white",
+      collapsed: context.node.isCollapsed,
+      title: context.node.name || context.componentDefinition.label || "Create Synthetic Check",
+      eventSections: lastExecution ? buildDash0EventSections(context.nodes, lastExecution, componentName) : undefined,
+      metadata: metadataList(context.node),
+      includeEmptyState: !lastExecution,
+      eventStateMap: getStateMap(componentName),
+    };
+  },
+
+  // Renders the create response as pretty-printed JSON in the details
+  // panel. Restored the stripped `Record<string, string>` return type.
+  getExecutionDetails(context: ExecutionDetailsContext): Record<string, string> {
+    return buildDash0ExecutionDetails(context.execution, "Create Response");
+  },
+
+  subtitle(context: SubtitleContext): string {
+    return formatTimeAgo(new Date(context.execution.createdAt!));
+  },
+};
+
+// Card metadata for a Create Synthetic Check node: the configured
+// origin/ID, when one is set.
+function metadataList(node: NodeInfo): MetadataItem[] {
+  const configuration = node.configuration as UpsertSyntheticCheckConfiguration;
+  if (!configuration?.originOrId) {
+    return [];
+  }
+  return [{ icon: "hash", label: configuration.originOrId }];
+}
diff --git a/web_src/src/pages/workflowv2/mappers/dash0/get_check_details.ts b/web_src/src/pages/workflowv2/mappers/dash0/get_check_details.ts
new file mode 100644
index 0000000000..61fcff24ab
--- /dev/null
+++ b/web_src/src/pages/workflowv2/mappers/dash0/get_check_details.ts
@@ -0,0 +1,61 @@
+import { ComponentBaseProps } from "@/ui/componentBase";
+import { getStateMap } from "..";
+import {
+ ComponentBaseContext,
+ ComponentBaseMapper,
+ ExecutionDetailsContext,
+ NodeInfo,
+ SubtitleContext,
+} from "../types";
+import dash0Icon from "@/assets/icons/integrations/dash0.svg";
+import { MetadataItem } from "@/ui/metadataList";
+import { formatTimeAgo } from "@/utils/date";
+import { buildDash0EventSections, buildDash0ExecutionDetails } from "./base";
+import { GetCheckDetailsConfiguration } from "./types";
+
+// Maps the Dash0 "Get Check Details" component onto the shared
+// component-base UI: card props, execution details, and subtitle.
+export const getCheckDetailsMapper: ComponentBaseMapper = {
+  props(context: ComponentBaseContext): ComponentBaseProps {
+    // First entry of lastExecutions drives the card (treated as latest).
+    const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null;
+    const componentName = context.componentDefinition.name || "unknown";
+
+    return {
+      iconSrc: dash0Icon,
+      collapsedBackground: "bg-white",
+      collapsed: context.node.isCollapsed,
+      title: context.node.name || context.componentDefinition.label || "Get Check Details",
+      eventSections: lastExecution ? buildDash0EventSections(context.nodes, lastExecution, componentName) : undefined,
+      metadata: metadataList(context.node),
+      includeEmptyState: !lastExecution,
+      eventStateMap: getStateMap(componentName),
+    };
+  },
+
+  // Renders the fetched check details as pretty-printed JSON. Restored
+  // the stripped `Record<string, string>` return type.
+  getExecutionDetails(context: ExecutionDetailsContext): Record<string, string> {
+    return buildDash0ExecutionDetails(context.execution, "Check Details");
+  },
+
+  subtitle(context: SubtitleContext): string {
+    return formatTimeAgo(new Date(context.execution.createdAt!));
+  },
+};
+
+// Card metadata for a Get Check Details node: the configured check ID
+// and a marker when history is included.
+function metadataList(node: NodeInfo): MetadataItem[] {
+  const configuration = node.configuration as GetCheckDetailsConfiguration;
+  const items: MetadataItem[] = [];
+
+  if (configuration?.checkId) {
+    items.push({ icon: "hash", label: configuration.checkId });
+  }
+  if (configuration?.includeHistory) {
+    items.push({ icon: "history", label: "Includes history" });
+  }
+
+  return items;
+}
diff --git a/web_src/src/pages/workflowv2/mappers/dash0/index.ts b/web_src/src/pages/workflowv2/mappers/dash0/index.ts
index fd9865aafd..fc3ade0ff5 100644
--- a/web_src/src/pages/workflowv2/mappers/dash0/index.ts
+++ b/web_src/src/pages/workflowv2/mappers/dash0/index.ts
@@ -4,10 +4,16 @@ import { withOrganizationHeader } from "@/utils/withOrganizationHeader";
import { queryPrometheusMapper } from "./query_prometheus";
import { listIssuesMapper, LIST_ISSUES_STATE_REGISTRY } from "./list_issues";
import { buildActionStateRegistry } from "../utils";
+import { sendLogEventMapper } from "./send_log_event";
+import { getCheckDetailsMapper } from "./get_check_details";
+import { createSyntheticCheckMapper } from "./create_synthetic_check";
export const componentMappers: Record<string, ComponentBaseMapper> = {
queryPrometheus: queryPrometheusMapper,
listIssues: listIssuesMapper,
+ sendLogEvent: sendLogEventMapper,
+ getCheckDetails: getCheckDetailsMapper,
+ createSyntheticCheck: createSyntheticCheckMapper,
};
export const triggerRenderers: Record = {};
@@ -15,6 +21,9 @@ export const triggerRenderers: Record = {};
export const eventStateRegistry: Record = {
listIssues: LIST_ISSUES_STATE_REGISTRY,
queryPrometheus: buildActionStateRegistry("queried"),
+ sendLogEvent: buildActionStateRegistry("sent"),
+ getCheckDetails: buildActionStateRegistry("retrieved"),
+ createSyntheticCheck: buildActionStateRegistry("created"),
};
export async function resolveExecutionErrors(canvasId: string, executionIds: string[]) {
diff --git a/web_src/src/pages/workflowv2/mappers/dash0/send_log_event.ts b/web_src/src/pages/workflowv2/mappers/dash0/send_log_event.ts
new file mode 100644
index 0000000000..7f7f7e0da5
--- /dev/null
+++ b/web_src/src/pages/workflowv2/mappers/dash0/send_log_event.ts
@@ -0,0 +1,73 @@
+import { ComponentBaseProps } from "@/ui/componentBase";
+import { getStateMap } from "..";
+import {
+ ComponentBaseContext,
+ ComponentBaseMapper,
+ ExecutionDetailsContext,
+ NodeInfo,
+ SubtitleContext,
+} from "../types";
+import dash0Icon from "@/assets/icons/integrations/dash0.svg";
+import { MetadataItem } from "@/ui/metadataList";
+import { formatTimeAgo } from "@/utils/date";
+import { buildDash0EventSections, buildDash0ExecutionDetails } from "./base";
+import { SendLogEventConfiguration } from "./types";
+
+// Maps the Dash0 "Send Log Event" component onto the shared
+// component-base UI: card props, execution details, and subtitle.
+export const sendLogEventMapper: ComponentBaseMapper = {
+  props(context: ComponentBaseContext): ComponentBaseProps {
+    // First entry of lastExecutions drives the card (treated as latest).
+    const lastExecution = context.lastExecutions.length > 0 ? context.lastExecutions[0] : null;
+    const componentName = context.componentDefinition.name || "unknown";
+
+    return {
+      iconSrc: dash0Icon,
+      collapsedBackground: "bg-white",
+      collapsed: context.node.isCollapsed,
+      title: context.node.name || context.componentDefinition.label || "Send Log Event",
+      eventSections: lastExecution
+        ? buildDash0EventSections(context.nodes, lastExecution, componentName, getSubtitlePrefix(lastExecution))
+        : undefined,
+      metadata: metadataList(context.node),
+      includeEmptyState: !lastExecution,
+      eventStateMap: getStateMap(componentName),
+    };
+  },
+
+  // Renders the send response as pretty-printed JSON. Restored the
+  // stripped `Record<string, string>` return type.
+  getExecutionDetails(context: ExecutionDetailsContext): Record<string, string> {
+    return buildDash0ExecutionDetails(context.execution, "Response");
+  },
+
+  subtitle(context: SubtitleContext): string {
+    return formatTimeAgo(new Date(context.execution.createdAt!));
+  },
+};
+
+// Card metadata for a Send Log Event node: the configured service name
+// and the number of log records. Fixes pluralization ("1 record" vs
+// "2 records"; previously always "records").
+function metadataList(node: NodeInfo): MetadataItem[] {
+  const metadata: MetadataItem[] = [];
+  const configuration = node.configuration as SendLogEventConfiguration;
+
+  if (configuration?.serviceName) {
+    metadata.push({
+      icon: "database",
+      label: configuration.serviceName,
+    });
+  }
+
+  const recordCount = configuration?.records?.length ?? 0;
+  if (recordCount > 0) {
+    metadata.push({
+      icon: "list",
+      label: `${recordCount} ${recordCount === 1 ? "record" : "records"}`,
+    });
+  }
+
+  return metadata;
+}
+
+// Derives an event-subtitle prefix ("N sent") from the first default
+// output payload's sentCount, or undefined when it is absent.
+function getSubtitlePrefix(execution: { outputs?: unknown }): string | undefined {
+  const outputs = execution.outputs as { default?: Array<{ data?: { sentCount?: number } }> } | undefined;
+  const count = outputs?.default?.[0]?.data?.sentCount;
+  return typeof count === "number" ? `${count} sent` : undefined;
+}
diff --git a/web_src/src/pages/workflowv2/mappers/dash0/types.ts b/web_src/src/pages/workflowv2/mappers/dash0/types.ts
index f26ca938e8..97ea744520 100644
--- a/web_src/src/pages/workflowv2/mappers/dash0/types.ts
+++ b/web_src/src/pages/workflowv2/mappers/dash0/types.ts
@@ -15,6 +15,35 @@ export interface ListIssuesConfiguration {
checkRules?: string[];
}
+/** One log record in the Send Log Event action configuration. */
+export interface SendLogEventRecordConfiguration {
+  /** Log message body. */
+  message: string;
+  /** Severity text; allowed values not visible here — TODO confirm. */
+  severity?: string;
+  /** Record timestamp; format not visible here — presumably ISO 8601, confirm. */
+  timestamp?: string;
+  /**
+   * Attribute key/value pairs. Restored the stripped `<string, string>`
+   * type arguments; confirm the value type against the backend schema.
+   */
+  attributes?: Record<string, string>;
+}
+
+/** Node configuration for the Dash0 Send Log Event action. */
+export interface SendLogEventConfiguration {
+  /** Service name associated with the submission. */
+  serviceName?: string;
+  /** Log records to send. */
+  records?: SendLogEventRecordConfiguration[];
+}
+
+/** Node configuration for the Dash0 Get Check Details action. */
+export interface GetCheckDetailsConfiguration {
+  /** Identifier of the Dash0 check to fetch. */
+  checkId?: string;
+  /** Whether historical data is requested alongside the details. */
+  includeHistory?: boolean;
+}
+
+/**
+ * Node configuration for the Dash0 Create Synthetic Check action.
+ * Field meanings follow docs/components/Dash0.mdx.
+ */
+export interface UpsertSyntheticCheckConfiguration {
+  /** Custom check identifier; generated by SuperPlane when omitted. */
+  originOrId?: string;
+  /** Human-readable synthetic check name. */
+  name?: string;
+  /** Whether the synthetic check is enabled. */
+  enabled?: boolean;
+  /** Check plugin type (currently HTTP). */
+  pluginKind?: string;
+  /** HTTP method for request checks. */
+  method?: string;
+  /** Target URL for the synthetic check. */
+  url?: string;
+  /** Optional request header key/value pairs. */
+  headers?: Array<{ key: string; value: string }>;
+  /** Optional HTTP request body (POST/PUT/PATCH). */
+  requestBody?: string;
+  /** Not covered by the docs — presumably a raw check spec; TODO confirm. */
+  spec?: string;
+}
+
export interface PrometheusResponse {
status: string;
data: {