diff --git a/README.md b/README.md index dcfcae3..600019b 100644 --- a/README.md +++ b/README.md @@ -213,6 +213,7 @@ gait run record|inspect|replay|diff|receipt Run recording and replay gait run session start|append|status|checkpoint|compact Session journaling gait run reduce Reduce runpack by predicate gait mcp proxy|bridge|serve MCP transport adapters +gait gateway ingest Ingest MCP gateway logs into signed policy-enforcement proof records gait policy init|validate|fmt|simulate|test Policy authoring gait doctor [--production-readiness] [adoption] Diagnostics + readiness gait keys init|rotate|verify Signing key lifecycle diff --git a/cmd/gait/gateway.go b/cmd/gait/gateway.go new file mode 100644 index 0000000..f1e0e88 --- /dev/null +++ b/cmd/gait/gateway.go @@ -0,0 +1,149 @@ +package main + +import ( + "flag" + "fmt" + "io" + "strings" + + "github.com/Clyra-AI/gait/core/gateway" + sign "github.com/Clyra-AI/proof/signing" +) + +const ( + gatewayOutputSchemaID = "gait.gateway.output" + gatewayOutputSchemaVersion = "1.0.0" +) + +type gatewayOutput struct { + SchemaID string `json:"schema_id"` + SchemaVersion string `json:"schema_version"` + OK bool `json:"ok"` + Operation string `json:"operation,omitempty"` + Source string `json:"source,omitempty"` + LogPath string `json:"log_path,omitempty"` + ProofRecordsOut string `json:"proof_records_out,omitempty"` + InputEvents int `json:"input_events,omitempty"` + OutputRecords int `json:"output_records,omitempty"` + Warnings []string `json:"warnings,omitempty"` + Error string `json:"error,omitempty"` +} + +func runGateway(arguments []string) int { + if hasExplainFlag(arguments) { + return writeExplain("Ingest gateway audit logs and emit signed policy_enforcement proof records for compliance evidence.") + } + if len(arguments) == 0 { + printGatewayUsage() + return exitInvalidInput + } + switch arguments[0] { + case "ingest": + return runGatewayIngest(arguments[1:]) + default: + printGatewayUsage() + return exitInvalidInput + } 
+} + +func runGatewayIngest(arguments []string) int { + arguments = reorderInterspersedFlags(arguments, map[string]bool{ + "source": true, + "log-path": true, + "proof-out": true, + "key-mode": true, + "private-key": true, + "private-key-env": true, + }) + flagSet := flag.NewFlagSet("gateway-ingest", flag.ContinueOnError) + flagSet.SetOutput(io.Discard) + + var source string + var logPath string + var proofOut string + var keyMode string + var privateKeyPath string + var privateKeyEnv string + var jsonOutput bool + var helpFlag bool + + flagSet.StringVar(&source, "source", "", "gateway source: kong|docker|mintmcp") + flagSet.StringVar(&logPath, "log-path", "", "path to gateway log file") + flagSet.StringVar(&proofOut, "proof-out", "", "optional output path for policy_enforcement proof record JSONL") + flagSet.StringVar(&keyMode, "key-mode", string(sign.ModeDev), "signing key mode: dev or prod") + flagSet.StringVar(&privateKeyPath, "private-key", "", "path to base64 private signing key") + flagSet.StringVar(&privateKeyEnv, "private-key-env", "", "env var containing base64 private signing key") + flagSet.BoolVar(&jsonOutput, "json", false, "emit JSON output") + flagSet.BoolVar(&helpFlag, "help", false, "show help") + + if err := flagSet.Parse(arguments); err != nil { + return writeGatewayOutput(jsonOutput, gatewayOutput{OK: false, Operation: "ingest", Error: err.Error()}, exitCodeForError(err, exitInvalidInput)) + } + if helpFlag { + printGatewayIngestUsage() + return exitOK + } + if len(flagSet.Args()) > 0 { + return writeGatewayOutput(jsonOutput, gatewayOutput{OK: false, Operation: "ingest", Error: "unexpected positional arguments"}, exitInvalidInput) + } + if strings.TrimSpace(source) == "" || strings.TrimSpace(logPath) == "" { + return writeGatewayOutput(jsonOutput, gatewayOutput{OK: false, Operation: "ingest", Error: "expected --source and --log-path "}, exitInvalidInput) + } + + keyPair, warnings, err := sign.LoadSigningKey(sign.KeyConfig{ + Mode: 
sign.KeyMode(strings.ToLower(strings.TrimSpace(keyMode))), + PrivateKeyPath: privateKeyPath, + PrivateKeyEnv: privateKeyEnv, + }) + if err != nil { + return writeGatewayOutput(jsonOutput, gatewayOutput{OK: false, Operation: "ingest", Error: err.Error()}, exitCodeForError(err, exitInvalidInput)) + } + + result, err := gateway.IngestLogs(gateway.IngestOptions{ + Source: strings.TrimSpace(source), + LogPath: strings.TrimSpace(logPath), + OutputPath: strings.TrimSpace(proofOut), + ProducerVersion: version, + SigningPrivateKey: keyPair.Private, + }) + if err != nil { + return writeGatewayOutput(jsonOutput, gatewayOutput{OK: false, Operation: "ingest", Error: err.Error()}, exitCodeForError(err, exitInvalidInput)) + } + return writeGatewayOutput(jsonOutput, gatewayOutput{ + OK: true, + Operation: "ingest", + Source: result.Source, + LogPath: result.LogPath, + ProofRecordsOut: result.ProofRecordsOut, + InputEvents: result.InputEvents, + OutputRecords: result.OutputRecords, + Warnings: warnings, + }, exitOK) +} + +func writeGatewayOutput(jsonOutput bool, output gatewayOutput, exitCode int) int { + output.SchemaID = gatewayOutputSchemaID + output.SchemaVersion = gatewayOutputSchemaVersion + if jsonOutput { + return writeJSONOutput(output, exitCode) + } + if !output.OK { + fmt.Printf("gateway %s error: %s\n", output.Operation, output.Error) + return exitCode + } + fmt.Printf("gateway %s: source=%s input=%d output=%d proof=%s\n", output.Operation, output.Source, output.InputEvents, output.OutputRecords, output.ProofRecordsOut) + if len(output.Warnings) > 0 { + fmt.Printf("warnings: %s\n", strings.Join(output.Warnings, "; ")) + } + return exitCode +} + +func printGatewayUsage() { + fmt.Println("Usage:") + fmt.Println(" gait gateway ingest --source --log-path [--proof-out ] [--key-mode dev|prod] [--private-key |--private-key-env ] [--json] [--explain]") +} + +func printGatewayIngestUsage() { + fmt.Println("Usage:") + fmt.Println(" gait gateway ingest --source --log-path 
[--proof-out ] [--key-mode dev|prod] [--private-key |--private-key-env ] [--json] [--explain]") +} diff --git a/cmd/gait/gateway_cli_test.go b/cmd/gait/gateway_cli_test.go new file mode 100644 index 0000000..0c08e98 --- /dev/null +++ b/cmd/gait/gateway_cli_test.go @@ -0,0 +1,66 @@ +package main + +import ( + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" +) + +func TestRunGatewayIngestCommand(t *testing.T) { + workDir := t.TempDir() + withWorkingDir(t, workDir) + + logPath := filepath.Join(workDir, "mintmcp.log.jsonl") + mustWriteFile(t, logPath, strings.Join([]string{ + `{"timestamp":"2026-02-20T12:00:00Z","tool_name":"tool.read","verdict":"allow","request_id":"req-a"}`, + `{"timestamp":"2026-02-20T12:00:01Z","tool_name":"tool.write","verdict":"block","request_id":"req-b","reason_code":"policy_blocked"}`, + }, "\n")) + + var code int + raw := captureStdout(t, func() { + code = runGateway([]string{ + "ingest", + "--source", "mintmcp", + "--log-path", logPath, + "--json", + }) + }) + if code != exitOK { + t.Fatalf("runGateway ingest expected %d got %d", exitOK, code) + } + var output gatewayOutput + if err := json.Unmarshal([]byte(raw), &output); err != nil { + t.Fatalf("decode gateway output: %v raw=%q", err, raw) + } + if !output.OK || output.OutputRecords != 2 { + t.Fatalf("unexpected gateway ingest output: %#v", output) + } + if strings.TrimSpace(output.ProofRecordsOut) == "" { + t.Fatalf("expected proof_records_out in output") + } + // #nosec G304 -- test validates explicit output path from command result. 
+ proofRaw, err := os.ReadFile(output.ProofRecordsOut) + if err != nil { + t.Fatalf("read proof record output: %v", err) + } + if !strings.Contains(string(proofRaw), `"record_type":"policy_enforcement"`) { + t.Fatalf("expected policy_enforcement records in output file: %s", string(proofRaw)) + } +} + +func TestRunGatewayValidationAndHelpPaths(t *testing.T) { + if code := runGateway([]string{}); code != exitInvalidInput { + t.Fatalf("runGateway missing args expected %d got %d", exitInvalidInput, code) + } + if code := runGateway([]string{"unknown"}); code != exitInvalidInput { + t.Fatalf("runGateway unknown subcommand expected %d got %d", exitInvalidInput, code) + } + if code := runGateway([]string{"ingest", "--help"}); code != exitOK { + t.Fatalf("runGateway help expected %d got %d", exitOK, code) + } + if code := runGateway([]string{"ingest", "--source", "kong", "--json"}); code != exitInvalidInput { + t.Fatalf("runGateway missing log-path expected %d got %d", exitInvalidInput, code) + } +} diff --git a/cmd/gait/job.go b/cmd/gait/job.go index 91a4368..2bafacb 100644 --- a/cmd/gait/job.go +++ b/cmd/gait/job.go @@ -1,11 +1,15 @@ package main import ( + "encoding/json" "flag" "fmt" "io" + "os" + "path/filepath" "strings" + "github.com/Clyra-AI/gait/core/gate" "github.com/Clyra-AI/gait/core/jobruntime" ) @@ -63,7 +67,11 @@ func runJobSubmit(arguments []string) int { "id": true, "root": true, "actor": true, + "identity": true, "env-fingerprint": true, + "policy": true, + "policy-digest": true, + "policy-ref": true, }) flagSet := flag.NewFlagSet("job-submit", flag.ContinueOnError) flagSet.SetOutput(io.Discard) @@ -71,14 +79,22 @@ func runJobSubmit(arguments []string) int { var jobID string var root string var actor string + var identity string var envFingerprint string + var policyPath string + var policyDigest string + var policyRef string var jsonOutput bool var helpFlag bool flagSet.StringVar(&jobID, "id", "", "job identifier") flagSet.StringVar(&root, "root", 
"./gait-out/jobs", "job state root directory") flagSet.StringVar(&actor, "actor", "", "actor identity") + flagSet.StringVar(&identity, "identity", "", "agent identity bound to the job") flagSet.StringVar(&envFingerprint, "env-fingerprint", "", "optional environment fingerprint override") + flagSet.StringVar(&policyPath, "policy", "", "path to policy yaml used for submit/resume enforcement") + flagSet.StringVar(&policyDigest, "policy-digest", "", "policy digest override (sha256:...)") + flagSet.StringVar(&policyRef, "policy-ref", "", "policy reference identifier") flagSet.BoolVar(&jsonOutput, "json", false, "emit JSON output") flagSet.BoolVar(&helpFlag, "help", false, "show help") @@ -92,11 +108,18 @@ func runJobSubmit(arguments []string) int { if len(flagSet.Args()) > 0 { return writeJobOutput(jsonOutput, jobOutput{OK: false, Operation: "submit", Error: "unexpected positional arguments"}, exitInvalidInput) } + resolvedPolicyDigest, resolvedPolicyRef, err := resolveJobPolicyMetadata(policyPath, policyDigest, policyRef) + if err != nil { + return writeJobOutput(jsonOutput, jobOutput{OK: false, Operation: "submit", Error: err.Error()}, exitInvalidInput) + } state, err := jobruntime.Submit(root, jobruntime.SubmitOptions{ JobID: strings.TrimSpace(jobID), Actor: strings.TrimSpace(actor), + Identity: strings.TrimSpace(identity), ProducerVersion: version, EnvironmentFingerprint: jobruntime.EnvironmentFingerprint(envFingerprint), + PolicyDigest: resolvedPolicyDigest, + PolicyRef: resolvedPolicyRef, }) if err != nil { return writeJobOutput(jsonOutput, jobOutput{OK: false, Operation: "submit", Error: err.Error()}, exitCodeForError(err, exitInvalidInput)) @@ -362,11 +385,17 @@ func runJobApprove(arguments []string) int { func runJobResume(arguments []string) int { arguments = reorderInterspersedFlags(arguments, map[string]bool{ - "id": true, - "root": true, - "actor": true, - "reason": true, - "env-fingerprint": true, + "id": true, + "root": true, + "actor": true, + 
"identity": true, + "reason": true, + "env-fingerprint": true, + "policy": true, + "policy-digest": true, + "policy-ref": true, + "identity-validation-source": true, + "identity-revocations": true, }) flagSet := flag.NewFlagSet("job-resume", flag.ContinueOnError) flagSet.SetOutput(io.Discard) @@ -374,8 +403,15 @@ func runJobResume(arguments []string) int { var jobID string var root string var actor string + var identity string var reason string var envFingerprint string + var policyPath string + var policyDigest string + var policyRef string + var identityValidationSource string + var identityRevocationsPath string + var identityRevoked bool var allowEnvMismatch bool var jsonOutput bool var helpFlag bool @@ -383,8 +419,15 @@ func runJobResume(arguments []string) int { flagSet.StringVar(&jobID, "id", "", "job identifier") flagSet.StringVar(&root, "root", "./gait-out/jobs", "job state root directory") flagSet.StringVar(&actor, "actor", "", "actor identity") + flagSet.StringVar(&identity, "identity", "", "agent identity to validate on resume") flagSet.StringVar(&reason, "reason", "", "resume reason") flagSet.StringVar(&envFingerprint, "env-fingerprint", "", "override current environment fingerprint") + flagSet.StringVar(&policyPath, "policy", "", "path to policy yaml for resume re-evaluation") + flagSet.StringVar(&policyDigest, "policy-digest", "", "policy digest override (sha256:...)") + flagSet.StringVar(&policyRef, "policy-ref", "", "policy reference identifier") + flagSet.StringVar(&identityValidationSource, "identity-validation-source", "", "identity validation source label (for audit payload)") + flagSet.StringVar(&identityRevocationsPath, "identity-revocations", "", "path to revoked identities file (json array/object or newline list)") + flagSet.BoolVar(&identityRevoked, "identity-revoked", false, "mark identity as revoked and block resume") flagSet.BoolVar(&allowEnvMismatch, "allow-env-mismatch", false, "allow resume when environment fingerprint differs") 
flagSet.BoolVar(&jsonOutput, "json", false, "emit JSON output") flagSet.BoolVar(&helpFlag, "help", false, "show help") @@ -399,11 +442,45 @@ func runJobResume(arguments []string) int { if len(flagSet.Args()) > 0 { return writeJobOutput(jsonOutput, jobOutput{OK: false, Operation: "resume", Error: "unexpected positional arguments"}, exitInvalidInput) } + resolvedPolicyDigest, resolvedPolicyRef, err := resolveJobPolicyMetadata(policyPath, policyDigest, policyRef) + if err != nil { + return writeJobOutput(jsonOutput, jobOutput{OK: false, Operation: "resume", Error: err.Error()}, exitInvalidInput) + } + normalizedJobID := strings.TrimSpace(jobID) + normalizedIdentity := strings.TrimSpace(identity) + if strings.TrimSpace(identityRevocationsPath) != "" { + revokedSet, err := loadRevokedIdentities(identityRevocationsPath) + if err != nil { + return writeJobOutput(jsonOutput, jobOutput{OK: false, Operation: "resume", Error: err.Error()}, exitInvalidInput) + } + if normalizedIdentity == "" { + state, statusErr := jobruntime.Status(root, normalizedJobID) + if statusErr != nil { + return writeJobOutput(jsonOutput, jobOutput{OK: false, Operation: "resume", Error: statusErr.Error()}, exitCodeForError(statusErr, exitInvalidInput)) + } + normalizedIdentity = strings.TrimSpace(state.Identity) + } + if identityIsRevoked(revokedSet, normalizedIdentity) { + identityRevoked = true + } + if strings.TrimSpace(identityValidationSource) == "" { + identityValidationSource = "revocation_list" + } + } + requirePolicyEvaluation := strings.TrimSpace(policyPath) != "" || strings.TrimSpace(resolvedPolicyDigest) != "" || strings.TrimSpace(resolvedPolicyRef) != "" + requireIdentityValidation := normalizedIdentity != "" || strings.TrimSpace(identityRevocationsPath) != "" state, err := jobruntime.Resume(root, strings.TrimSpace(jobID), jobruntime.ResumeOptions{ Actor: strings.TrimSpace(actor), + Identity: normalizedIdentity, Reason: strings.TrimSpace(reason), CurrentEnvironmentFingerprint: 
jobruntime.EnvironmentFingerprint(envFingerprint), AllowEnvironmentMismatch: allowEnvMismatch, + PolicyDigest: resolvedPolicyDigest, + PolicyRef: resolvedPolicyRef, + RequirePolicyEvaluation: requirePolicyEvaluation, + RequireIdentityValidation: requireIdentityValidation, + IdentityValidationSource: strings.TrimSpace(identityValidationSource), + IdentityRevoked: identityRevoked, }) if err != nil { return writeJobOutput(jsonOutput, jobOutput{OK: false, Operation: "resume", Error: err.Error()}, exitCodeForError(err, exitInvalidInput)) @@ -470,21 +547,21 @@ func writeJobOutput(jsonOutput bool, output jobOutput, exitCode int) int { func printJobUsage() { fmt.Println("Usage:") - fmt.Println(" gait job submit --id [--root ./gait-out/jobs] [--actor ] [--env-fingerprint ] [--json] [--explain]") + fmt.Println(" gait job submit --id [--root ./gait-out/jobs] [--actor ] [--identity ] [--policy |--policy-digest ] [--policy-ref ] [--env-fingerprint ] [--json] [--explain]") fmt.Println(" gait job status --id [--root ./gait-out/jobs] [--json] [--explain]") fmt.Println(" gait job checkpoint add --id --type --summary [--required-action ] [--actor ] [--root ./gait-out/jobs] [--json] [--explain]") fmt.Println(" gait job checkpoint list --id [--root ./gait-out/jobs] [--json] [--explain]") fmt.Println(" gait job checkpoint show --id --checkpoint [--root ./gait-out/jobs] [--json] [--explain]") fmt.Println(" gait job pause --id [--actor ] [--root ./gait-out/jobs] [--json] [--explain]") fmt.Println(" gait job approve --id --actor [--reason ] [--root ./gait-out/jobs] [--json] [--explain]") - fmt.Println(" gait job resume --id [--actor ] [--reason ] [--env-fingerprint ] [--allow-env-mismatch] [--root ./gait-out/jobs] [--json] [--explain]") + fmt.Println(" gait job resume --id [--actor ] [--identity ] [--reason ] [--policy |--policy-digest ] [--policy-ref ] [--identity-revocations |--identity-revoked] [--identity-validation-source ] [--env-fingerprint ] [--allow-env-mismatch] [--root 
./gait-out/jobs] [--json] [--explain]") fmt.Println(" gait job cancel --id [--actor ] [--root ./gait-out/jobs] [--json] [--explain]") fmt.Println(" gait job inspect --id [--root ./gait-out/jobs] [--json] [--explain]") } func printJobSubmitUsage() { fmt.Println("Usage:") - fmt.Println(" gait job submit --id [--root ./gait-out/jobs] [--actor ] [--env-fingerprint ] [--json] [--explain]") + fmt.Println(" gait job submit --id [--root ./gait-out/jobs] [--actor ] [--identity ] [--policy |--policy-digest ] [--policy-ref ] [--env-fingerprint ] [--json] [--explain]") } func printJobStatusUsage() { @@ -526,7 +603,7 @@ func printJobApproveUsage() { func printJobResumeUsage() { fmt.Println("Usage:") - fmt.Println(" gait job resume --id [--actor ] [--reason ] [--env-fingerprint ] [--allow-env-mismatch] [--root ./gait-out/jobs] [--json] [--explain]") + fmt.Println(" gait job resume --id [--actor ] [--identity ] [--reason ] [--policy |--policy-digest ] [--policy-ref ] [--identity-revocations |--identity-revoked] [--identity-validation-source ] [--env-fingerprint ] [--allow-env-mismatch] [--root ./gait-out/jobs] [--json] [--explain]") } func printJobCancelUsage() { @@ -538,3 +615,86 @@ func printJobInspectUsage() { fmt.Println("Usage:") fmt.Println(" gait job inspect --id [--root ./gait-out/jobs] [--json] [--explain]") } + +func resolveJobPolicyMetadata(policyPath string, policyDigest string, policyRef string) (string, string, error) { + trimmedPolicyPath := strings.TrimSpace(policyPath) + resolvedDigest := strings.TrimSpace(policyDigest) + resolvedRef := strings.TrimSpace(policyRef) + if trimmedPolicyPath != "" { + policy, err := gate.LoadPolicyFile(trimmedPolicyPath) + if err != nil { + return "", "", fmt.Errorf("load policy: %w", err) + } + computedDigest, err := gate.PolicyDigest(policy) + if err != nil { + return "", "", fmt.Errorf("digest policy: %w", err) + } + if resolvedDigest != "" && resolvedDigest != computedDigest { + return "", "", fmt.Errorf("policy digest mismatch: 
provided=%s computed=%s", resolvedDigest, computedDigest) + } + resolvedDigest = computedDigest + if resolvedRef == "" { + resolvedRef = filepath.Clean(trimmedPolicyPath) + } + } + return resolvedDigest, resolvedRef, nil +} + +func loadRevokedIdentities(path string) (map[string]struct{}, error) { + cleanPath := filepath.Clean(strings.TrimSpace(path)) + if cleanPath == "" || cleanPath == "." { + return nil, fmt.Errorf("identity revocations path is required") + } + // #nosec G304 -- explicit local CLI path. + payload, err := os.ReadFile(cleanPath) + if err != nil { + return nil, fmt.Errorf("read identity revocations: %w", err) + } + trimmed := strings.TrimSpace(string(payload)) + revoked := map[string]struct{}{} + if trimmed == "" { + return revoked, nil + } + + var list []string + if err := json.Unmarshal([]byte(trimmed), &list); err == nil { + for _, identity := range list { + value := strings.TrimSpace(identity) + if value != "" { + revoked[value] = struct{}{} + } + } + return revoked, nil + } + var object struct { + RevokedIdentities []string `json:"revoked_identities"` + Identities []string `json:"identities"` + } + if err := json.Unmarshal([]byte(trimmed), &object); err == nil { + for _, identity := range append(object.RevokedIdentities, object.Identities...) 
{ + value := strings.TrimSpace(identity) + if value != "" { + revoked[value] = struct{}{} + } + } + return revoked, nil + } + + for _, line := range strings.Split(trimmed, "\n") { + value := strings.TrimSpace(line) + if value == "" || strings.HasPrefix(value, "#") { + continue + } + revoked[value] = struct{}{} + } + return revoked, nil +} + +func identityIsRevoked(revoked map[string]struct{}, identity string) bool { + value := strings.TrimSpace(identity) + if value == "" || len(revoked) == 0 { + return false + } + _, ok := revoked[value] + return ok +} diff --git a/cmd/gait/job_cli_test.go b/cmd/gait/job_cli_test.go index d8e49e8..bbe27f3 100644 --- a/cmd/gait/job_cli_test.go +++ b/cmd/gait/job_cli_test.go @@ -2,7 +2,9 @@ package main import ( "encoding/json" + "os" "path/filepath" + "reflect" "strings" "testing" ) @@ -160,6 +162,294 @@ func TestRunJobHelpAndErrorPaths(t *testing.T) { } } +func TestRunJobResumeReevaluatesPolicyAndIdentity(t *testing.T) { + workDir := t.TempDir() + withWorkingDir(t, workDir) + root := filepath.Join(workDir, "jobs") + jobID := "job_cli_policy_resume" + + policyA := filepath.Join(workDir, "policy_a.yaml") + policyB := filepath.Join(workDir, "policy_b.yaml") + if err := os.WriteFile(policyA, []byte(` +schema_id: gait.gate.policy +schema_version: 1.0.0 +default_verdict: block +rules: + - name: allow_read + effect: allow + match: + tool_names: ["tool.read"] +`), 0o600); err != nil { + t.Fatalf("write policy A: %v", err) + } + if err := os.WriteFile(policyB, []byte(` +schema_id: gait.gate.policy +schema_version: 1.0.0 +default_verdict: block +rules: + - name: allow_read_then_write + effect: allow + match: + tool_names: ["tool.read", "tool.write"] +`), 0o600); err != nil { + t.Fatalf("write policy B: %v", err) + } + + submitCode, submitOut := runJobJSON(t, []string{ + "submit", + "--id", jobID, + "--root", root, + "--identity", "agent.alice", + "--policy", policyA, + "--json", + }) + if submitCode != exitOK { + t.Fatalf("submit expected %d 
got %d output=%#v", exitOK, submitCode, submitOut) + } + if submitOut.Job == nil || strings.TrimSpace(submitOut.Job.PolicyDigest) == "" { + t.Fatalf("expected submit output to include policy digest: %#v", submitOut) + } + + pauseCode, pauseOut := runJobJSON(t, []string{"pause", "--id", jobID, "--root", root, "--json"}) + if pauseCode != exitOK || pauseOut.Job == nil || pauseOut.Job.Status != "paused" { + t.Fatalf("pause before resume expected paused: code=%d output=%#v", pauseCode, pauseOut) + } + + missingPolicyCode, missingPolicyOut := runJobJSON(t, []string{"resume", "--id", jobID, "--root", root, "--json"}) + if missingPolicyCode != exitInvalidInput { + t.Fatalf("resume without policy expected %d got %d output=%#v", exitInvalidInput, missingPolicyCode, missingPolicyOut) + } + if !strings.Contains(missingPolicyOut.Error, "policy evaluation required") { + t.Fatalf("expected policy evaluation required error, got %#v", missingPolicyOut) + } + + resumeCode, resumeOut := runJobJSON(t, []string{ + "resume", + "--id", jobID, + "--root", root, + "--policy", policyB, + "--identity-validation-source", "revocation_list", + "--json", + }) + if resumeCode != exitOK { + t.Fatalf("resume expected %d got %d output=%#v", exitOK, resumeCode, resumeOut) + } + if resumeOut.Job == nil || resumeOut.Job.StatusReasonCode != "resumed_with_policy_transition" { + t.Fatalf("expected resumed_with_policy_transition, got %#v", resumeOut) + } + if submitOut.Job.PolicyDigest == resumeOut.Job.PolicyDigest { + t.Fatalf("expected policy digest transition on resume: submit=%s resume=%s", submitOut.Job.PolicyDigest, resumeOut.Job.PolicyDigest) + } + + pauseAgainCode, pauseAgainOut := runJobJSON(t, []string{"pause", "--id", jobID, "--root", root, "--json"}) + if pauseAgainCode != exitOK || pauseAgainOut.Job == nil || pauseAgainOut.Job.Status != "paused" { + t.Fatalf("pause before revoked identity check expected paused: code=%d output=%#v", pauseAgainCode, pauseAgainOut) + } + + revocationsPath := 
filepath.Join(workDir, "revoked_identities.txt") + if err := os.WriteFile(revocationsPath, []byte("agent.alice\n"), 0o600); err != nil { + t.Fatalf("write revoked identities: %v", err) + } + revokedCode, revokedOut := runJobJSON(t, []string{ + "resume", + "--id", jobID, + "--root", root, + "--policy", policyB, + "--identity-revocations", revocationsPath, + "--json", + }) + if revokedCode != exitInvalidInput { + t.Fatalf("resume revoked identity expected %d got %d output=%#v", exitInvalidInput, revokedCode, revokedOut) + } + if !strings.Contains(revokedOut.Error, "identity revoked") { + t.Fatalf("expected identity revoked error, got %#v", revokedOut) + } +} + +func TestRunJobSubmitRejectsPolicyDigestMismatch(t *testing.T) { + workDir := t.TempDir() + withWorkingDir(t, workDir) + root := filepath.Join(workDir, "jobs") + policyPath := filepath.Join(workDir, "policy.yaml") + if err := os.WriteFile(policyPath, []byte(` +schema_id: gait.gate.policy +schema_version: 1.0.0 +default_verdict: allow +rules: + - name: allow-read + effect: allow + match: + tool_names: [tool.read] +`), 0o600); err != nil { + t.Fatalf("write policy: %v", err) + } + + code, out := runJobJSON(t, []string{ + "submit", + "--id", "job_digest_mismatch", + "--root", root, + "--policy", policyPath, + "--policy-digest", "sha256:deadbeef", + "--json", + }) + if code != exitInvalidInput { + t.Fatalf("submit mismatch expected %d got %d output=%#v", exitInvalidInput, code, out) + } + if !strings.Contains(out.Error, "policy digest mismatch") { + t.Fatalf("expected policy digest mismatch error, got %#v", out) + } +} + +func TestLoadRevokedIdentitiesFormats(t *testing.T) { + workDir := t.TempDir() + + arrayPath := filepath.Join(workDir, "revoked_array.json") + if err := os.WriteFile(arrayPath, []byte(`["agent.a","agent.b"]`), 0o600); err != nil { + t.Fatalf("write array revocations: %v", err) + } + revoked, err := loadRevokedIdentities(arrayPath) + if err != nil { + t.Fatalf("load array revocations: %v", err) + 
} + expected := map[string]struct{}{"agent.a": {}, "agent.b": {}} + if !reflect.DeepEqual(revoked, expected) { + t.Fatalf("unexpected array revocations: %#v", revoked) + } + + objectPath := filepath.Join(workDir, "revoked_object.json") + if err := os.WriteFile(objectPath, []byte(`{"revoked_identities":["agent.c"],"identities":["agent.d"]}`), 0o600); err != nil { + t.Fatalf("write object revocations: %v", err) + } + revoked, err = loadRevokedIdentities(objectPath) + if err != nil { + t.Fatalf("load object revocations: %v", err) + } + expected = map[string]struct{}{"agent.c": {}, "agent.d": {}} + if !reflect.DeepEqual(revoked, expected) { + t.Fatalf("unexpected object revocations: %#v", revoked) + } + + linesPath := filepath.Join(workDir, "revoked_lines.txt") + if err := os.WriteFile(linesPath, []byte("# comment\nagent.e\n\nagent.f\n"), 0o600); err != nil { + t.Fatalf("write line revocations: %v", err) + } + revoked, err = loadRevokedIdentities(linesPath) + if err != nil { + t.Fatalf("load line revocations: %v", err) + } + expected = map[string]struct{}{"agent.e": {}, "agent.f": {}} + if !reflect.DeepEqual(revoked, expected) { + t.Fatalf("unexpected line revocations: %#v", revoked) + } +} + +func TestLoadRevokedIdentitiesErrorsAndIdentityLookup(t *testing.T) { + if _, err := loadRevokedIdentities("."); err == nil { + t.Fatalf("expected path validation error") + } + if _, err := loadRevokedIdentities(filepath.Join(t.TempDir(), "missing.txt")); err == nil { + t.Fatalf("expected read error for missing file") + } + + revoked := map[string]struct{}{"agent.revoked": {}} + if !identityIsRevoked(revoked, "agent.revoked") { + t.Fatalf("expected identityIsRevoked positive match") + } + if identityIsRevoked(revoked, "agent.ok") { + t.Fatalf("expected identityIsRevoked negative match") + } + if identityIsRevoked(nil, "agent.revoked") { + t.Fatalf("expected empty revoked set to return false") + } +} + +func TestResolveJobPolicyMetadata(t *testing.T) { + workDir := t.TempDir() + 
policyPath := filepath.Join(workDir, "policy.yaml") + if err := os.WriteFile(policyPath, []byte(` +schema_id: gait.gate.policy +schema_version: 1.0.0 +default_verdict: allow +rules: + - name: allow-read + effect: allow + match: + tool_names: [tool.read] +`), 0o600); err != nil { + t.Fatalf("write policy: %v", err) + } + + digest, ref, err := resolveJobPolicyMetadata(policyPath, "", "") + if err != nil { + t.Fatalf("resolve policy metadata: %v", err) + } + if strings.TrimSpace(digest) == "" { + t.Fatalf("expected computed policy digest") + } + if ref != filepath.Clean(policyPath) { + t.Fatalf("expected default policy ref to be cleaned path, got %q", ref) + } + + digest2, ref2, err := resolveJobPolicyMetadata("", "sha256:abc", "ref-1") + if err != nil { + t.Fatalf("resolve metadata without policy path: %v", err) + } + if digest2 != "sha256:abc" || ref2 != "ref-1" { + t.Fatalf("expected passthrough policy metadata, got digest=%q ref=%q", digest2, ref2) + } +} + +func TestRunJobResumeWithIdentityRevokedFlag(t *testing.T) { + workDir := t.TempDir() + withWorkingDir(t, workDir) + root := filepath.Join(workDir, "jobs") + jobID := "job_cli_revoked_flag" + + policyPath := filepath.Join(workDir, "policy.yaml") + if err := os.WriteFile(policyPath, []byte(` +schema_id: gait.gate.policy +schema_version: 1.0.0 +default_verdict: allow +rules: + - name: allow-read + effect: allow + match: + tool_names: [tool.read] +`), 0o600); err != nil { + t.Fatalf("write policy: %v", err) + } + + if code := runJob([]string{ + "submit", + "--id", jobID, + "--root", root, + "--identity", "agent.revoked", + "--policy", policyPath, + "--json", + }); code != exitOK { + t.Fatalf("submit expected %d got %d", exitOK, code) + } + if code := runJob([]string{"pause", "--id", jobID, "--root", root, "--json"}); code != exitOK { + t.Fatalf("pause expected %d got %d", exitOK, code) + } + + code, out := runJobJSON(t, []string{ + "resume", + "--id", jobID, + "--root", root, + "--policy", policyPath, + 
"--identity", "agent.revoked", + "--identity-revoked", + "--json", + }) + if code != exitInvalidInput { + t.Fatalf("resume with identity revoked flag expected %d got %d output=%#v", exitInvalidInput, code, out) + } + if !strings.Contains(out.Error, "identity revoked") { + t.Fatalf("expected identity revoked error, got %#v", out) + } +} + func runJobJSON(t *testing.T, args []string) (int, jobOutput) { t.Helper() var code int diff --git a/cmd/gait/main.go b/cmd/gait/main.go index 5f9e7b8..25a35de 100644 --- a/cmd/gait/main.go +++ b/cmd/gait/main.go @@ -101,6 +101,8 @@ func runDispatch(arguments []string) int { return runIncident(arguments[2:]) case "registry": return runRegistry(arguments[2:]) + case "gateway": + return runGateway(arguments[2:]) case "migrate": return runMigrate(arguments[2:]) case "mcp": @@ -138,7 +140,7 @@ func normalizeAdoptionCommand(arguments []string) string { return "version" case "--explain": return "explain" - case "approve-script", "list-scripts", "gate", "policy", "keys", "trace", "regress", "run", "job", "pack", "report", "scout", "guard", "incident", "registry", "mcp", "voice", "doctor", "delegate", "ui": + case "approve-script", "list-scripts", "gate", "policy", "keys", "trace", "regress", "run", "job", "pack", "report", "scout", "guard", "incident", "registry", "gateway", "mcp", "voice", "doctor", "delegate", "ui": if len(arguments) > 2 { subcommand := strings.TrimSpace(arguments[2]) if subcommand != "" && !strings.HasPrefix(subcommand, "-") { diff --git a/cmd/gait/main_test.go b/cmd/gait/main_test.go index 205b005..f941c8c 100644 --- a/cmd/gait/main_test.go +++ b/cmd/gait/main_test.go @@ -163,6 +163,9 @@ func TestRunDispatch(t *testing.T) { if code := run([]string{"gait", "registry", "verify", "--help"}); code != exitOK { t.Fatalf("run registry verify help: expected %d got %d", exitOK, code) } + if code := run([]string{"gait", "gateway", "ingest", "--help"}); code != exitOK { + t.Fatalf("run gateway ingest help: expected %d got %d", 
exitOK, code) + } if code := run([]string{"gait", "migrate", "--help"}); code != exitOK { t.Fatalf("run migrate help: expected %d got %d", exitOK, code) } @@ -203,6 +206,7 @@ func TestTopLevelUsageIncludesSessionAndMCPServe(t *testing.T) { "gait run session start", "gait run session append", "gait run session checkpoint", + "gait gateway ingest", "gait mcp serve --policy ", } { if !strings.Contains(raw, snippet) { diff --git a/cmd/gait/verify.go b/cmd/gait/verify.go index 4a2e63c..dbecd6a 100644 --- a/cmd/gait/verify.go +++ b/cmd/gait/verify.go @@ -668,7 +668,7 @@ func printUsage() { fmt.Println(" gait run session status --journal [--json] [--explain]") fmt.Println(" gait run session checkpoint --journal --out [--chain-out ] [--json] [--explain]") fmt.Println(" gait run session compact --journal [--out ] [--dry-run] [--json] [--explain]") - fmt.Println(" gait job submit --id [--json] [--explain]") + fmt.Println(" gait job submit --id [--policy |--policy-digest ] [--identity ] [--json] [--explain]") fmt.Println(" gait job status --id [--json] [--explain]") fmt.Println(" gait pack build --type --from [--json] [--explain]") fmt.Println(" gait pack verify [--profile standard|strict] [--json] [--explain]") @@ -689,6 +689,7 @@ func printUsage() { fmt.Println(" gait registry install --source --allow-host [--json] [--explain]") fmt.Println(" gait registry list [--cache-dir ] [--json] [--explain]") fmt.Println(" gait registry verify --path [--cache-dir ] [--json] [--explain]") + fmt.Println(" gait gateway ingest --source --log-path [--proof-out ] [--json] [--explain]") fmt.Println(" gait migrate [--out ] [--json] [--explain]") fmt.Println(" gait mcp proxy --policy --call [--adapter mcp|openai|anthropic|langchain|claude_code] [--json] [--explain]") fmt.Println(" gait mcp bridge --policy --call [--adapter mcp|openai|anthropic|langchain|claude_code] [--json] [--explain]") diff --git a/core/gate/intent.go b/core/gate/intent.go index aaa9acc..f66c1b6 100644 --- 
a/core/gate/intent.go +++ b/core/gate/intent.go @@ -50,6 +50,15 @@ var ( "ui.navigate": {}, "other": {}, } + allowedDiscoveryMethods = map[string]struct{}{ + "webmcp": {}, + "dynamic_mcp": {}, + "a2a": {}, + "mcp": {}, + "static_mcp": {}, + "manual": {}, + "unknown": {}, + } hexDigestPattern = regexp.MustCompile(`^[a-f0-9]{64}$`) ) @@ -292,6 +301,7 @@ func normalizeTargets(toolName string, targets []schemagate.IntentTarget) ([]sch sensitivity := strings.ToLower(strings.TrimSpace(target.Sensitivity)) endpointClass := strings.ToLower(strings.TrimSpace(target.EndpointClass)) endpointDomain := strings.ToLower(strings.TrimSpace(target.EndpointDomain)) + discoveryMethod := normalizeDiscoveryMethod(target.DiscoveryMethod) if kind == "" || value == "" { return nil, fmt.Errorf("targets require kind and value") @@ -308,21 +318,47 @@ func normalizeTargets(toolName string, targets []schemagate.IntentTarget) ([]sch if endpointDomain == "" { endpointDomain = inferEndpointDomain(kind, value) } - destructive := target.Destructive || inferDestructive(endpointClass, operation) + if discoveryMethod != "" { + if _, ok := allowedDiscoveryMethods[discoveryMethod]; !ok { + return nil, fmt.Errorf("unsupported discovery method: %s", discoveryMethod) + } + } + destructive := target.Destructive || target.DestructiveHint || inferDestructive(endpointClass, operation) normalized := schemagate.IntentTarget{ - Kind: kind, - Value: value, - Operation: operation, - Sensitivity: sensitivity, - EndpointClass: endpointClass, - EndpointDomain: endpointDomain, - Destructive: destructive, + Kind: kind, + Value: value, + Operation: operation, + Sensitivity: sensitivity, + EndpointClass: endpointClass, + EndpointDomain: endpointDomain, + Destructive: destructive, + DiscoveryMethod: discoveryMethod, + ReadOnlyHint: target.ReadOnlyHint, + DestructiveHint: target.DestructiveHint, + IdempotentHint: target.IdempotentHint, + OpenWorldHint: target.OpenWorldHint, } destructiveKey := "0" if destructive { 
destructiveKey = "1" } + readOnlyKey := "0" + if normalized.ReadOnlyHint { + readOnlyKey = "1" + } + destructiveHintKey := "0" + if normalized.DestructiveHint { + destructiveHintKey = "1" + } + idempotentHintKey := "0" + if normalized.IdempotentHint { + idempotentHintKey = "1" + } + openWorldHintKey := "0" + if normalized.OpenWorldHint { + openWorldHintKey = "1" + } key := strings.Join([]string{ normalized.Kind, normalized.Value, @@ -331,6 +367,11 @@ func normalizeTargets(toolName string, targets []schemagate.IntentTarget) ([]sch normalized.EndpointClass, normalized.EndpointDomain, destructiveKey, + normalized.DiscoveryMethod, + readOnlyKey, + destructiveHintKey, + idempotentHintKey, + openWorldHintKey, }, "\x00") if _, ok := seen[key]; ok { continue @@ -360,11 +401,34 @@ func normalizeTargets(toolName string, targets []schemagate.IntentTarget) ([]sch if out[i].Destructive != out[j].Destructive { return !out[i].Destructive && out[j].Destructive } + if out[i].DiscoveryMethod != out[j].DiscoveryMethod { + return out[i].DiscoveryMethod < out[j].DiscoveryMethod + } + if out[i].ReadOnlyHint != out[j].ReadOnlyHint { + return !out[i].ReadOnlyHint && out[j].ReadOnlyHint + } + if out[i].DestructiveHint != out[j].DestructiveHint { + return !out[i].DestructiveHint && out[j].DestructiveHint + } + if out[i].IdempotentHint != out[j].IdempotentHint { + return !out[i].IdempotentHint && out[j].IdempotentHint + } + if out[i].OpenWorldHint != out[j].OpenWorldHint { + return !out[i].OpenWorldHint && out[j].OpenWorldHint + } return false }) return out, nil } +func normalizeDiscoveryMethod(value string) string { + method := strings.ToLower(strings.TrimSpace(value)) + if method == "" { + return "" + } + return strings.NewReplacer("-", "_", " ", "_").Replace(method) +} + func inferEndpointClass(kind string, operation string, toolName string) string { switch kind { case "path": diff --git a/core/gate/intent_test.go b/core/gate/intent_test.go index f320437..8d6e014 100644 --- 
a/core/gate/intent_test.go +++ b/core/gate/intent_test.go @@ -610,6 +610,35 @@ func TestNormalizeTargetsAndProvenanceErrors(t *testing.T) { if _, err := normalizeTargets("tool.demo", []schemagate.IntentTarget{{Kind: "path", Value: "/tmp/out", EndpointClass: "invalid"}}); err == nil { t.Fatalf("expected unsupported endpoint class to fail") } + targets, err := normalizeTargets("tool.demo", []schemagate.IntentTarget{{ + Kind: "path", + Value: "/tmp/out", + Operation: "write", + DiscoveryMethod: " WebMCP ", + ReadOnlyHint: true, + DestructiveHint: true, + IdempotentHint: true, + OpenWorldHint: true, + }}) + if err != nil { + t.Fatalf("normalize targets with discovery method and annotations: %v", err) + } + if len(targets) != 1 { + t.Fatalf("expected one normalized target, got %#v", targets) + } + if targets[0].DiscoveryMethod != "webmcp" { + t.Fatalf("expected normalized discovery method webmcp, got %#v", targets[0]) + } + if !targets[0].ReadOnlyHint || !targets[0].DestructiveHint || !targets[0].IdempotentHint || !targets[0].OpenWorldHint { + t.Fatalf("expected annotation hints to be preserved, got %#v", targets[0]) + } + if _, err := normalizeTargets("tool.demo", []schemagate.IntentTarget{{ + Kind: "path", + Value: "/tmp/out", + DiscoveryMethod: "unsupported", + }}); err == nil { + t.Fatalf("expected unsupported discovery method to fail") + } if provenance, err := normalizeArgProvenance(nil); err != nil || len(provenance) != 0 { t.Fatalf("expected empty provenance, got entries=%#v err=%v", provenance, err) diff --git a/core/gate/policy.go b/core/gate/policy.go index 1165ea0..cc1cd96 100644 --- a/core/gate/policy.go +++ b/core/gate/policy.go @@ -63,6 +63,7 @@ type Policy struct { SchemaID string `yaml:"schema_id"` SchemaVersion string `yaml:"schema_version"` DefaultVerdict string `yaml:"default_verdict"` + DefaultAction string `yaml:"default_action"` Scripts ScriptPolicy `yaml:"scripts"` FailClosed FailClosedPolicy `yaml:"fail_closed"` Rules []PolicyRule `yaml:"rules"` 
@@ -84,6 +85,7 @@ type PolicyRule struct { Name string `yaml:"name"` Priority int `yaml:"priority"` Effect string `yaml:"effect"` + Action string `yaml:"action"` Match PolicyMatch `yaml:"match"` Endpoint EndpointPolicy `yaml:"endpoint"` ReasonCodes []string `yaml:"reason_codes"` @@ -131,29 +133,40 @@ type EndpointPolicy struct { } type PolicyMatch struct { - ToolNames []string `yaml:"tool_names"` - RiskClasses []string `yaml:"risk_classes"` - TargetKinds []string `yaml:"target_kinds"` - TargetValues []string `yaml:"target_values"` - EndpointClasses []string `yaml:"endpoint_classes"` - SkillPublishers []string `yaml:"skill_publishers"` - SkillSources []string `yaml:"skill_sources"` - DataClasses []string `yaml:"data_classes"` - DestinationKinds []string `yaml:"destination_kinds"` - DestinationValues []string `yaml:"destination_values"` - DestinationOps []string `yaml:"destination_operations"` - ProvenanceSources []string `yaml:"provenance_sources"` - Identities []string `yaml:"identities"` - WorkspacePrefixes []string `yaml:"workspace_prefixes"` - ContextToolNames []string `yaml:"context_tool_names"` - ContextDataClasses []string `yaml:"context_data_classes"` - ContextEndpointClasses []string `yaml:"context_endpoint_classes"` - ContextAutonomyLevels []string `yaml:"context_autonomy_levels"` - RequireDelegation bool `yaml:"require_delegation"` - AllowedDelegatorIdentities []string `yaml:"allowed_delegator_identities"` - AllowedDelegateIdentities []string `yaml:"allowed_delegate_identities"` - DelegationScopes []string `yaml:"delegation_scopes"` - MaxDelegationDepth *int `yaml:"max_delegation_depth"` + ToolName string `yaml:"tool_name"` + ToolNames []string `yaml:"tool_names"` + RiskClasses []string `yaml:"risk_classes"` + TargetKinds []string `yaml:"target_kinds"` + TargetValues []string `yaml:"target_values"` + EndpointClass []string `yaml:"endpoint_class"` + EndpointClasses []string `yaml:"endpoint_classes"` + DiscoveryMethods []string `yaml:"discovery_method"` + 
ToolAnnotations ToolAnnotationMatch `yaml:"tool_annotations"` + SkillPublishers []string `yaml:"skill_publishers"` + SkillSources []string `yaml:"skill_sources"` + DataClasses []string `yaml:"data_classes"` + DestinationKinds []string `yaml:"destination_kinds"` + DestinationValues []string `yaml:"destination_values"` + DestinationOps []string `yaml:"destination_operations"` + ProvenanceSources []string `yaml:"provenance_sources"` + Identities []string `yaml:"identities"` + WorkspacePrefixes []string `yaml:"workspace_prefixes"` + ContextToolNames []string `yaml:"context_tool_names"` + ContextDataClasses []string `yaml:"context_data_classes"` + ContextEndpointClasses []string `yaml:"context_endpoint_classes"` + ContextAutonomyLevels []string `yaml:"context_autonomy_levels"` + RequireDelegation bool `yaml:"require_delegation"` + AllowedDelegatorIdentities []string `yaml:"allowed_delegator_identities"` + AllowedDelegateIdentities []string `yaml:"allowed_delegate_identities"` + DelegationScopes []string `yaml:"delegation_scopes"` + MaxDelegationDepth *int `yaml:"max_delegation_depth"` +} + +type ToolAnnotationMatch struct { + ReadOnlyHint *bool `yaml:"readOnlyHint"` + DestructiveHint *bool `yaml:"destructiveHint"` + IdempotentHint *bool `yaml:"idempotentHint"` + OpenWorldHint *bool `yaml:"openWorldHint"` } type EvalOptions struct { @@ -584,6 +597,12 @@ func policyDigestPayload(policy Policy) map[string]any { if len(rule.Match.EndpointClasses) > 0 { matchPayload["EndpointClasses"] = rule.Match.EndpointClasses } + if len(rule.Match.DiscoveryMethods) > 0 { + matchPayload["DiscoveryMethods"] = rule.Match.DiscoveryMethods + } + if toolAnnotationsPayload, ok := toolAnnotationDigestPayload(rule.Match.ToolAnnotations); ok { + matchPayload["ToolAnnotations"] = toolAnnotationsPayload + } if len(rule.Match.SkillPublishers) > 0 { matchPayload["SkillPublishers"] = rule.Match.SkillPublishers } @@ -727,6 +746,26 @@ func policyDigestPayload(policy Policy) map[string]any { return 
payload } +func toolAnnotationDigestPayload(annotations ToolAnnotationMatch) (map[string]any, bool) { + payload := map[string]any{} + if annotations.ReadOnlyHint != nil { + payload["ReadOnlyHint"] = *annotations.ReadOnlyHint + } + if annotations.DestructiveHint != nil { + payload["DestructiveHint"] = *annotations.DestructiveHint + } + if annotations.IdempotentHint != nil { + payload["IdempotentHint"] = *annotations.IdempotentHint + } + if annotations.OpenWorldHint != nil { + payload["OpenWorldHint"] = *annotations.OpenWorldHint + } + if len(payload) == 0 { + return nil, false + } + return payload, true +} + func isHighRiskActionRule(rule PolicyRule) bool { if rule.Effect == "block" { return false @@ -755,6 +794,13 @@ func normalizePolicy(input Policy) (Policy, error) { } output.DefaultVerdict = strings.ToLower(strings.TrimSpace(output.DefaultVerdict)) + defaultAction := strings.ToLower(strings.TrimSpace(output.DefaultAction)) + if output.DefaultVerdict == "" { + output.DefaultVerdict = defaultAction + } + if defaultAction != "" && output.DefaultVerdict != defaultAction { + return Policy{}, fmt.Errorf("default_action must match default_verdict when both are set") + } if output.DefaultVerdict == "" { output.DefaultVerdict = defaultVerdict } @@ -788,6 +834,13 @@ func normalizePolicy(input Policy) (Policy, error) { } rule.Effect = strings.ToLower(strings.TrimSpace(rule.Effect)) + action := strings.ToLower(strings.TrimSpace(rule.Action)) + if rule.Effect == "" { + rule.Effect = action + } + if action != "" && rule.Effect != action { + return Policy{}, fmt.Errorf("rule action must match effect for %s", rule.Name) + } if rule.Effect == "" { return Policy{}, fmt.Errorf("rule effect is required for %s", rule.Name) } @@ -795,16 +848,26 @@ func normalizePolicy(input Policy) (Policy, error) { return Policy{}, fmt.Errorf("invalid rule effect %q for %s", rule.Effect, rule.Name) } + if aliasToolName := strings.TrimSpace(rule.Match.ToolName); aliasToolName != "" { + 
rule.Match.ToolNames = append(rule.Match.ToolNames, aliasToolName) + } rule.Match.ToolNames = normalizeStringListLower(rule.Match.ToolNames) rule.Match.RiskClasses = normalizeStringListLower(rule.Match.RiskClasses) rule.Match.TargetKinds = normalizeStringListLower(rule.Match.TargetKinds) rule.Match.TargetValues = normalizeStringList(rule.Match.TargetValues) + rule.Match.EndpointClasses = append(rule.Match.EndpointClasses, rule.Match.EndpointClass...) rule.Match.EndpointClasses = normalizeStringListLower(rule.Match.EndpointClasses) for _, endpointClass := range rule.Match.EndpointClasses { if _, ok := allowedEndpointClasses[endpointClass]; !ok { return Policy{}, fmt.Errorf("unsupported match endpoint_class %q for %s", endpointClass, rule.Name) } } + rule.Match.DiscoveryMethods = normalizeStringListLower(rule.Match.DiscoveryMethods) + for _, discoveryMethod := range rule.Match.DiscoveryMethods { + if _, ok := allowedDiscoveryMethods[discoveryMethod]; !ok { + return Policy{}, fmt.Errorf("unsupported match discovery_method %q for %s", discoveryMethod, rule.Name) + } + } rule.Match.SkillPublishers = normalizeStringListLower(rule.Match.SkillPublishers) rule.Match.SkillSources = normalizeStringListLower(rule.Match.SkillSources) rule.Match.DataClasses = normalizeStringListLower(rule.Match.DataClasses) @@ -1028,6 +1091,21 @@ func ruleMatches(match PolicyMatch, intent schemagate.IntentRequest) bool { return false } } + if len(match.DiscoveryMethods) > 0 { + discoveryMethodMatched := false + for _, target := range intent.Targets { + if contains(match.DiscoveryMethods, target.DiscoveryMethod) { + discoveryMethodMatched = true + break + } + } + if !discoveryMethodMatched { + return false + } + } + if !toolAnnotationsMatch(match.ToolAnnotations, intent.Targets) { + return false + } if len(match.SkillPublishers) > 0 { if intent.SkillProvenance == nil || !contains(match.SkillPublishers, strings.ToLower(strings.TrimSpace(intent.SkillProvenance.Publisher))) { return false @@ -1104,6 
+1182,58 @@ func ruleMatches(match PolicyMatch, intent schemagate.IntentRequest) bool { return true } +func toolAnnotationsMatch(match ToolAnnotationMatch, targets []schemagate.IntentTarget) bool { + if match.ReadOnlyHint != nil { + matched := false + for _, target := range targets { + if target.ReadOnlyHint == *match.ReadOnlyHint { + matched = true + break + } + } + if !matched { + return false + } + } + if match.DestructiveHint != nil { + matched := false + for _, target := range targets { + if target.DestructiveHint == *match.DestructiveHint { + matched = true + break + } + } + if !matched { + return false + } + } + if match.IdempotentHint != nil { + matched := false + for _, target := range targets { + if target.IdempotentHint == *match.IdempotentHint { + matched = true + break + } + } + if !matched { + return false + } + } + if match.OpenWorldHint != nil { + matched := false + for _, target := range targets { + if target.OpenWorldHint == *match.OpenWorldHint { + matched = true + break + } + } + if !matched { + return false + } + } + return true +} + func delegationMatches(match PolicyMatch, delegation *schemagate.IntentDelegation) bool { delegationConstrained := match.RequireDelegation || len(match.AllowedDelegatorIdentities) > 0 || diff --git a/core/gate/policy_test.go b/core/gate/policy_test.go index 599d73a..a1faa74 100644 --- a/core/gate/policy_test.go +++ b/core/gate/policy_test.go @@ -62,6 +62,47 @@ fail_closed: } } +func TestParsePolicyYAMLAliasFields(t *testing.T) { + policyYAML := []byte(` +default_action: block +rules: + - name: block-dynamic-destructive + action: block + match: + tool_name: "Postgres_Query" + endpoint_class: [proc.exec, fs.delete] + discovery_method: [webmcp, dynamic_mcp] + tool_annotations: + destructiveHint: true +`) + policy, err := ParsePolicyYAML(policyYAML) + if err != nil { + t.Fatalf("parse policy aliases: %v", err) + } + if policy.DefaultVerdict != "block" { + t.Fatalf("expected default verdict from default_action alias, 
got %q", policy.DefaultVerdict) + } + if len(policy.Rules) != 1 { + t.Fatalf("expected one rule, got %d", len(policy.Rules)) + } + rule := policy.Rules[0] + if rule.Effect != "block" { + t.Fatalf("expected rule effect from action alias, got %q", rule.Effect) + } + if !reflect.DeepEqual(rule.Match.ToolNames, []string{"postgres_query"}) { + t.Fatalf("unexpected normalized tool_names alias expansion: %#v", rule.Match.ToolNames) + } + if !reflect.DeepEqual(rule.Match.EndpointClasses, []string{"fs.delete", "proc.exec"}) { + t.Fatalf("unexpected endpoint_class alias expansion: %#v", rule.Match.EndpointClasses) + } + if !reflect.DeepEqual(rule.Match.DiscoveryMethods, []string{"dynamic_mcp", "webmcp"}) { + t.Fatalf("unexpected discovery methods: %#v", rule.Match.DiscoveryMethods) + } + if rule.Match.ToolAnnotations.DestructiveHint == nil || !*rule.Match.ToolAnnotations.DestructiveHint { + t.Fatalf("expected destructive tool annotation hint match to be preserved") + } +} + func TestParsePolicyValidationErrors(t *testing.T) { tests := []struct { name string @@ -71,6 +112,13 @@ func TestParsePolicyValidationErrors(t *testing.T) { name: "invalid_default_verdict", yaml: `default_verdict: nope`, }, + { + name: "default_action_conflicts_default_verdict", + yaml: ` +default_verdict: allow +default_action: block +`, + }, { name: "unknown_top_level_field", yaml: `default_verdit: allow`, @@ -81,6 +129,15 @@ func TestParsePolicyValidationErrors(t *testing.T) { rules: - name: bad-rule effect: nope +`, + }, + { + name: "rule_action_conflicts_effect", + yaml: ` +rules: + - name: bad-rule + effect: allow + action: block `, }, { @@ -151,6 +208,16 @@ rules: effect: allow match: endpoint_classes: [net.invalid] +`, + }, + { + name: "invalid_discovery_method_match", + yaml: ` +rules: + - name: bad-discovery-method + effect: allow + match: + discovery_method: [unsupported] `, }, } @@ -432,7 +499,16 @@ func TestRuleMatchesCoverage(t *testing.T) { Publisher: "acme", } intent.Targets = 
[]schemagate.IntentTarget{ - {Kind: "host", Value: "api.external.com", Operation: "write", Sensitivity: "confidential", EndpointClass: "net.http"}, + { + Kind: "host", + Value: "api.external.com", + Operation: "write", + Sensitivity: "confidential", + EndpointClass: "net.http", + DiscoveryMethod: "webmcp", + ReadOnlyHint: true, + IdempotentHint: true, + }, } normalizedIntent, err := NormalizeIntent(intent) if err != nil { @@ -449,6 +525,11 @@ func TestRuleMatchesCoverage(t *testing.T) { DestinationValues: []string{"api.external.com"}, DestinationOps: []string{"write"}, EndpointClasses: []string{"net.http"}, + DiscoveryMethods: []string{"webmcp"}, + ToolAnnotations: ToolAnnotationMatch{ + ReadOnlyHint: boolPtr(true), + IdempotentHint: boolPtr(true), + }, SkillPublishers: []string{"acme"}, SkillSources: []string{"registry"}, ProvenanceSources: []string{"external"}, @@ -469,6 +550,8 @@ func TestRuleMatchesCoverage(t *testing.T) { {DestinationValues: []string{"internal.local"}}, {DestinationOps: []string{"read"}}, {EndpointClasses: []string{"fs.delete"}}, + {DiscoveryMethods: []string{"dynamic_mcp"}}, + {ToolAnnotations: ToolAnnotationMatch{ReadOnlyHint: boolPtr(false)}}, {SkillPublishers: []string{"unknown"}}, {SkillSources: []string{"git"}}, {ProvenanceSources: []string{"user"}}, @@ -980,7 +1063,7 @@ rules: func TestPolicyDigestPayloadIncludesExtendedFields(t *testing.T) { policy, err := ParsePolicyYAML([]byte(` -default_verdict: allow +default_verdict: allow rules: - name: rich-rule effect: require_approval @@ -1005,6 +1088,10 @@ rules: match: tool_names: [tool.write] endpoint_classes: [net.http] + discovery_method: [webmcp] + tool_annotations: + readOnlyHint: true + idempotentHint: true data_classes: [confidential] destination_kinds: [host] destination_values: [api.external.com] @@ -1046,6 +1133,16 @@ rules: if _, ok := ruleAny["MinApprovals"]; !ok { t.Fatalf("expected min_approvals payload to be present: %#v", ruleAny) } + matchAny, ok := 
ruleAny["Match"].(map[string]any) + if !ok { + t.Fatalf("expected match payload map, got %#v", ruleAny["Match"]) + } + if _, ok := matchAny["DiscoveryMethods"]; !ok { + t.Fatalf("expected discovery methods payload to be present: %#v", matchAny) + } + if _, ok := matchAny["ToolAnnotations"]; !ok { + t.Fatalf("expected tool annotations payload to be present: %#v", matchAny) + } } func TestEvaluatePolicyDelegationConstraints(t *testing.T) { @@ -1516,3 +1613,7 @@ func baseIntent() schemagate.IntentRequest { }, } } + +func boolPtr(value bool) *bool { + return &value +} diff --git a/core/gateway/ingest.go b/core/gateway/ingest.go new file mode 100644 index 0000000..acbef9e --- /dev/null +++ b/core/gateway/ingest.go @@ -0,0 +1,496 @@ +package gateway + +import ( + "bufio" + "bytes" + "crypto/ed25519" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "fmt" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + "time" + + "github.com/Clyra-AI/gait/core/fsx" + gaitjcs "github.com/Clyra-AI/proof/canon" + proofrecord "github.com/Clyra-AI/proof/core/record" + proofschema "github.com/Clyra-AI/proof/core/schema" + sign "github.com/Clyra-AI/proof/signing" +) + +const ( + SourceKong = "kong" + SourceDocker = "docker" + SourceMintMCP = "mintmcp" +) + +var supportedSources = map[string]struct{}{ + SourceKong: {}, + SourceDocker: {}, + SourceMintMCP: {}, +} + +var deterministicGatewayEpoch = time.Date(1980, time.January, 1, 0, 0, 0, 0, time.UTC) + +type IngestOptions struct { + Source string + LogPath string + OutputPath string + ProducerVersion string + SigningPrivateKey ed25519.PrivateKey +} + +type IngestResult struct { + Source string `json:"source"` + LogPath string `json:"log_path"` + ProofRecordsOut string `json:"proof_records_out"` + InputEvents int `json:"input_events"` + OutputRecords int `json:"output_records"` +} + +type gatewayEvent struct { + Timestamp time.Time + ToolName string + Verdict string + PolicyDigest string + ReasonCodes []string + RequestID string + 
Identity string + Path string + StatusCode int + RawDigest string +} + +func IngestLogs(opts IngestOptions) (IngestResult, error) { + source := strings.ToLower(strings.TrimSpace(opts.Source)) + if _, ok := supportedSources[source]; !ok { + return IngestResult{}, fmt.Errorf("unsupported gateway source: %s", opts.Source) + } + logPath, err := normalizePath(opts.LogPath) + if err != nil { + return IngestResult{}, fmt.Errorf("log path: %w", err) + } + outputPath, err := resolveOutputPath(opts.OutputPath, logPath, source) + if err != nil { + return IngestResult{}, fmt.Errorf("proof output path: %w", err) + } + producerVersion := strings.TrimSpace(opts.ProducerVersion) + if producerVersion == "" { + producerVersion = "0.0.0-dev" + } + + // #nosec G304 -- ingest path is explicit local user input. + raw, err := os.ReadFile(logPath) + if err != nil { + return IngestResult{}, fmt.Errorf("read gateway logs: %w", err) + } + scanner := bufio.NewScanner(bytes.NewReader(raw)) + scanner.Buffer(make([]byte, 0, 64*1024), 10*1024*1024) + + lines := make([][]byte, 0) + previousHash := "" + inputEvents := 0 + outputRecords := 0 + for lineNo := 1; scanner.Scan(); lineNo++ { + trimmedLine := strings.TrimSpace(scanner.Text()) + if trimmedLine == "" { + continue + } + inputEvents++ + event, err := parseGatewayEvent(source, trimmedLine, lineNo) + if err != nil { + return IngestResult{}, fmt.Errorf("parse gateway log line %d: %w", lineNo, err) + } + recordItem, err := proofrecord.New(proofrecord.RecordOpts{ + RecordVersion: "1.0", + Timestamp: event.Timestamp, + Source: "gait.gateway." 
+ source, + SourceProduct: "gait", + AgentID: nonEmptyOrDefault(event.Identity, source), + Type: "policy_enforcement", + Event: map[string]any{ + "gateway_source": source, + "gateway_log_digest": event.RawDigest, + "gateway_request_id": event.RequestID, + "gateway_status_code": event.StatusCode, + "path": event.Path, + "policy_digest": event.PolicyDigest, + "producer_version": producerVersion, + "reason_codes": event.ReasonCodes, + "tool_name": event.ToolName, + "verdict": event.Verdict, + }, + Controls: proofrecord.Controls{ + PermissionsEnforced: event.Verdict == "block" || event.Verdict == "require_approval", + }, + Metadata: map[string]any{ + "artifact_kind": "gait.gateway.policy_enforcement", + "gateway_input_line": lineNo, + "gateway_log_path": logPath, + }, + }) + if err != nil { + return IngestResult{}, fmt.Errorf("build proof record line %d: %w", lineNo, err) + } + if previousHash != "" { + recordItem.Integrity.PreviousRecordHash = previousHash + recordHash, hashErr := proofrecord.ComputeHash(recordItem) + if hashErr != nil { + return IngestResult{}, fmt.Errorf("compute chained record hash line %d: %w", lineNo, hashErr) + } + recordItem.Integrity.RecordHash = recordHash + } + if len(opts.SigningPrivateKey) > 0 { + recordSig := sign.SignBytes(opts.SigningPrivateKey, []byte(recordItem.Integrity.RecordHash)) + recordItem.Integrity.SigningKeyID = recordSig.KeyID + recordItem.Integrity.Signature = "base64:" + recordSig.Sig + } + canonicalLine, err := canonicalJSON(recordItem) + if err != nil { + return IngestResult{}, fmt.Errorf("encode proof record line %d: %w", lineNo, err) + } + if err := proofschema.ValidateRecord(canonicalLine, recordItem.RecordType); err != nil { + return IngestResult{}, fmt.Errorf("validate proof record line %d: %w", lineNo, err) + } + lines = append(lines, canonicalLine) + previousHash = recordItem.Integrity.RecordHash + outputRecords++ + } + if err := scanner.Err(); err != nil { + return IngestResult{}, fmt.Errorf("read gateway logs: 
%w", err) + } + + payload := bytes.Join(lines, []byte{'\n'}) + if len(payload) > 0 { + payload = append(payload, '\n') + } + if err := os.MkdirAll(filepath.Dir(outputPath), 0o750); err != nil { + return IngestResult{}, fmt.Errorf("create proof output directory: %w", err) + } + if err := fsx.WriteFileAtomic(outputPath, payload, 0o644); err != nil { + return IngestResult{}, fmt.Errorf("write proof records: %w", err) + } + return IngestResult{ + Source: source, + LogPath: logPath, + ProofRecordsOut: outputPath, + InputEvents: inputEvents, + OutputRecords: outputRecords, + }, nil +} + +func parseGatewayEvent(source string, line string, lineNo int) (gatewayEvent, error) { + payload, err := decodeLinePayload(source, line) + if err != nil { + return gatewayEvent{}, err + } + timestamp := extractEventTimestamp(payload, lineNo) + statusCode, _ := extractInt(payload, "status", "status_code", "response.status", "http.status") + verdict := normalizeVerdict(firstNonEmpty( + extractString(payload, "verdict", "decision", "action", "outcome"), + verdictFromStatus(statusCode), + )) + toolName := firstNonEmpty(extractString(payload, "tool_name", "tool", "route.name", "service.name", "request.path", "request.uri", "path"), "gateway.unknown") + reasonCodes := extractReasonCodes(payload) + return gatewayEvent{ + Timestamp: timestamp, + ToolName: toolName, + Verdict: verdict, + PolicyDigest: extractString(payload, "policy_digest", "policy.hash", "policy.id", "policy_version"), + ReasonCodes: reasonCodes, + RequestID: extractString(payload, "request_id", "request.id", "correlation_id", "trace_id"), + Identity: extractString(payload, "identity", "consumer.username", "user", "actor"), + Path: extractString(payload, "path", "request.path", "request.uri"), + StatusCode: statusCode, + RawDigest: sha256Hex([]byte(line)), + }, nil +} + +func decodeLinePayload(source string, line string) (map[string]any, error) { + var payload map[string]any + if err := json.Unmarshal([]byte(line), &payload); 
err != nil { + if source == SourceDocker { + return map[string]any{ + "log": line, + }, nil + } + return nil, fmt.Errorf("line is not valid JSON") + } + if source == SourceDocker { + if nestedRaw, ok := payload["log"].(string); ok { + trimmedNested := strings.TrimSpace(nestedRaw) + if trimmedNested != "" { + var nestedPayload map[string]any + if err := json.Unmarshal([]byte(trimmedNested), &nestedPayload); err == nil { + for key, value := range payload { + if key == "log" { + continue + } + if _, exists := nestedPayload[key]; !exists { + nestedPayload[key] = value + } + } + payload = nestedPayload + } + } + } + } + return payload, nil +} + +func resolveOutputPath(rawOutputPath string, logPath string, source string) (string, error) { + trimmed := strings.TrimSpace(rawOutputPath) + if trimmed == "" { + baseDir := filepath.Dir(logPath) + return filepath.Join(baseDir, fmt.Sprintf("gateway_%s_policy_enforcement.jsonl", source)), nil + } + return normalizePath(trimmed) +} + +func normalizePath(path string) (string, error) { + trimmed := strings.TrimSpace(path) + if trimmed == "" { + return "", fmt.Errorf("path is required") + } + cleanPath := filepath.Clean(trimmed) + if cleanPath == "." 
{ + return "", fmt.Errorf("path must not resolve to current directory") + } + return cleanPath, nil +} + +func extractEventTimestamp(payload map[string]any, lineNo int) time.Time { + for _, key := range []string{"timestamp", "time", "ts", "started_at", "request.timestamp", "request.time"} { + if value, ok := extractValue(payload, key); ok { + if timestamp, parsed := parseTimestamp(value); parsed { + return timestamp + } + } + } + return deterministicGatewayEpoch.Add(time.Duration(lineNo) * time.Second) +} + +func parseTimestamp(value any) (time.Time, bool) { + switch typed := value.(type) { + case string: + trimmed := strings.TrimSpace(typed) + if trimmed == "" { + return time.Time{}, false + } + layouts := []string{ + time.RFC3339Nano, + time.RFC3339, + "2006-01-02 15:04:05", + } + for _, layout := range layouts { + if parsed, err := time.Parse(layout, trimmed); err == nil { + return parsed.UTC(), true + } + } + if seconds, err := strconv.ParseInt(trimmed, 10, 64); err == nil { + return parseUnixTimestamp(seconds), true + } + case float64: + return parseUnixTimestamp(int64(typed)), true + case json.Number: + parsed, err := typed.Int64() + if err == nil { + return parseUnixTimestamp(parsed), true + } + case int: + return parseUnixTimestamp(int64(typed)), true + case int64: + return parseUnixTimestamp(typed), true + } + return time.Time{}, false +} + +func parseUnixTimestamp(value int64) time.Time { + if value > 1_000_000_000_000 { + return time.UnixMilli(value).UTC() + } + return time.Unix(value, 0).UTC() +} + +func normalizeVerdict(value string) string { + switch strings.ToLower(strings.TrimSpace(value)) { + case "allow", "allowed", "permit", "permitted", "ok", "success": + return "allow" + case "block", "blocked", "deny", "denied", "reject", "rejected", "forbidden", "error": + return "block" + case "require_approval", "approval_required", "needs_approval": + return "require_approval" + case "dry_run", "observe", "simulate": + return "dry_run" + default: + return 
"unknown" + } +} + +func verdictFromStatus(statusCode int) string { + if statusCode >= 400 && statusCode != 0 { + return "block" + } + if statusCode >= 200 && statusCode < 400 { + return "allow" + } + return "" +} + +func extractReasonCodes(payload map[string]any) []string { + codes := normalizeStringSlice( + extractStrings(payload, "reason_codes", "reasons", "policy.reason_codes"), + ) + if reasonCode := extractString(payload, "reason_code", "policy.reason_code"); reasonCode != "" { + codes = append(codes, reasonCode) + } + return normalizeStringSlice(codes) +} + +func normalizeStringSlice(values []string) []string { + if len(values) == 0 { + return nil + } + seen := map[string]struct{}{} + out := make([]string, 0, len(values)) + for _, value := range values { + trimmed := strings.TrimSpace(value) + if trimmed == "" { + continue + } + if _, ok := seen[trimmed]; ok { + continue + } + seen[trimmed] = struct{}{} + out = append(out, trimmed) + } + sort.Strings(out) + if len(out) == 0 { + return nil + } + return out +} + +func extractStrings(payload map[string]any, keys ...string) []string { + for _, key := range keys { + value, ok := extractValue(payload, key) + if !ok { + continue + } + switch typed := value.(type) { + case []string: + return typed + case []any: + out := make([]string, 0, len(typed)) + for _, item := range typed { + switch text := item.(type) { + case string: + out = append(out, text) + case fmt.Stringer: + out = append(out, text.String()) + } + } + return out + case string: + return []string{typed} + } + } + return nil +} + +func extractString(payload map[string]any, keys ...string) string { + for _, key := range keys { + value, ok := extractValue(payload, key) + if !ok { + continue + } + switch typed := value.(type) { + case string: + if trimmed := strings.TrimSpace(typed); trimmed != "" { + return trimmed + } + case fmt.Stringer: + if trimmed := strings.TrimSpace(typed.String()); trimmed != "" { + return trimmed + } + } + } + return "" +} + +func 
extractInt(payload map[string]any, keys ...string) (int, bool) { + for _, key := range keys { + value, ok := extractValue(payload, key) + if !ok { + continue + } + switch typed := value.(type) { + case int: + return typed, true + case int64: + return int(typed), true + case float64: + return int(typed), true + case json.Number: + if parsed, err := typed.Int64(); err == nil { + return int(parsed), true + } + case string: + if parsed, err := strconv.Atoi(strings.TrimSpace(typed)); err == nil { + return parsed, true + } + } + } + return 0, false +} + +func extractValue(payload map[string]any, path string) (any, bool) { + parts := strings.Split(path, ".") + var current any = payload + for _, part := range parts { + asMap, ok := current.(map[string]any) + if !ok { + return nil, false + } + value, exists := asMap[part] + if !exists { + return nil, false + } + current = value + } + return current, true +} + +func canonicalJSON(value any) ([]byte, error) { + raw, err := json.Marshal(value) + if err != nil { + return nil, fmt.Errorf("marshal json: %w", err) + } + return gaitjcs.CanonicalizeJSON(raw) +} + +func sha256Hex(raw []byte) string { + sum := sha256.Sum256(raw) + return "sha256:" + hex.EncodeToString(sum[:]) +} + +func firstNonEmpty(values ...string) string { + for _, value := range values { + if trimmed := strings.TrimSpace(value); trimmed != "" { + return trimmed + } + } + return "" +} + +func nonEmptyOrDefault(value string, fallback string) string { + trimmed := strings.TrimSpace(value) + if trimmed == "" { + return fallback + } + return trimmed +} diff --git a/core/gateway/ingest_test.go b/core/gateway/ingest_test.go new file mode 100644 index 0000000..e6186f2 --- /dev/null +++ b/core/gateway/ingest_test.go @@ -0,0 +1,137 @@ +package gateway + +import ( + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" + + proofrecord "github.com/Clyra-AI/proof/core/record" + sign "github.com/Clyra-AI/proof/signing" +) + +func 
TestIngestLogsKongProducesSignedPolicyEnforcementRecords(t *testing.T) { + workDir := t.TempDir() + logPath := filepath.Join(workDir, "kong.log.jsonl") + outPath := filepath.Join(workDir, "proof_records.jsonl") + logPayload := strings.Join([]string{ + `{"time":"2026-02-20T10:00:00Z","route":{"name":"tool.read"},"consumer":{"username":"alice"},"status":200,"request_id":"req-1","reason_codes":["allowed"]}`, + `{"time":"2026-02-20T10:00:01Z","route":{"name":"tool.write"},"consumer":{"username":"alice"},"status":403,"request_id":"req-2","reason_code":"blocked_by_policy"}`, + }, "\n") + if err := os.WriteFile(logPath, []byte(logPayload), 0o600); err != nil { + t.Fatalf("write kong logs: %v", err) + } + keyPair, err := sign.GenerateKeyPair() + if err != nil { + t.Fatalf("generate signing key pair: %v", err) + } + + result, err := IngestLogs(IngestOptions{ + Source: SourceKong, + LogPath: logPath, + OutputPath: outPath, + ProducerVersion: "0.0.0-test", + SigningPrivateKey: keyPair.Private, + }) + if err != nil { + t.Fatalf("ingest kong logs: %v", err) + } + if result.InputEvents != 2 || result.OutputRecords != 2 { + t.Fatalf("unexpected ingest counts: %#v", result) + } + + // #nosec G304 -- test reads explicit artifact path from TempDir. 
+ raw, err := os.ReadFile(outPath) + if err != nil { + t.Fatalf("read proof records: %v", err) + } + lines := strings.Split(strings.TrimSpace(string(raw)), "\n") + if len(lines) != 2 { + t.Fatalf("unexpected proof record line count: %d", len(lines)) + } + records := make([]proofrecord.Record, 0, len(lines)) + for index, line := range lines { + var recordItem proofrecord.Record + if err := json.Unmarshal([]byte(line), &recordItem); err != nil { + t.Fatalf("parse record line %d: %v", index+1, err) + } + records = append(records, recordItem) + if recordItem.RecordType != "policy_enforcement" { + t.Fatalf("expected policy_enforcement record type, got %#v", recordItem) + } + if recordItem.Source != "gait.gateway.kong" { + t.Fatalf("expected gateway source tag, got %#v", recordItem.Source) + } + if strings.TrimSpace(recordItem.Integrity.Signature) == "" || strings.TrimSpace(recordItem.Integrity.SigningKeyID) == "" { + t.Fatalf("expected signed record integrity payload, got %#v", recordItem.Integrity) + } + ok, verifyErr := sign.VerifyBytes(keyPair.Public, sign.Signature{ + Alg: sign.AlgEd25519, + KeyID: recordItem.Integrity.SigningKeyID, + Sig: strings.TrimPrefix(recordItem.Integrity.Signature, "base64:"), + }, []byte(recordItem.Integrity.RecordHash)) + if verifyErr != nil { + t.Fatalf("verify signature line %d: %v", index+1, verifyErr) + } + if !ok { + t.Fatalf("signature verification failed line %d", index+1) + } + } + if records[0].Integrity.PreviousRecordHash != "" { + t.Fatalf("expected first record previous hash to be empty, got %#v", records[0].Integrity) + } + if records[1].Integrity.PreviousRecordHash != records[0].Integrity.RecordHash { + t.Fatalf("expected second record previous hash to chain to first record hash") + } +} + +func TestIngestLogsDockerNestedPayload(t *testing.T) { + workDir := t.TempDir() + logPath := filepath.Join(workDir, "docker.log.jsonl") + inner := 
`{"tool_name":"tool.write","verdict":"block","request_id":"req-docker-1","reason_codes":["policy_blocked"]}` + if err := os.WriteFile(logPath, []byte(`{"log":"`+strings.ReplaceAll(inner, `"`, `\"`)+`\n","time":"2026-02-20T10:00:00Z"}`+"\n"), 0o600); err != nil { + t.Fatalf("write docker logs: %v", err) + } + + result, err := IngestLogs(IngestOptions{ + Source: SourceDocker, + LogPath: logPath, + ProducerVersion: "0.0.0-test", + }) + if err != nil { + t.Fatalf("ingest docker logs: %v", err) + } + if result.OutputRecords != 1 { + t.Fatalf("expected one output record, got %#v", result) + } + // #nosec G304 -- test reads explicit artifact path from TempDir. + raw, err := os.ReadFile(result.ProofRecordsOut) + if err != nil { + t.Fatalf("read proof records: %v", err) + } + var recordItem proofrecord.Record + if err := json.Unmarshal([]byte(strings.TrimSpace(string(raw))), &recordItem); err != nil { + t.Fatalf("parse docker proof record: %v", err) + } + if verdict, _ := recordItem.Event["verdict"].(string); verdict != "block" { + t.Fatalf("expected block verdict in proof record event, got %#v", recordItem.Event) + } + if toolName, _ := recordItem.Event["tool_name"].(string); toolName != "tool.write" { + t.Fatalf("expected tool_name to be carried into proof record event, got %#v", recordItem.Event) + } +} + +func TestIngestLogsErrors(t *testing.T) { + workDir := t.TempDir() + logPath := filepath.Join(workDir, "bad.log") + if err := os.WriteFile(logPath, []byte("not-json\n"), 0o600); err != nil { + t.Fatalf("write bad logs: %v", err) + } + if _, err := IngestLogs(IngestOptions{Source: "unknown", LogPath: logPath}); err == nil { + t.Fatalf("expected unknown source error") + } + if _, err := IngestLogs(IngestOptions{Source: SourceKong, LogPath: logPath}); err == nil { + t.Fatalf("expected parse error for invalid kong log format") + } +} diff --git a/core/jobruntime/runtime.go b/core/jobruntime/runtime.go index 81d7cb8..8075de3 100644 --- a/core/jobruntime/runtime.go +++ 
b/core/jobruntime/runtime.go @@ -50,12 +50,15 @@ const ( ) var ( - ErrJobNotFound = errors.New("job not found") - ErrInvalidTransition = errors.New("invalid transition") - ErrApprovalRequired = errors.New("approval required") - ErrEnvironmentMismatch = errors.New("environment fingerprint mismatch") - ErrInvalidCheckpoint = errors.New("invalid checkpoint") - ErrStateContention = errors.New("state contention") + ErrJobNotFound = errors.New("job not found") + ErrInvalidTransition = errors.New("invalid transition") + ErrApprovalRequired = errors.New("approval required") + ErrEnvironmentMismatch = errors.New("environment fingerprint mismatch") + ErrInvalidCheckpoint = errors.New("invalid checkpoint") + ErrStateContention = errors.New("state contention") + ErrPolicyEvaluationRequired = errors.New("policy evaluation required") + ErrIdentityValidationMissing = errors.New("identity validation required") + ErrIdentityRevoked = errors.New("identity revoked") ) type JobState struct { @@ -69,6 +72,9 @@ type JobState struct { StopReason string `json:"stop_reason"` StatusReasonCode string `json:"status_reason_code"` EnvironmentFingerprint string `json:"environment_fingerprint"` + PolicyDigest string `json:"policy_digest,omitempty"` + PolicyRef string `json:"policy_ref,omitempty"` + Identity string `json:"identity,omitempty"` Revision int64 `json:"revision"` Checkpoints []Checkpoint `json:"checkpoints"` Approvals []Approval `json:"approvals,omitempty"` @@ -106,6 +112,9 @@ type SubmitOptions struct { JobID string ProducerVersion string EnvironmentFingerprint string + PolicyDigest string + PolicyRef string + Identity string Actor string Now time.Time } @@ -121,6 +130,13 @@ type CheckpointOptions struct { type ResumeOptions struct { CurrentEnvironmentFingerprint string AllowEnvironmentMismatch bool + PolicyDigest string + PolicyRef string + RequirePolicyEvaluation bool + Identity string + RequireIdentityValidation bool + IdentityValidationSource string + IdentityRevoked bool Reason 
string Actor string Now time.Time @@ -151,6 +167,12 @@ func Submit(root string, opts SubmitOptions) (JobState, error) { if envfp == "" { envfp = EnvironmentFingerprint("") } + policyDigest := strings.TrimSpace(opts.PolicyDigest) + policyRef := strings.TrimSpace(opts.PolicyRef) + identity := strings.TrimSpace(opts.Identity) + if identity == "" { + identity = strings.TrimSpace(opts.Actor) + } statePath, eventsPath := jobPaths(root, jobID) if err := os.MkdirAll(filepath.Dir(statePath), 0o750); err != nil { return JobState{}, fmt.Errorf("create job directory: %w", err) @@ -171,12 +193,27 @@ func Submit(root string, opts SubmitOptions) (JobState, error) { StopReason: StopReasonNone, StatusReasonCode: "submitted", EnvironmentFingerprint: envfp, + PolicyDigest: policyDigest, + PolicyRef: policyRef, + Identity: identity, Revision: 1, Checkpoints: []Checkpoint{}, } if err := writeJSON(statePath, state); err != nil { return JobState{}, err } + eventPayload := map[string]any{ + "environment_fingerprint": envfp, + } + if policyDigest != "" { + eventPayload["policy_digest"] = policyDigest + } + if policyRef != "" { + eventPayload["policy_ref"] = policyRef + } + if identity != "" { + eventPayload["identity"] = identity + } event := Event{ SchemaID: eventSchemaID, SchemaVersion: jobSchemaVersion, @@ -186,9 +223,7 @@ func Submit(root string, opts SubmitOptions) (JobState, error) { Type: "submitted", Actor: strings.TrimSpace(opts.Actor), ReasonCode: "submitted", - Payload: map[string]any{ - "environment_fingerprint": envfp, - }, + Payload: eventPayload, } if err := appendEvent(eventsPath, event); err != nil { return JobState{}, err @@ -350,37 +385,84 @@ func Resume(root string, jobID string, opts ResumeOptions) (JobState, error) { if reason == "" { reason = "resume" } + previousPolicyDigest := strings.TrimSpace(state.PolicyDigest) + previousPolicyRef := strings.TrimSpace(state.PolicyRef) + policyDigest := strings.TrimSpace(opts.PolicyDigest) + policyRef := 
strings.TrimSpace(opts.PolicyRef) + policyEvaluationRequired := opts.RequirePolicyEvaluation || previousPolicyDigest != "" || previousPolicyRef != "" + if policyEvaluationRequired && policyDigest == "" { + return JobState{}, Event{}, fmt.Errorf("%w: policy digest is required when policy evaluation is enabled", ErrPolicyEvaluationRequired) + } + policyChanged := false + if policyDigest != "" { + policyChanged = previousPolicyDigest != "" && previousPolicyDigest != policyDigest + state.PolicyDigest = policyDigest + } + if policyRef != "" { + state.PolicyRef = policyRef + } + identity := strings.TrimSpace(opts.Identity) + if identity == "" { + identity = strings.TrimSpace(state.Identity) + } + identityValidationRequired := opts.RequireIdentityValidation || strings.TrimSpace(state.Identity) != "" + if identityValidationRequired && identity == "" { + return JobState{}, Event{}, fmt.Errorf("%w: identity is required for resume validation", ErrIdentityValidationMissing) + } + if identity != "" && opts.IdentityRevoked { + return JobState{}, Event{}, fmt.Errorf("%w: %s", ErrIdentityRevoked, identity) + } + if identity != "" { + state.Identity = identity + } + reasonCode := "resumed" + if policyChanged { + reasonCode = "resumed_with_policy_transition" + } + payload := buildResumePayload(resumePayloadOptions{ + Reason: reason, + ExpectedFingerprint: state.EnvironmentFingerprint, + ActualFingerprint: current, + PolicyEvaluationRequired: policyEvaluationRequired, + PreviousPolicyDigest: previousPolicyDigest, + CurrentPolicyDigest: strings.TrimSpace(state.PolicyDigest), + PreviousPolicyRef: previousPolicyRef, + CurrentPolicyRef: strings.TrimSpace(state.PolicyRef), + PolicyChanged: policyChanged, + Identity: identity, + IdentityValidationRequired: identityValidationRequired, + IdentityValidationSource: strings.TrimSpace(opts.IdentityValidationSource), + }) if state.EnvironmentFingerprint != "" && current != state.EnvironmentFingerprint { if !opts.AllowEnvironmentMismatch { return 
JobState{}, Event{}, fmt.Errorf("%w: expected=%s actual=%s", ErrEnvironmentMismatch, state.EnvironmentFingerprint, current) } - state.StatusReasonCode = "resumed_with_env_override" + if policyChanged { + reasonCode = "resumed_with_env_override_policy_transition" + } else { + reasonCode = "resumed_with_env_override" + } + state.StatusReasonCode = reasonCode state.StopReason = StopReasonNone state.Status = StatusRunning updated := *state return updated, Event{ Type: "resumed", Actor: strings.TrimSpace(opts.Actor), - ReasonCode: "resumed_with_env_override", - Payload: map[string]any{ - "expected_fingerprint": state.EnvironmentFingerprint, - "actual_fingerprint": current, - "reason": reason, - }, + ReasonCode: reasonCode, + Payload: payload, }, nil } state.Status = StatusRunning state.StopReason = StopReasonNone - state.StatusReasonCode = "resumed" + state.StatusReasonCode = reasonCode updated := *state return updated, Event{ Type: "resumed", Actor: strings.TrimSpace(opts.Actor), - ReasonCode: "resumed", - Payload: map[string]any{ - "reason": reason, - }, + ReasonCode: reasonCode, + Payload: payload, }, nil }) } @@ -415,6 +497,55 @@ func simpleTransition(root string, jobID string, now time.Time, actor string, ev }) } +type resumePayloadOptions struct { + Reason string + ExpectedFingerprint string + ActualFingerprint string + PolicyEvaluationRequired bool + PreviousPolicyDigest string + CurrentPolicyDigest string + PreviousPolicyRef string + CurrentPolicyRef string + PolicyChanged bool + Identity string + IdentityValidationRequired bool + IdentityValidationSource string +} + +func buildResumePayload(options resumePayloadOptions) map[string]any { + payload := map[string]any{ + "reason": options.Reason, + } + if options.ExpectedFingerprint != "" { + payload["expected_fingerprint"] = options.ExpectedFingerprint + } + if options.ActualFingerprint != "" { + payload["actual_fingerprint"] = options.ActualFingerprint + } + if options.PolicyEvaluationRequired { + 
payload["policy_evaluation_required"] = true + } + if options.PreviousPolicyDigest != "" || options.CurrentPolicyDigest != "" { + payload["previous_policy_digest"] = options.PreviousPolicyDigest + payload["current_policy_digest"] = options.CurrentPolicyDigest + payload["policy_changed"] = options.PolicyChanged + } + if options.PreviousPolicyRef != "" || options.CurrentPolicyRef != "" { + payload["previous_policy_ref"] = options.PreviousPolicyRef + payload["current_policy_ref"] = options.CurrentPolicyRef + } + if options.IdentityValidationRequired { + payload["identity_validation_required"] = true + } + if options.Identity != "" { + payload["identity"] = options.Identity + } + if options.IdentityValidationSource != "" { + payload["identity_validation_source"] = options.IdentityValidationSource + } + return payload +} + func mutate(root string, jobID string, mutator func(*JobState, time.Time) (Event, error), now time.Time) (JobState, error) { return mutateWithResult(root, jobID, now, func(state *JobState, ts time.Time) (JobState, Event, error) { event, err := mutator(state, ts) diff --git a/core/jobruntime/runtime_test.go b/core/jobruntime/runtime_test.go index 99304c5..ff8d22c 100644 --- a/core/jobruntime/runtime_test.go +++ b/core/jobruntime/runtime_test.go @@ -134,6 +134,119 @@ func TestResumeEnvironmentMismatchFailClosed(t *testing.T) { } } +func TestResumePolicyTransitionAndIdentityValidation(t *testing.T) { + root := filepath.Join(t.TempDir(), "jobs") + jobID := "job-policy-transition" + if _, err := Submit(root, SubmitOptions{ + JobID: jobID, + EnvironmentFingerprint: "env:a", + PolicyDigest: "policy-digest-a", + PolicyRef: "policy-a.yaml", + Identity: "agent.alice", + }); err != nil { + t.Fatalf("submit job: %v", err) + } + if _, err := Pause(root, jobID, TransitionOptions{}); err != nil { + t.Fatalf("pause: %v", err) + } + state, err := Resume(root, jobID, ResumeOptions{ + CurrentEnvironmentFingerprint: "env:a", + PolicyDigest: "policy-digest-b", + 
PolicyRef: "policy-b.yaml", + RequirePolicyEvaluation: true, + RequireIdentityValidation: true, + IdentityValidationSource: "revocation_list", + Identity: "agent.alice", + }) + if err != nil { + t.Fatalf("resume with policy transition: %v", err) + } + if state.StatusReasonCode != "resumed_with_policy_transition" { + t.Fatalf("expected policy transition reason code, got %s", state.StatusReasonCode) + } + if state.PolicyDigest != "policy-digest-b" || state.PolicyRef != "policy-b.yaml" { + t.Fatalf("expected updated policy metadata in state, got %#v", state) + } + _, events, err := Inspect(root, jobID) + if err != nil { + t.Fatalf("inspect: %v", err) + } + if len(events) == 0 { + t.Fatalf("expected events in journal") + } + last := events[len(events)-1] + if last.ReasonCode != "resumed_with_policy_transition" { + t.Fatalf("unexpected last event reason code: %#v", last) + } + if got, _ := last.Payload["previous_policy_digest"].(string); got != "policy-digest-a" { + t.Fatalf("unexpected previous policy digest in event payload: %#v", last.Payload) + } + if got, _ := last.Payload["current_policy_digest"].(string); got != "policy-digest-b" { + t.Fatalf("unexpected current policy digest in event payload: %#v", last.Payload) + } + if got, _ := last.Payload["identity_validation_source"].(string); got != "revocation_list" { + t.Fatalf("unexpected identity validation source in event payload: %#v", last.Payload) + } +} + +func TestResumeRequiresPolicyEvaluationWhenBoundToPolicy(t *testing.T) { + root := filepath.Join(t.TempDir(), "jobs") + jobID := "job-policy-required" + if _, err := Submit(root, SubmitOptions{ + JobID: jobID, + EnvironmentFingerprint: "env:a", + PolicyDigest: "policy-digest-a", + PolicyRef: "policy-a.yaml", + }); err != nil { + t.Fatalf("submit job: %v", err) + } + if _, err := Pause(root, jobID, TransitionOptions{}); err != nil { + t.Fatalf("pause: %v", err) + } + if _, err := Resume(root, jobID, ResumeOptions{ + CurrentEnvironmentFingerprint: "env:a", + }); 
!errors.Is(err, ErrPolicyEvaluationRequired) { + t.Fatalf("expected policy evaluation required error, got %v", err) + } +} + +func TestResumeIdentityValidationErrors(t *testing.T) { + root := filepath.Join(t.TempDir(), "jobs") + + if _, err := Submit(root, SubmitOptions{ + JobID: "job-identity-required", + EnvironmentFingerprint: "env:a", + }); err != nil { + t.Fatalf("submit identity-required job: %v", err) + } + if _, err := Pause(root, "job-identity-required", TransitionOptions{}); err != nil { + t.Fatalf("pause identity-required job: %v", err) + } + if _, err := Resume(root, "job-identity-required", ResumeOptions{ + CurrentEnvironmentFingerprint: "env:a", + RequireIdentityValidation: true, + }); !errors.Is(err, ErrIdentityValidationMissing) { + t.Fatalf("expected missing identity validation error, got %v", err) + } + + if _, err := Submit(root, SubmitOptions{ + JobID: "job-identity-revoked", + EnvironmentFingerprint: "env:a", + Identity: "agent.revoked", + }); err != nil { + t.Fatalf("submit identity-revoked job: %v", err) + } + if _, err := Pause(root, "job-identity-revoked", TransitionOptions{}); err != nil { + t.Fatalf("pause identity-revoked job: %v", err) + } + if _, err := Resume(root, "job-identity-revoked", ResumeOptions{ + CurrentEnvironmentFingerprint: "env:a", + IdentityRevoked: true, + }); !errors.Is(err, ErrIdentityRevoked) { + t.Fatalf("expected identity revoked error, got %v", err) + } +} + func TestInvalidPauseTransition(t *testing.T) { root := filepath.Join(t.TempDir(), "jobs") if _, err := Submit(root, SubmitOptions{JobID: "job-5"}); err != nil { diff --git a/core/mcp/interfaces.go b/core/mcp/interfaces.go index c22fe6f..c94dfe0 100644 --- a/core/mcp/interfaces.go +++ b/core/mcp/interfaces.go @@ -29,10 +29,21 @@ type ScriptStep struct { } type Target struct { - Kind string `json:"kind"` - Value string `json:"value"` - Operation string `json:"operation,omitempty"` - Sensitivity string `json:"sensitivity,omitempty"` + Kind string `json:"kind"` + 
Value string `json:"value"` + Operation string `json:"operation,omitempty"` + Sensitivity string `json:"sensitivity,omitempty"` + DiscoveryMethod string `json:"discovery_method,omitempty"` + DiscoveryMethodAlias string `json:"discoveryMethod,omitempty"` + ReadOnlyHint bool `json:"read_only_hint,omitempty"` + ReadOnlyHintAlias bool `json:"readOnlyHint,omitempty"` + DestructiveHint bool `json:"destructive_hint,omitempty"` + DestructiveHintAlias bool `json:"destructiveHint,omitempty"` + IdempotentHint bool `json:"idempotent_hint,omitempty"` + IdempotentHintAlias bool `json:"idempotentHint,omitempty"` + OpenWorldHint bool `json:"open_world_hint,omitempty"` + OpenWorldHintAlias bool `json:"openWorldHint,omitempty"` + Annotations map[string]any `json:"annotations,omitempty"` } type ArgProvenance struct { diff --git a/core/mcp/proxy.go b/core/mcp/proxy.go index 8a8e30a..54ba2d2 100644 --- a/core/mcp/proxy.go +++ b/core/mcp/proxy.go @@ -68,11 +68,17 @@ func ToIntentRequestWithOptions(call ToolCall, opts IntentOptions) (schemagate.I intentTargets := make([]schemagate.IntentTarget, 0, len(targets)) for _, target := range targets { + hints := normalizeTargetHints(target) intentTargets = append(intentTargets, schemagate.IntentTarget{ - Kind: strings.TrimSpace(target.Kind), - Value: strings.TrimSpace(target.Value), - Operation: strings.TrimSpace(target.Operation), - Sensitivity: strings.TrimSpace(target.Sensitivity), + Kind: strings.TrimSpace(target.Kind), + Value: strings.TrimSpace(target.Value), + Operation: strings.TrimSpace(target.Operation), + Sensitivity: strings.TrimSpace(target.Sensitivity), + DiscoveryMethod: hints.DiscoveryMethod, + ReadOnlyHint: hints.ReadOnlyHint, + DestructiveHint: hints.DestructiveHint, + IdempotentHint: hints.IdempotentHint, + OpenWorldHint: hints.OpenWorldHint, }) } @@ -100,11 +106,17 @@ func ToIntentRequestWithOptions(call ToolCall, opts IntentOptions) (schemagate.I } stepTargets := make([]schemagate.IntentTarget, 0, len(step.Targets)) for _, 
target := range step.Targets { + hints := normalizeTargetHints(target) stepTargets = append(stepTargets, schemagate.IntentTarget{ - Kind: strings.TrimSpace(target.Kind), - Value: strings.TrimSpace(target.Value), - Operation: strings.TrimSpace(target.Operation), - Sensitivity: strings.TrimSpace(target.Sensitivity), + Kind: strings.TrimSpace(target.Kind), + Value: strings.TrimSpace(target.Value), + Operation: strings.TrimSpace(target.Operation), + Sensitivity: strings.TrimSpace(target.Sensitivity), + DiscoveryMethod: hints.DiscoveryMethod, + ReadOnlyHint: hints.ReadOnlyHint, + DestructiveHint: hints.DestructiveHint, + IdempotentHint: hints.IdempotentHint, + OpenWorldHint: hints.OpenWorldHint, }) } stepProvenance := make([]schemagate.IntentArgProvenance, 0, len(step.ArgProvenance)) @@ -366,6 +378,87 @@ func normalizeClaudeCodeToolName(rawName string) string { return "tool." + normalized } +type targetHints struct { + DiscoveryMethod string + ReadOnlyHint bool + DestructiveHint bool + IdempotentHint bool + OpenWorldHint bool +} + +func normalizeTargetHints(target Target) targetHints { + hints := targetHints{ + DiscoveryMethod: strings.TrimSpace(target.DiscoveryMethod), + ReadOnlyHint: target.ReadOnlyHint || target.ReadOnlyHintAlias, + DestructiveHint: target.DestructiveHint || target.DestructiveHintAlias, + IdempotentHint: target.IdempotentHint || target.IdempotentHintAlias, + OpenWorldHint: target.OpenWorldHint || target.OpenWorldHintAlias, + } + if hints.DiscoveryMethod == "" { + hints.DiscoveryMethod = strings.TrimSpace(target.DiscoveryMethodAlias) + } + if value, ok := annotationString(target.Annotations, "discovery_method", "discoveryMethod"); ok { + hints.DiscoveryMethod = strings.TrimSpace(value) + } + if value, ok := annotationBool(target.Annotations, "read_only_hint", "readOnlyHint"); ok { + hints.ReadOnlyHint = value + } + if value, ok := annotationBool(target.Annotations, "destructive_hint", "destructiveHint"); ok { + hints.DestructiveHint = value + } + if 
value, ok := annotationBool(target.Annotations, "idempotent_hint", "idempotentHint"); ok { + hints.IdempotentHint = value + } + if value, ok := annotationBool(target.Annotations, "open_world_hint", "openWorldHint"); ok { + hints.OpenWorldHint = value + } + return hints +} + +func annotationBool(values map[string]any, keys ...string) (bool, bool) { + if values == nil { + return false, false + } + for _, key := range keys { + value, ok := values[key] + if !ok { + continue + } + switch typed := value.(type) { + case bool: + return typed, true + case string: + normalized := strings.ToLower(strings.TrimSpace(typed)) + if normalized == "true" { + return true, true + } + if normalized == "false" { + return false, true + } + } + } + return false, false +} + +func annotationString(values map[string]any, keys ...string) (string, bool) { + if values == nil { + return "", false + } + for _, key := range keys { + value, ok := values[key] + if !ok { + continue + } + switch typed := value.(type) { + case string: + return typed, true + case fmt.Stringer: + return typed.String(), true + } + } + return "", false +} + func inferClaudeCodeTargets(toolName string, args map[string]any) []Target { switch toolName { case "tool.read": diff --git a/core/mcp/proxy_test.go b/core/mcp/proxy_test.go index 63cbae3..a4a5bd0 100644 --- a/core/mcp/proxy_test.go +++ b/core/mcp/proxy_test.go @@ -320,6 +320,49 @@ func TestToIntentRequestWrapper(t *testing.T) { } } +func TestToIntentRequestCarriesTargetDiscoveryAndAnnotations(t *testing.T) { + intent, err := ToIntentRequest(ToolCall{ + Name: "tool.read", + Args: map[string]any{"path": "/tmp/input.txt"}, + Targets: []Target{ + { + Kind: "path", + Value: "/tmp/input.txt", + Operation: "read", + DiscoveryMethodAlias: "webmcp", + Annotations: map[string]any{ + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": "true", + "openWorldHint": "false", + }, + }, + }, + }) + if err != nil { + t.Fatalf("ToIntentRequest with annotation hints 
failed: %v", err) + } + if len(intent.Targets) != 1 { + t.Fatalf("expected one target, got %#v", intent.Targets) + } + target := intent.Targets[0] + if target.DiscoveryMethod != "webmcp" { + t.Fatalf("expected discovery method to be preserved, got %#v", target) + } + if !target.ReadOnlyHint { + t.Fatalf("expected read_only_hint to be true, got %#v", target) + } + if target.DestructiveHint { + t.Fatalf("expected destructive_hint to be false, got %#v", target) + } + if !target.IdempotentHint { + t.Fatalf("expected idempotent_hint to be true, got %#v", target) + } + if target.OpenWorldHint { + t.Fatalf("expected open_world_hint to be false, got %#v", target) + } +} + func TestToIntentRequestScriptPayload(t *testing.T) { intent, err := ToIntentRequest(ToolCall{ Script: &ScriptCall{ diff --git a/core/schema/v1/gate/types.go b/core/schema/v1/gate/types.go index 2989031..de22f4e 100644 --- a/core/schema/v1/gate/types.go +++ b/core/schema/v1/gate/types.go @@ -82,13 +82,18 @@ type IntentScriptStep struct { } type IntentTarget struct { - Kind string `json:"kind"` - Value string `json:"value"` - Operation string `json:"operation,omitempty"` - Sensitivity string `json:"sensitivity,omitempty"` - EndpointClass string `json:"endpoint_class,omitempty"` - EndpointDomain string `json:"endpoint_domain,omitempty"` - Destructive bool `json:"destructive,omitempty"` + Kind string `json:"kind"` + Value string `json:"value"` + Operation string `json:"operation,omitempty"` + Sensitivity string `json:"sensitivity,omitempty"` + EndpointClass string `json:"endpoint_class,omitempty"` + EndpointDomain string `json:"endpoint_domain,omitempty"` + Destructive bool `json:"destructive,omitempty"` + DiscoveryMethod string `json:"discovery_method,omitempty"` + ReadOnlyHint bool `json:"read_only_hint,omitempty"` + DestructiveHint bool `json:"destructive_hint,omitempty"` + IdempotentHint bool `json:"idempotent_hint,omitempty"` + OpenWorldHint bool `json:"open_world_hint,omitempty"` } type 
IntentArgProvenance struct { diff --git a/docs-site/public/llms.txt b/docs-site/public/llms.txt index e59c889..0351c20 100644 --- a/docs-site/public/llms.txt +++ b/docs-site/public/llms.txt @@ -23,13 +23,14 @@ - gait gate eval --policy --intent --wrkr-inventory - gait approve-script --policy --intent --registry --approver - gait list-scripts --registry -- gait job submit --id +- gait job submit --id --policy --identity - gait job status --id - gait job checkpoint add --id --type --summary - gait job checkpoint list --id - gait job pause --id -- gait job resume --id +- gait job resume --id --policy [--identity-revocations ] - gait job cancel --id +- gait gateway ingest --source --log-path - gait voice token mint --intent --policy - gait voice token verify --token - gait voice pack build --from diff --git a/docs/durable_jobs.md b/docs/durable_jobs.md index b64a314..34a5050 100644 --- a/docs/durable_jobs.md +++ b/docs/durable_jobs.md @@ -38,15 +38,17 @@ The job surface is for runtime control and evidence, not prompt orchestration. ## Minimal Lifecycle ```bash -gait job submit --id job_1 --json +gait job submit --id job_1 --identity worker_1 --policy ./policy.yaml --json gait job checkpoint add --id job_1 --type progress --summary "step 1 complete" --json gait job pause --id job_1 --json gait job approve --id job_1 --actor reviewer_1 --reason "validated input" --json -gait job resume --id job_1 --actor worker_1 --reason "continue after approval" --json +gait job resume --id job_1 --actor worker_1 --reason "continue after approval" --policy ./policy.yaml --identity-revocations ./revoked_identities.txt --identity-validation-source revocation_list --json gait job inspect --id job_1 --json gait job status --id job_1 --json ``` +`resume` is fail-closed for policy-bound jobs: if a paused job has a bound policy digest, you must provide current policy evaluation metadata (for example `--policy`) before continuation. 
+ ## Artifact And Verification Path Durable jobs produce state under the job root (default `./gait-out/jobs`) and can be promoted to a pack: @@ -94,4 +96,3 @@ Not necessary: - runtime implementation: `core/jobruntime/` - pack conversion/verification: `core/pack/` - representative adapter path: `examples/integrations/openai_agents/quickstart.py` - diff --git a/go.mod b/go.mod index f21d929..c8d53b5 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/Clyra-AI/gait go 1.25.7 require ( - github.com/Clyra-AI/proof v0.4.3 + github.com/Clyra-AI/proof v0.4.4 github.com/goccy/go-yaml v1.19.2 gopkg.in/yaml.v3 v3.0.1 ) diff --git a/go.sum b/go.sum index 2c74959..02af93c 100644 --- a/go.sum +++ b/go.sum @@ -1,5 +1,5 @@ -github.com/Clyra-AI/proof v0.4.3 h1:8JzOvB95mLDZofM6nv6stDvhGrJKZJwfASdf6O9goYs= -github.com/Clyra-AI/proof v0.4.3/go.mod h1:EDff6buidj222E+EYyqQXXj1rtPgSFlYOxl2JFfWKFs= +github.com/Clyra-AI/proof v0.4.4 h1:JHMPthKX+2Ud3pRWSd13LKHkS04uDXHizna7T1D2mpw= +github.com/Clyra-AI/proof v0.4.4/go.mod h1:EDff6buidj222E+EYyqQXXj1rtPgSFlYOxl2JFfWKFs= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= diff --git a/schemas/v1/gate/intent_request.schema.json b/schemas/v1/gate/intent_request.schema.json index fc3554e..559f3e2 100644 --- a/schemas/v1/gate/intent_request.schema.json +++ b/schemas/v1/gate/intent_request.schema.json @@ -65,7 +65,15 @@ ] }, "endpoint_domain": { "type": "string" }, - "destructive": { "type": "boolean" } + "destructive": { "type": "boolean" }, + "discovery_method": { + "type": "string", + "enum": ["webmcp", "dynamic_mcp", "a2a", "mcp", "static_mcp", "manual", "unknown"] + }, + "read_only_hint": { "type": "boolean" }, + "destructive_hint": { "type": "boolean" }, + "idempotent_hint": { "type": "boolean" }, + "open_world_hint": { 
"type": "boolean" } }, "additionalProperties": false } @@ -120,7 +128,15 @@ ] }, "endpoint_domain": { "type": "string" }, - "destructive": { "type": "boolean" } + "destructive": { "type": "boolean" }, + "discovery_method": { + "type": "string", + "enum": ["webmcp", "dynamic_mcp", "a2a", "mcp", "static_mcp", "manual", "unknown"] + }, + "read_only_hint": { "type": "boolean" }, + "destructive_hint": { "type": "boolean" }, + "idempotent_hint": { "type": "boolean" }, + "open_world_hint": { "type": "boolean" } }, "additionalProperties": false } diff --git a/schemas/v1/gate/policy.schema.json b/schemas/v1/gate/policy.schema.json index 9aa378b..577c03e 100644 --- a/schemas/v1/gate/policy.schema.json +++ b/schemas/v1/gate/policy.schema.json @@ -10,6 +10,10 @@ "type": "string", "enum": ["allow", "block", "dry_run", "require_approval"] }, + "default_action": { + "type": "string", + "enum": ["allow", "block", "dry_run", "require_approval"] + }, "scripts": { "type": "object", "properties": { @@ -41,7 +45,11 @@ "type": "array", "items": { "type": "object", - "required": ["name", "effect"], + "required": ["name"], + "anyOf": [ + { "required": ["effect"] }, + { "required": ["action"] } + ], "properties": { "name": { "type": "string", "minLength": 1 }, "priority": { "type": "integer", "minimum": 0 }, @@ -49,14 +57,31 @@ "type": "string", "enum": ["allow", "block", "dry_run", "require_approval"] }, + "action": { + "type": "string", + "enum": ["allow", "block", "dry_run", "require_approval"] + }, "match": { "type": "object", "properties": { + "tool_name": { "type": "string" }, "tool_names": { "type": "array", "items": { "type": "string" } }, "risk_classes": { "type": "array", "items": { "type": "string" } }, "target_kinds": { "type": "array", "items": { "type": "string" } }, "target_values": { "type": "array", "items": { "type": "string" } }, + "endpoint_class": { "type": "array", "items": { "type": "string" } }, "endpoint_classes": { "type": "array", "items": { "type": "string" } }, 
+ "discovery_method": { "type": "array", "items": { "type": "string" } }, + "tool_annotations": { + "type": "object", + "properties": { + "readOnlyHint": { "type": "boolean" }, + "destructiveHint": { "type": "boolean" }, + "idempotentHint": { "type": "boolean" }, + "openWorldHint": { "type": "boolean" } + }, + "additionalProperties": false + }, "skill_publishers": { "type": "array", "items": { "type": "string" } }, "skill_sources": { "type": "array", "items": { "type": "string" } }, "data_classes": { "type": "array", "items": { "type": "string" } },