diff --git a/docs/data-sources/argus_job.md b/docs/data-sources/argus_job.md index 09b9522f..2f75b6dd 100644 --- a/docs/data-sources/argus_job.md +++ b/docs/data-sources/argus_job.md @@ -50,6 +50,7 @@ data "stackit_argus_job" "example" { - `basic_auth` (Attributes) A basic_auth block (see [below for nested schema](#nestedatt--basic_auth)) - `id` (String) Specifies the Argus Job ID - `metrics_path` (String) Specifies the job scraping path. +- `sample_limit` (Number) Specifies the scrape sample limit. - `scheme` (String) Specifies the scheme. - `scrape_interval` (String) Specifies the scrape interval as duration string. - `scrape_timeout` (String) Specifies the scrape timeout as duration string. diff --git a/docs/resources/argus_job.md b/docs/resources/argus_job.md index c048ad81..6a507aeb 100644 --- a/docs/resources/argus_job.md +++ b/docs/resources/argus_job.md @@ -52,6 +52,7 @@ resource "stackit_argus_job" "example" { - `basic_auth` (Attributes) A basic_auth block (see [below for nested schema](#nestedatt--basic_auth)) - `metrics_path` (String) Specifies the job scraping path. Defaults to `/metrics` - `saml2` (Attributes) A saml2 configuration block (see [below for nested schema](#nestedatt--saml2)) +- `sample_limit` (Number) Specifies the scrape sample limit. Upper limit depends on the service plan. Default is `5000`. - `scheme` (String) Specifies the scheme. Default is `https`. - `scrape_interval` (String) Specifies the scrape interval as duration string. Default is `5m`. - `scrape_timeout` (String) Specifies the scrape timeout as duration string. Default is `2m`. 
diff --git a/stackit/internal/data-sources/argus/job/schema.go b/stackit/internal/data-sources/argus/job/schema.go index 26692f44..467ba6bc 100644 --- a/stackit/internal/data-sources/argus/job/schema.go +++ b/stackit/internal/data-sources/argus/job/schema.go @@ -66,6 +66,11 @@ func (d *DataSource) Schema(ctx context.Context, req datasource.SchemaRequest, r Computed: true, }, + "sample_limit": schema.Int64Attribute{ + Description: "Specifies the scrape sample limit.", + Computed: true, + }, + "saml2": schema.SingleNestedAttribute{ Description: "A saml2 configuration block", Optional: true, diff --git a/stackit/internal/resources/argus/job/actions.go b/stackit/internal/resources/argus/job/actions.go index 7b4f6767..4c0d91ee 100644 --- a/stackit/internal/resources/argus/job/actions.go +++ b/stackit/internal/resources/argus/job/actions.go @@ -7,9 +7,10 @@ import ( scrapeconfig "github.com/SchwarzIT/community-stackit-go-client/pkg/services/argus/v1.0/scrape-config" clientValidate "github.com/SchwarzIT/community-stackit-go-client/pkg/validate" - "github.com/SchwarzIT/terraform-provider-stackit/stackit/internal/common" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" + + "github.com/SchwarzIT/terraform-provider-stackit/stackit/internal/common" ) // Create - lifecycle function @@ -86,8 +87,8 @@ func (r Resource) Update(ctx context.Context, req resource.UpdateRequest, resp * } c := r.client - job := scrapeconfig.UpdateJSONRequestBody(plan.ToClientUpdateJob()) - ures, err := c.Argus.ScrapeConfig.Update(ctx, plan.ProjectID.ValueString(), plan.ArgusInstanceID.ValueString(), plan.Name.ValueString(), job) + jobs := plan.ToClientPartialUpdateJobs() + ures, err := c.Argus.ScrapeConfig.PartialUpdate(ctx, plan.ProjectID.ValueString(), plan.ArgusInstanceID.ValueString(), jobs) if agg := common.Validate(&resp.Diagnostics, ures, err); agg != nil { resp.Diagnostics.AddError("failed to update argus job", agg.Error()) return 
@@ -160,5 +161,4 @@ func (r *Resource) ImportState(ctx context.Context, req resource.ImportStateRequ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectID)...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("argus_instance_id"), instanceID)...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("name"), name)...) - } diff --git a/stackit/internal/resources/argus/job/helpers.go b/stackit/internal/resources/argus/job/helpers.go index d41691bd..699097c1 100644 --- a/stackit/internal/resources/argus/job/helpers.go +++ b/stackit/internal/resources/argus/job/helpers.go @@ -4,9 +4,10 @@ import ( "context" scrapeconfig "github.com/SchwarzIT/community-stackit-go-client/pkg/services/argus/v1.0/scrape-config" - "github.com/SchwarzIT/terraform-provider-stackit/stackit/internal/common" "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/types" + + "github.com/SchwarzIT/terraform-provider-stackit/stackit/internal/common" ) const ( @@ -14,6 +15,7 @@ const ( DefaultScheme = "https" DefaultScrapeInterval = "5m" DefaultScrapeTimeout = "2m" + DefaultSampleLimit = 5000 DefaultSAML2EnableURLParameters = true ) @@ -26,7 +28,7 @@ func (j *Job) setDefaults(job *scrapeconfig.CreateJSONBody) { job.MetricsPath = &s } if j.Scheme.IsNull() || j.Scheme.IsUnknown() { - job.Scheme = scrapeconfig.CreateJSONBodyScheme(DefaultScheme) + job.Scheme = DefaultScheme } if j.ScrapeInterval.IsNull() || j.ScrapeInterval.IsUnknown() { job.ScrapeInterval = DefaultScrapeInterval @@ -34,17 +36,22 @@ func (j *Job) setDefaults(job *scrapeconfig.CreateJSONBody) { if j.ScrapeTimeout.IsNull() || j.ScrapeTimeout.IsUnknown() { job.ScrapeTimeout = DefaultScrapeTimeout } + if j.SampleLimit.IsNull() || j.SampleLimit.IsUnknown() { + job.SampleLimit = toFloat32Ptr(DefaultSampleLimit) + } } -func (j *Job) setDefaultsUpdate(job *scrapeconfig.UpdateJSONBody) { - if job == nil { +func (j *Job) 
setDefaultsUpdate(partialUpdate scrapeconfig.PartialUpdateJSONBody) { + if len(partialUpdate) == 0 { return } + job := &partialUpdate[0] if j.MetricsPath.IsNull() || j.MetricsPath.IsUnknown() { - job.MetricsPath = DefaultMetricsPath + s := DefaultMetricsPath + job.MetricsPath = &s } if j.Scheme.IsNull() || j.Scheme.IsUnknown() { - job.Scheme = scrapeconfig.UpdateJSONBodyScheme(DefaultScheme) + job.Scheme = DefaultScheme } if j.ScrapeInterval.IsNull() || j.ScrapeInterval.IsUnknown() { job.ScrapeInterval = DefaultScrapeInterval @@ -55,13 +62,14 @@ func (j *Job) setDefaultsUpdate(job *scrapeconfig.UpdateJSONBody) { } func (j *Job) ToClientJob() scrapeconfig.CreateJSONBody { - mp := j.MetricsPath.ValueString() job := scrapeconfig.CreateJSONBody{ JobName: j.Name.ValueString(), Scheme: scrapeconfig.CreateJSONBodyScheme(j.Scheme.ValueString()), - MetricsPath: &mp, + MetricsPath: j.MetricsPath.ValueStringPointer(), ScrapeInterval: j.ScrapeInterval.ValueString(), ScrapeTimeout: j.ScrapeTimeout.ValueString(), + // This conversion might be lossy if the value is greater than 16777215. 
+ SampleLimit: toFloat32Ptr(float32(j.SampleLimit.ValueInt64())), } j.setDefaults(&job) @@ -114,15 +122,18 @@ func (j *Job) ToClientJob() scrapeconfig.CreateJSONBody { return job } -func (j *Job) ToClientUpdateJob() scrapeconfig.UpdateJSONBody { - job := scrapeconfig.UpdateJSONBody{ - Scheme: scrapeconfig.UpdateJSONBodyScheme(j.Scheme.ValueString()), - MetricsPath: j.MetricsPath.ValueString(), +func (j *Job) ToClientPartialUpdateJobs() scrapeconfig.PartialUpdateJSONBody { + jobs := scrapeconfig.PartialUpdateJSONBody{{ + JobName: j.Name.ValueString(), + Scheme: scrapeconfig.PartialUpdateJSONBodyScheme(j.Scheme.ValueString()), + MetricsPath: j.MetricsPath.ValueStringPointer(), ScrapeInterval: j.ScrapeInterval.ValueString(), ScrapeTimeout: j.ScrapeTimeout.ValueString(), - } + SampleLimit: toFloat32Ptr(float32(j.SampleLimit.ValueInt64())), + }} + j.setDefaultsUpdate(jobs) - j.setDefaultsUpdate(&job) + job := &jobs[0] if j.SAML2 != nil && !j.SAML2.EnableURLParameters.ValueBool() { if job.Params == nil { @@ -169,7 +180,7 @@ func (j *Job) ToClientUpdateJob() scrapeconfig.UpdateJSONBody { t[i] = ti } job.StaticConfigs = t - return job + return jobs } func (j *Job) FromClientJob(cj scrapeconfig.Job) { @@ -183,6 +194,10 @@ func (j *Job) FromClientJob(cj scrapeconfig.Job) { } j.ScrapeInterval = types.StringValue(cj.ScrapeInterval) j.ScrapeTimeout = types.StringValue(cj.ScrapeTimeout) + j.SampleLimit = types.Int64Null() + if cj.SampleLimit != nil { + j.SampleLimit = types.Int64Value(int64(*cj.SampleLimit)) + } j.handleSAML2(cj) j.handleBasicAuth(cj) j.handleTargets(cj) @@ -247,3 +262,7 @@ func (j *Job) handleTargets(cj scrapeconfig.Job) { } j.Targets = newTargets } + +func toFloat32Ptr(v float32) *float32 { + return &v +} diff --git a/stackit/internal/resources/argus/job/resource_test.go b/stackit/internal/resources/argus/job/resource_test.go index 83a14e39..225ac259 100644 --- a/stackit/internal/resources/argus/job/resource_test.go +++ 
b/stackit/internal/resources/argus/job/resource_test.go @@ -14,7 +14,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" ) -const run_this_test = false +const run_this_test = true func TestAcc_ArgusJob(t *testing.T) { if !common.ShouldAccTestRun(run_this_test) { diff --git a/stackit/internal/resources/argus/job/schema.go b/stackit/internal/resources/argus/job/schema.go index 41ab39c1..acf86897 100644 --- a/stackit/internal/resources/argus/job/schema.go +++ b/stackit/internal/resources/argus/job/schema.go @@ -4,17 +4,19 @@ import ( "context" "fmt" - "github.com/SchwarzIT/terraform-provider-stackit/stackit/internal/common" - "github.com/SchwarzIT/terraform-provider-stackit/stackit/pkg/validate" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/schema" "github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64default" "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" "github.com/hashicorp/terraform-plugin-framework/schema/validator" "github.com/hashicorp/terraform-plugin-framework/types" + + "github.com/SchwarzIT/terraform-provider-stackit/stackit/internal/common" + "github.com/SchwarzIT/terraform-provider-stackit/stackit/pkg/validate" ) // Job is the schema model @@ -27,6 +29,7 @@ type Job struct { Scheme types.String `tfsdk:"scheme"` ScrapeInterval types.String `tfsdk:"scrape_interval"` ScrapeTimeout types.String `tfsdk:"scrape_timeout"` + SampleLimit types.Int64 `tfsdk:"sample_limit"` SAML2 *SAML2 `tfsdk:"saml2"` BasicAuth *BasicAuth `tfsdk:"basic_auth"` Targets []Target `tfsdk:"targets"` @@ -119,6 +122,13 @@ func (r 
*Resource) Schema(ctx context.Context, req resource.SchemaRequest, resp Default: stringdefault.StaticString(DefaultScrapeTimeout), }, + "sample_limit": schema.Int64Attribute{ + Description: "Specifies the scrape sample limit. Upper limit depends on the service plan. Default is `5000`.", + Optional: true, + Computed: true, + Default: int64default.StaticInt64(DefaultSampleLimit), + }, + "saml2": schema.SingleNestedAttribute{ Description: "A saml2 configuration block", Optional: true,