add sample limit support for argus scraper jobs (#200) #202

Merged: 4 commits, Oct 19, 2023
1 change: 1 addition & 0 deletions docs/data-sources/argus_job.md
@@ -50,6 +50,7 @@ data "stackit_argus_job" "example" {
- `basic_auth` (Attributes) A basic_auth block (see [below for nested schema](#nestedatt--basic_auth))
- `id` (String) Specifies the Argus Job ID
- `metrics_path` (String) Specifies the job scraping path.
- `sample_limit` (Number) Specifies the scrape sample limit.
- `scheme` (String) Specifies the scheme.
- `scrape_interval` (String) Specifies the scrape interval as duration string.
- `scrape_timeout` (String) Specifies the scrape timeout as duration string.
1 change: 1 addition & 0 deletions docs/resources/argus_job.md
@@ -52,6 +52,7 @@ resource "stackit_argus_job" "example" {
- `basic_auth` (Attributes) A basic_auth block (see [below for nested schema](#nestedatt--basic_auth))
- `metrics_path` (String) Specifies the job scraping path. Defaults to `/metrics`
- `saml2` (Attributes) A saml2 configuration block (see [below for nested schema](#nestedatt--saml2))
- `sample_limit` (Number) Specifies the scrape sample limit. The upper limit depends on the service plan. Default is `5000`.
- `scheme` (String) Specifies the scheme. Default is `https`.
- `scrape_interval` (String) Specifies the scrape interval as duration string. Default is `5m`.
- `scrape_timeout` (String) Specifies the scrape timeout as duration string. Default is `2m`.
5 changes: 5 additions & 0 deletions stackit/internal/data-sources/argus/job/schema.go
@@ -66,6 +66,11 @@ func (d *DataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
Computed: true,
},

"sample_limit": schema.Int64Attribute{
Description: "Specifies the scrape sample limit.",
Computed: true,
},

"saml2": schema.SingleNestedAttribute{
Description: "A saml2 configuration block",
Optional: true,
8 changes: 4 additions & 4 deletions stackit/internal/resources/argus/job/actions.go
@@ -7,9 +7,10 @@ import (

scrapeconfig "github.com/SchwarzIT/community-stackit-go-client/pkg/services/argus/v1.0/scrape-config"
clientValidate "github.com/SchwarzIT/community-stackit-go-client/pkg/validate"
"github.com/SchwarzIT/terraform-provider-stackit/stackit/internal/common"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"

"github.com/SchwarzIT/terraform-provider-stackit/stackit/internal/common"
)

// Create - lifecycle function
@@ -86,8 +87,8 @@ func (r Resource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
}

c := r.client
job := scrapeconfig.UpdateJSONRequestBody(plan.ToClientUpdateJob())
ures, err := c.Argus.ScrapeConfig.Update(ctx, plan.ProjectID.ValueString(), plan.ArgusInstanceID.ValueString(), plan.Name.ValueString(), job)
jobs := plan.ToClientPartialUpdateJobs()
ures, err := c.Argus.ScrapeConfig.PartialUpdate(ctx, plan.ProjectID.ValueString(), plan.ArgusInstanceID.ValueString(), jobs)
if agg := common.Validate(&resp.Diagnostics, ures, err); agg != nil {
resp.Diagnostics.AddError("failed to update argus job", agg.Error())
return
@@ -160,5 +161,4 @@ func (r *Resource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectID)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("argus_instance_id"), instanceID)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("name"), name)...)

}
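
A Go subtlety worth calling out here, not part of the diff itself: `ToClientPartialUpdateJobs` (in helpers.go below) wraps the planned job in a one-element `PartialUpdateJSONBody` slice, and `setDefaultsUpdate` then fills in defaults by mutating that element through a pointer, even though the slice is passed by value. A minimal, self-contained sketch of the pattern; `job`, `batch`, and `applyDefaults` are illustrative stand-ins, not provider code:

```go
package main

import "fmt"

type job struct{ MetricsPath *string }
type batch []job

// applyDefaults mirrors setDefaultsUpdate: the slice header is copied,
// but writes to its elements land in the shared backing array.
func applyDefaults(b batch) {
	if len(b) == 0 {
		return
	}
	j := &b[0]
	if j.MetricsPath == nil {
		s := "/metrics"
		j.MetricsPath = &s
	}
}

func main() {
	b := batch{{}}
	applyDefaults(b)
	fmt.Println(*b[0].MetricsPath) // prints "/metrics"
}
```

This is why the new `setDefaultsUpdate` no longer needs a pointer parameter: copying a slice header is cheap, and both copies point at the same elements.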
49 changes: 34 additions & 15 deletions stackit/internal/resources/argus/job/helpers.go
@@ -4,16 +4,18 @@ import (
"context"

scrapeconfig "github.com/SchwarzIT/community-stackit-go-client/pkg/services/argus/v1.0/scrape-config"
"github.com/SchwarzIT/terraform-provider-stackit/stackit/internal/common"
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/types"

"github.com/SchwarzIT/terraform-provider-stackit/stackit/internal/common"
)

const (
DefaultMetricsPath = "/metrics"
DefaultScheme = "https"
DefaultScrapeInterval = "5m"
DefaultScrapeTimeout = "2m"
DefaultSampleLimit = 5000
DefaultSAML2EnableURLParameters = true
)

@@ -26,25 +28,30 @@ func (j *Job) setDefaults(job *scrapeconfig.CreateJSONBody) {
job.MetricsPath = &s
}
if j.Scheme.IsNull() || j.Scheme.IsUnknown() {
job.Scheme = scrapeconfig.CreateJSONBodyScheme(DefaultScheme)
job.Scheme = DefaultScheme
}
if j.ScrapeInterval.IsNull() || j.ScrapeInterval.IsUnknown() {
job.ScrapeInterval = DefaultScrapeInterval
}
if j.ScrapeTimeout.IsNull() || j.ScrapeTimeout.IsUnknown() {
job.ScrapeTimeout = DefaultScrapeTimeout
}
if j.SampleLimit.IsNull() || j.SampleLimit.IsUnknown() {
job.SampleLimit = toFloat32Ptr(DefaultSampleLimit)
}
}

func (j *Job) setDefaultsUpdate(job *scrapeconfig.UpdateJSONBody) {
if job == nil {
func (j *Job) setDefaultsUpdate(partialUpdate scrapeconfig.PartialUpdateJSONBody) {
if len(partialUpdate) == 0 {
return
}
job := &partialUpdate[0]
if j.MetricsPath.IsNull() || j.MetricsPath.IsUnknown() {
job.MetricsPath = DefaultMetricsPath
s := DefaultMetricsPath
job.MetricsPath = &s
}
if j.Scheme.IsNull() || j.Scheme.IsUnknown() {
job.Scheme = scrapeconfig.UpdateJSONBodyScheme(DefaultScheme)
job.Scheme = DefaultScheme
}
if j.ScrapeInterval.IsNull() || j.ScrapeInterval.IsUnknown() {
job.ScrapeInterval = DefaultScrapeInterval
Expand All @@ -55,13 +62,14 @@ func (j *Job) setDefaultsUpdate(job *scrapeconfig.UpdateJSONBody) {
}

func (j *Job) ToClientJob() scrapeconfig.CreateJSONBody {
mp := j.MetricsPath.ValueString()
job := scrapeconfig.CreateJSONBody{
JobName: j.Name.ValueString(),
Scheme: scrapeconfig.CreateJSONBodyScheme(j.Scheme.ValueString()),
MetricsPath: &mp,
MetricsPath: j.MetricsPath.ValueStringPointer(),
ScrapeInterval: j.ScrapeInterval.ValueString(),
ScrapeTimeout: j.ScrapeTimeout.ValueString(),
// This conversion might be lossy if the value is greater than 16777215.
SampleLimit: toFloat32Ptr(float32(j.SampleLimit.ValueInt64())),
}

j.setDefaults(&job)
@@ -114,15 +122,18 @@ func (j *Job) ToClientJob() scrapeconfig.CreateJSONBody {
return job
}

func (j *Job) ToClientUpdateJob() scrapeconfig.UpdateJSONBody {
job := scrapeconfig.UpdateJSONBody{
Scheme: scrapeconfig.UpdateJSONBodyScheme(j.Scheme.ValueString()),
MetricsPath: j.MetricsPath.ValueString(),
func (j *Job) ToClientPartialUpdateJobs() scrapeconfig.PartialUpdateJSONBody {
jobs := scrapeconfig.PartialUpdateJSONBody{{
JobName: j.Name.ValueString(),
Scheme: scrapeconfig.PartialUpdateJSONBodyScheme(j.Scheme.ValueString()),
MetricsPath: j.MetricsPath.ValueStringPointer(),
ScrapeInterval: j.ScrapeInterval.ValueString(),
ScrapeTimeout: j.ScrapeTimeout.ValueString(),
}
SampleLimit: toFloat32Ptr(float32(j.SampleLimit.ValueInt64())),
}}
j.setDefaultsUpdate(jobs)

j.setDefaultsUpdate(&job)
job := &jobs[0]

if j.SAML2 != nil && !j.SAML2.EnableURLParameters.ValueBool() {
if job.Params == nil {
@@ -169,7 +180,7 @@ func (j *Job) ToClientUpdateJob() scrapeconfig.UpdateJSONBody {
t[i] = ti
}
job.StaticConfigs = t
return job
return jobs
}

func (j *Job) FromClientJob(cj scrapeconfig.Job) {
@@ -183,6 +194,10 @@ func (j *Job) FromClientJob(cj scrapeconfig.Job) {
}
j.ScrapeInterval = types.StringValue(cj.ScrapeInterval)
j.ScrapeTimeout = types.StringValue(cj.ScrapeTimeout)
j.SampleLimit = types.Int64Null()
if cj.SampleLimit != nil {
j.SampleLimit = types.Int64Value(int64(*cj.SampleLimit))
}
j.handleSAML2(cj)
j.handleBasicAuth(cj)
j.handleTargets(cj)
@@ -247,3 +262,7 @@ func (j *Job) handleTargets(cj scrapeconfig.Job) {
}
j.Targets = newTargets
}

func toFloat32Ptr(v float32) *float32 {
return &v
}
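
A side note on the lossy-conversion comment above, not from the PR: the generated client models `SampleLimit` as `*float32` (hence the `toFloat32Ptr` helper), while the Terraform schema uses `Int64`. A `float32` has a 24-bit significand, so every integer of magnitude up to 2^24 = 16777216 round-trips exactly; beyond that, values start rounding to the nearest representable number, which is why the comment flags anything greater than 16777215. A small standalone Go program demonstrating the first lossy integer:

```go
package main

import "fmt"

func main() {
	// 2^24 - 1 fits in float32's 24-bit significand and converts back unchanged.
	exact := int64(16777215)
	fmt.Println(int64(float32(exact))) // 16777215

	// 2^24 + 1 is the first integer float32 cannot represent;
	// it rounds (ties-to-even) down to 16777216.
	lossy := int64(16777217)
	fmt.Println(int64(float32(lossy))) // 16777216
}
```

The default of `5000` is unaffected; the conversion only becomes a concern if a service plan ever allows sample limits above 16777215.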
2 changes: 1 addition & 1 deletion stackit/internal/resources/argus/job/resource_test.go
@@ -14,7 +14,7 @@ import (
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
)

const run_this_test = false
const run_this_test = true

func TestAcc_ArgusJob(t *testing.T) {
if !common.ShouldAccTestRun(run_this_test) {
14 changes: 12 additions & 2 deletions stackit/internal/resources/argus/job/schema.go
@@ -4,17 +4,19 @@ import (
"context"
"fmt"

"github.com/SchwarzIT/terraform-provider-stackit/stackit/internal/common"
"github.com/SchwarzIT/terraform-provider-stackit/stackit/pkg/validate"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/int64default"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"

"github.com/SchwarzIT/terraform-provider-stackit/stackit/internal/common"
"github.com/SchwarzIT/terraform-provider-stackit/stackit/pkg/validate"
)

// Job is the schema model
@@ -27,6 +29,7 @@ type Job struct {
Scheme types.String `tfsdk:"scheme"`
ScrapeInterval types.String `tfsdk:"scrape_interval"`
ScrapeTimeout types.String `tfsdk:"scrape_timeout"`
SampleLimit types.Int64 `tfsdk:"sample_limit"`
SAML2 *SAML2 `tfsdk:"saml2"`
BasicAuth *BasicAuth `tfsdk:"basic_auth"`
Targets []Target `tfsdk:"targets"`
@@ -119,6 +122,13 @@ func (r *Resource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
Default: stringdefault.StaticString(DefaultScrapeTimeout),
},

"sample_limit": schema.Int64Attribute{
Description: "Specifies the scrape sample limit. The upper limit depends on the service plan. Default is `5000`.",
Optional: true,
Computed: true,
Default: int64default.StaticInt64(DefaultSampleLimit),
},

"saml2": schema.SingleNestedAttribute{
Description: "A saml2 configuration block",
Optional: true,