add sample limit for argus scraper jobs (#219)

* add sample limit for argus scraper jobs

* adjust range

* Add sampleLimit to acceptance test

* Fix missing config

---------

Co-authored-by: Henrique Santos <henrique.santos@freiheit.com>
This commit is contained in:
Piet van der Meulen 2024-01-18 15:13:54 +01:00 committed by GitHub
parent 2be2c11558
commit c975a933bc
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
6 changed files with 71 additions and 40 deletions

View file

@ -30,6 +30,7 @@ var scrapeConfigResource = map[string]string{
"metrics_path": "/metrics",
"scheme": "https",
"scrape_interval": "4m", // non-default
"sample_limit": "7", // non-default
"saml2_enable_url_parameters": "false",
}
@ -54,6 +55,7 @@ func resourceConfig(instanceName, target, saml2EnableUrlParameters string) strin
metrics_path = "%s"
targets = [%s]
scrape_interval = "%s"
sample_limit = %s
saml2 = {
enable_url_parameters = %s
}
@ -73,6 +75,7 @@ func resourceConfig(instanceName, target, saml2EnableUrlParameters string) strin
scrapeConfigResource["metrics_path"],
target,
scrapeConfigResource["scrape_interval"],
scrapeConfigResource["sample_limit"],
saml2EnableUrlParameters,
)
}
@ -125,7 +128,7 @@ func TestAccResource(t *testing.T) {
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "metrics_path", scrapeConfigResource["metrics_path"]),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "scheme", scrapeConfigResource["scheme"]),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "scrape_interval", scrapeConfigResource["scrape_interval"]),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "scrape_interval", scrapeConfigResource["scrape_interval"]),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "sample_limit", scrapeConfigResource["sample_limit"]),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "saml2.enable_url_parameters", scrapeConfigResource["saml2_enable_url_parameters"]),
// credentials
@ -187,6 +190,7 @@ func TestAccResource(t *testing.T) {
resource.TestCheckResourceAttr("data.stackit_argus_scrapeconfig.scrapeconfig", "metrics_path", scrapeConfigResource["metrics_path"]),
resource.TestCheckResourceAttr("data.stackit_argus_scrapeconfig.scrapeconfig", "scheme", scrapeConfigResource["scheme"]),
resource.TestCheckResourceAttr("data.stackit_argus_scrapeconfig.scrapeconfig", "scrape_interval", scrapeConfigResource["scrape_interval"]),
resource.TestCheckResourceAttr("data.stackit_argus_scrapeconfig.scrapeconfig", "sample_limit", scrapeConfigResource["sample_limit"]),
resource.TestCheckResourceAttr("data.stackit_argus_scrapeconfig.scrapeconfig", "saml2.enable_url_parameters", scrapeConfigResource["saml2_enable_url_parameters"]),
),
},
@ -245,6 +249,7 @@ func TestAccResource(t *testing.T) {
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "metrics_path", scrapeConfigResource["metrics_path"]),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "scheme", scrapeConfigResource["scheme"]),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "scrape_interval", scrapeConfigResource["scrape_interval"]),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "sample_limit", scrapeConfigResource["sample_limit"]),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "saml2.%", "1"),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "saml2.enable_url_parameters", "true"),
@ -270,6 +275,7 @@ func TestAccResource(t *testing.T) {
name = "%s"
targets = [%s]
scrape_interval = "%s"
sample_limit = %s
metrics_path = "%s"
}
`,
@ -280,6 +286,7 @@ func TestAccResource(t *testing.T) {
scrapeConfigResource["name"],
scrapeConfigResource["urls"],
scrapeConfigResource["scrape_interval"],
scrapeConfigResource["sample_limit"],
scrapeConfigResource["metrics_path"],
),
Check: resource.ComposeAggregateTestCheckFunc(
@ -295,6 +302,7 @@ func TestAccResource(t *testing.T) {
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "metrics_path", scrapeConfigResource["metrics_path"]),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "scheme", scrapeConfigResource["scheme"]),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "scrape_interval", scrapeConfigResource["scrape_interval"]),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "sample_limit", scrapeConfigResource["sample_limit"]),
resource.TestCheckResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "saml2.%", "0"),
resource.TestCheckNoResourceAttr("stackit_argus_scrapeconfig.scrapeconfig", "saml2.enable_url_parameters"),
),

View file

@ -4,6 +4,7 @@ import (
"context"
"fmt"
"github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
"github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
"github.com/hashicorp/terraform-plugin-framework-validators/mapvalidator"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
@ -122,6 +123,14 @@ func (d *scrapeConfigDataSource) Schema(_ context.Context, _ datasource.SchemaRe
Computed: true,
},
"sample_limit": schema.Int64Attribute{
Description: "Specifies the scrape sample limit.",
Computed: true,
Validators: []validator.Int64{
int64validator.Between(1, 3000000),
},
},
"scrape_timeout": schema.StringAttribute{
Description: "Specifies the scrape timeout as duration string.",
Computed: true,

View file

@ -6,6 +6,7 @@ import (
"strings"
"time"
"github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
"github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
"github.com/hashicorp/terraform-plugin-framework-validators/mapvalidator"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
@ -14,6 +15,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/int64default"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
@ -33,6 +35,7 @@ const (
DefaultScheme = "https" // API default is "http"
DefaultScrapeInterval = "5m"
DefaultScrapeTimeout = "2m"
DefaultSampleLimit = int64(5000)
DefaultSAML2EnableURLParameters = true
)
@ -52,6 +55,7 @@ type Model struct {
Scheme types.String `tfsdk:"scheme"`
ScrapeInterval types.String `tfsdk:"scrape_interval"`
ScrapeTimeout types.String `tfsdk:"scrape_timeout"`
SampleLimit types.Int64 `tfsdk:"sample_limit"`
SAML2 *SAML2 `tfsdk:"saml2"`
BasicAuth *BasicAuth `tfsdk:"basic_auth"`
Targets []Target `tfsdk:"targets"`
@ -198,6 +202,15 @@ func (r *scrapeConfigResource) Schema(_ context.Context, _ resource.SchemaReques
},
Default: stringdefault.StaticString(DefaultScrapeTimeout),
},
"sample_limit": schema.Int64Attribute{
Description: "Specifies the scrape sample limit. Upper limit depends on the service plan. Default is `5000`.",
Optional: true,
Computed: true,
Validators: []validator.Int64{
int64validator.Between(1, 3000000),
},
Default: int64default.StaticInt64(DefaultSampleLimit),
},
"saml2": schema.SingleNestedAttribute{
Description: "A SAML2 configuration block.",
Optional: true,
@ -470,6 +483,7 @@ func mapFields(sc *argus.Job, model *Model) error {
model.Scheme = types.StringPointerValue(sc.Scheme)
model.ScrapeInterval = types.StringPointerValue(sc.ScrapeInterval)
model.ScrapeTimeout = types.StringPointerValue(sc.ScrapeTimeout)
model.SampleLimit = types.Int64PointerValue(sc.SampleLimit)
handleSAML2(sc, model)
handleBasicAuth(sc, model)
handleTargets(sc, model)
@ -552,7 +566,9 @@ func toCreatePayload(ctx context.Context, model *Model) (*argus.CreateScrapeConf
MetricsPath: conversion.StringValueToPointer(model.MetricsPath),
ScrapeInterval: conversion.StringValueToPointer(model.ScrapeInterval),
ScrapeTimeout: conversion.StringValueToPointer(model.ScrapeTimeout),
Scheme: conversion.StringValueToPointer(model.Scheme),
// potentially lossy conversion, depending on the allowed range for sample_limit
SampleLimit: utils.Ptr(float64(model.SampleLimit.ValueInt64())),
Scheme: conversion.StringValueToPointer(model.Scheme),
}
setDefaultsCreateScrapeConfig(&sc, model)
@ -607,6 +623,9 @@ func setDefaultsCreateScrapeConfig(sc *argus.CreateScrapeConfigPayload, model *M
if model.ScrapeTimeout.IsNull() || model.ScrapeTimeout.IsUnknown() {
sc.ScrapeTimeout = utils.Ptr(DefaultScrapeTimeout)
}
if model.SampleLimit.IsNull() || model.SampleLimit.IsUnknown() {
sc.SampleLimit = utils.Ptr(float64(DefaultSampleLimit))
}
// Make the API default more explicit by setting the field.
if model.SAML2 == nil || model.SAML2.EnableURLParameters.IsNull() || model.SAML2.EnableURLParameters.IsUnknown() {
m := map[string]interface{}{}
@ -631,7 +650,9 @@ func toUpdatePayload(ctx context.Context, model *Model) (*argus.UpdateScrapeConf
MetricsPath: conversion.StringValueToPointer(model.MetricsPath),
ScrapeInterval: conversion.StringValueToPointer(model.ScrapeInterval),
ScrapeTimeout: conversion.StringValueToPointer(model.ScrapeTimeout),
Scheme: conversion.StringValueToPointer(model.Scheme),
// potentially lossy conversion, depending on the allowed range for sample_limit
SampleLimit: utils.Ptr(float64(model.SampleLimit.ValueInt64())),
Scheme: conversion.StringValueToPointer(model.Scheme),
}
setDefaultsUpdateScrapeConfig(&sc, model)
@ -686,4 +707,7 @@ func setDefaultsUpdateScrapeConfig(sc *argus.UpdateScrapeConfigPayload, model *M
if model.ScrapeTimeout.IsNull() || model.ScrapeTimeout.IsUnknown() {
sc.ScrapeTimeout = utils.Ptr(DefaultScrapeTimeout)
}
if model.SampleLimit.IsNull() || model.SampleLimit.IsUnknown() {
sc.SampleLimit = utils.Ptr(float64(DefaultSampleLimit))
}
}

View file

@ -51,6 +51,7 @@ func TestMapFields(t *testing.T) {
Scheme: utils.Ptr("scheme"),
ScrapeInterval: utils.Ptr("1"),
ScrapeTimeout: utils.Ptr("2"),
SampleLimit: utils.Ptr(int64(17)),
StaticConfigs: &[]argus.StaticConfigs{
{
Labels: &map[string]string{"k1": "v1"},
@ -75,6 +76,7 @@ func TestMapFields(t *testing.T) {
Scheme: types.StringValue("scheme"),
ScrapeInterval: types.StringValue("1"),
ScrapeTimeout: types.StringValue("2"),
SampleLimit: types.Int64Value(17),
SAML2: &SAML2{
EnableURLParameters: types.BoolValue(false),
},
@ -158,6 +160,7 @@ func TestToCreatePayload(t *testing.T) {
Scheme: utils.Ptr("https"),
ScrapeInterval: utils.Ptr("5m"),
ScrapeTimeout: utils.Ptr("2m"),
SampleLimit: utils.Ptr(float64(5000)),
StaticConfigs: &[]argus.CreateScrapeConfigPayloadStaticConfigsInner{},
Params: &map[string]any{"saml2": []string{"enabled"}},
},
@ -176,6 +179,7 @@ func TestToCreatePayload(t *testing.T) {
Scheme: utils.Ptr("https"),
ScrapeInterval: utils.Ptr("5m"),
ScrapeTimeout: utils.Ptr("2m"),
SampleLimit: utils.Ptr(float64(5000)),
StaticConfigs: &[]argus.CreateScrapeConfigPayloadStaticConfigsInner{},
Params: &map[string]any{"saml2": []string{"enabled"}},
},
@ -225,6 +229,7 @@ func TestToUpdatePayload(t *testing.T) {
Scheme: utils.Ptr("https"),
ScrapeInterval: utils.Ptr("5m"),
ScrapeTimeout: utils.Ptr("2m"),
SampleLimit: utils.Ptr(float64(5000)),
StaticConfigs: &[]argus.UpdateScrapeConfigPayloadStaticConfigsInner{},
},
true,
@ -241,6 +246,7 @@ func TestToUpdatePayload(t *testing.T) {
Scheme: utils.Ptr("http"),
ScrapeInterval: utils.Ptr("5m"),
ScrapeTimeout: utils.Ptr("2m"),
SampleLimit: utils.Ptr(float64(5000)),
StaticConfigs: &[]argus.UpdateScrapeConfigPayloadStaticConfigsInner{},
},
true,